mirror of
https://github.com/googleapis/genai-toolbox.git
synced 2026-01-11 08:28:11 -05:00
Compare commits
76 Commits
debug-mac
...
js-quickst
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
540cea3d1a | ||
|
|
9334368a42 | ||
|
|
2a650349cb | ||
|
|
5f7cc32127 | ||
|
|
abdab54503 | ||
|
|
e78bce32dc | ||
|
|
3727b1d053 | ||
|
|
7eff0f9ac7 | ||
|
|
023fcb9163 | ||
|
|
4468bc920b | ||
|
|
9a55b80482 | ||
|
|
3c5d2858e3 | ||
|
|
e5ac5ba9ee | ||
|
|
2bb790e4f8 | ||
|
|
2083ba5048 | ||
|
|
53afed5b76 | ||
|
|
2b2732ec39 | ||
|
|
a008fa2a5d | ||
|
|
9b1505e4bd | ||
|
|
b7795c8857 | ||
|
|
000831c15b | ||
|
|
5c54cc973d | ||
|
|
8cc91ee3f7 | ||
|
|
ed5ef4caea | ||
|
|
f3599627b8 | ||
|
|
208df0a428 | ||
|
|
313d3ca0d0 | ||
|
|
4dae5a6ed7 | ||
|
|
26bdba46ca | ||
|
|
a817b120ca | ||
|
|
8ce311f256 | ||
|
|
86227e3104 | ||
|
|
206bea4575 | ||
|
|
65843621c5 | ||
|
|
e681a7e36c | ||
|
|
0d1cadb245 | ||
|
|
6d27dabfb2 | ||
|
|
45f626a0d3 | ||
|
|
45183be932 | ||
|
|
ecfcf28d42 | ||
|
|
ddd29d4646 | ||
|
|
bdf9f3717b | ||
|
|
37bce26e4d | ||
|
|
1e90d847f9 | ||
|
|
f312fc01b2 | ||
|
|
4998cae260 | ||
|
|
138419b453 | ||
|
|
2b69700c5e | ||
|
|
c081ace46b | ||
|
|
edf32abd84 | ||
|
|
aa8dbec970 | ||
|
|
a7963c5a83 | ||
|
|
ea3c805467 | ||
|
|
ebbbe4c409 | ||
|
|
f9743ecf7e | ||
|
|
d3693c0d6b | ||
|
|
1a1815d822 | ||
|
|
391cb5bfe8 | ||
|
|
2bdcc0841a | ||
|
|
a6693ab8b0 | ||
|
|
e1325880d1 | ||
|
|
b7230a93df | ||
|
|
32712fa018 | ||
|
|
35e0919184 | ||
|
|
72a7282797 | ||
|
|
29fe3b93cd | ||
|
|
fb3f66acf4 | ||
|
|
1f95eb134b | ||
|
|
4c240ac3c9 | ||
|
|
c6ab74c5da | ||
|
|
04e2529ba9 | ||
|
|
588a690dde | ||
|
|
2d65ff5c63 | ||
|
|
9cc63bc52b | ||
|
|
c673f27b44 | ||
|
|
fe7caa08d6 |
@@ -425,6 +425,26 @@ steps:
|
||||
"Valkey" \
|
||||
valkey \
|
||||
valkey
|
||||
|
||||
- id: "firestore"
|
||||
name: golang:1
|
||||
waitFor: ["compile-test-binary"]
|
||||
entrypoint: /bin/bash
|
||||
env:
|
||||
- "GOPATH=/gopath"
|
||||
- "FIRESTORE_PROJECT=$PROJECT_ID"
|
||||
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
|
||||
secretEnv: ["CLIENT_ID"]
|
||||
volumes:
|
||||
- name: "go"
|
||||
path: "/gopath"
|
||||
args:
|
||||
- -c
|
||||
- |
|
||||
.ci/test_with_coverage.sh \
|
||||
"Firestore" \
|
||||
firestore \
|
||||
firestore
|
||||
|
||||
|
||||
availableSecrets:
|
||||
|
||||
4
.github/blunderbuss.yml
vendored
4
.github/blunderbuss.yml
vendored
@@ -1,7 +1,7 @@
|
||||
assign_issues:
|
||||
- kurtisvg
|
||||
- Yuan325
|
||||
- duwenxin99
|
||||
- akitsch
|
||||
assign_issues_by:
|
||||
- labels:
|
||||
- 'product: bigquery'
|
||||
@@ -10,6 +10,6 @@ assign_issues_by:
|
||||
- shobsi
|
||||
- jiaxunwu
|
||||
assign_prs:
|
||||
- kurtisvg
|
||||
- Yuan325
|
||||
- duwenxin99
|
||||
- akitsch
|
||||
|
||||
1
.github/release-please.yml
vendored
1
.github/release-please.yml
vendored
@@ -20,6 +20,7 @@ extraFiles: [
|
||||
"README.md",
|
||||
"docs/en/getting-started/introduction/_index.md",
|
||||
"docs/en/getting-started/local_quickstart.md",
|
||||
"docs/en/getting-started/local_quickstart_js.md",
|
||||
"docs/en/getting-started/mcp_quickstart/_index.md",
|
||||
"docs/en/samples/bigquery/local_quickstart.md",
|
||||
"docs/en/samples/bigquery/mcp_quickstart/_index.md",
|
||||
|
||||
1
.github/workflows/docs_preview_deploy.yaml
vendored
1
.github/workflows/docs_preview_deploy.yaml
vendored
@@ -51,7 +51,6 @@ jobs:
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod
|
||||
|
||||
- name: Setup Hugo
|
||||
|
||||
1
.hugo/assets/scss/_styles_project.scss
Normal file
1
.hugo/assets/scss/_styles_project.scss
Normal file
@@ -0,0 +1 @@
|
||||
@import 'td/code-dark';
|
||||
@@ -48,7 +48,11 @@ enableRobotsTXT = true
|
||||
pre = "<i class='fa-brands fa-github'></i>"
|
||||
|
||||
[markup.goldmark.renderer]
|
||||
unsafe= true
|
||||
unsafe= true
|
||||
|
||||
[markup.highlight]
|
||||
noClasses = false
|
||||
style = "tango"
|
||||
|
||||
[outputFormats]
|
||||
[outputFormats.LLMS]
|
||||
|
||||
2
.hugo/layouts/shortcodes/include.html
Normal file
2
.hugo/layouts/shortcodes/include.html
Normal file
@@ -0,0 +1,2 @@
|
||||
{{ $file := .Get 0 }}
|
||||
{{ (printf "%s%s" .Page.File.Dir $file) | readFile | replaceRE "^---[\\s\\S]+?---" "" | safeHTML }}
|
||||
17
CHANGELOG.md
17
CHANGELOG.md
@@ -1,5 +1,22 @@
|
||||
# Changelog
|
||||
|
||||
## [0.9.0](https://github.com/googleapis/genai-toolbox/compare/v0.8.0...v0.9.0) (2025-07-11)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Dynamic reloading for toolbox config ([#800](https://github.com/googleapis/genai-toolbox/issues/800)) ([4c240ac](https://github.com/googleapis/genai-toolbox/commit/4c240ac3c961cd14738c998ba2d10d5235ef523e))
|
||||
* **sources/mysql:** Add queryTimeout support to MySQL source ([#830](https://github.com/googleapis/genai-toolbox/issues/830)) ([391cb5b](https://github.com/googleapis/genai-toolbox/commit/391cb5bfe845e554411240a1d9838df5331b25fa))
|
||||
* **tools/bigquery:** Add optional projectID parameter to bigquery tools ([#799](https://github.com/googleapis/genai-toolbox/issues/799)) ([c6ab74c](https://github.com/googleapis/genai-toolbox/commit/c6ab74c5dad53a0e7885a18438ab3be36b9b7cb3))
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* Cleanup unassigned err log ([#857](https://github.com/googleapis/genai-toolbox/issues/857)) ([c081ace](https://github.com/googleapis/genai-toolbox/commit/c081ace46bb24cb3fd2adb21d519489be0d3f3c3))
|
||||
* Fix docs preview deployment pipeline ([#787](https://github.com/googleapis/genai-toolbox/issues/787)) ([0a93b04](https://github.com/googleapis/genai-toolbox/commit/0a93b0482c8d3c64b324e67408d408f5576ecaf3))
|
||||
* **tools:** Nil parameter error when arrays are used ([#801](https://github.com/googleapis/genai-toolbox/issues/801)) ([2bdcc08](https://github.com/googleapis/genai-toolbox/commit/2bdcc0841ab37d18e2f0d6fe63fb6f10da3e302b))
|
||||
* Trigger reload on additional fsnotify operations ([#854](https://github.com/googleapis/genai-toolbox/issues/854)) ([aa8dbec](https://github.com/googleapis/genai-toolbox/commit/aa8dbec97095cf0d7ac771c8084a84e2d3d8ce4e))
|
||||
|
||||
## [0.8.0](https://github.com/googleapis/genai-toolbox/compare/v0.7.0...v0.8.0) (2025-07-02)
|
||||
|
||||
|
||||
|
||||
289
README.md
289
README.md
@@ -2,7 +2,9 @@
|
||||
|
||||
# MCP Toolbox for Databases
|
||||
|
||||
[](https://discord.gg/Dmm69peqjh)
|
||||
[](https://googleapis.github.io/genai-toolbox/)
|
||||
[](https://discord.gg/Dmm69peqjh)
|
||||
[](https://medium.com/@mcp_toolbox)
|
||||
[](https://goreportcard.com/report/github.com/googleapis/genai-toolbox)
|
||||
|
||||
> [!NOTE]
|
||||
@@ -38,6 +40,7 @@ documentation](https://googleapis.github.io/genai-toolbox/).
|
||||
- [Toolsets](#toolsets)
|
||||
- [Versioning](#versioning)
|
||||
- [Contributing](#contributing)
|
||||
- [Community](#community)
|
||||
|
||||
<!-- /TOC -->
|
||||
|
||||
@@ -111,7 +114,7 @@ To install Toolbox as a binary:
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.8.0
|
||||
export VERSION=0.9.0
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
||||
chmod +x toolbox
|
||||
```
|
||||
@@ -124,7 +127,7 @@ You can also install Toolbox as a container:
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.8.0
|
||||
export VERSION=0.9.0
|
||||
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
||||
```
|
||||
|
||||
@@ -137,7 +140,7 @@ To install from source, ensure you have the latest version of
|
||||
[Go installed](https://go.dev/doc/install), and then run the following command:
|
||||
|
||||
```sh
|
||||
go install github.com/googleapis/genai-toolbox@v0.8.0
|
||||
go install github.com/googleapis/genai-toolbox@v0.9.0
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -151,6 +154,8 @@ execute `toolbox` to start the server:
|
||||
```sh
|
||||
./toolbox --tools-file "tools.yaml"
|
||||
```
|
||||
> [!NOTE]
|
||||
> Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
|
||||
|
||||
You can use `toolbox help` for a full list of flags! To stop the server, send a
|
||||
terminate signal (`ctrl+c` on most platforms).
|
||||
@@ -165,7 +170,7 @@ Once your server is up and running, you can load the tools into your
|
||||
application. See below the list of Client SDKs for using various frameworks:
|
||||
|
||||
<details open>
|
||||
<summary>Python</summary>
|
||||
<summary>Python (<a href="https://github.com/googleapis/mcp-toolbox-sdk-python">Github</a>)</summary>
|
||||
<br>
|
||||
<blockquote>
|
||||
|
||||
@@ -256,7 +261,7 @@ For more detailed instructions on using the Toolbox Core SDK, see the
|
||||
</details>
|
||||
</blockquote>
|
||||
<details>
|
||||
<summary>Javascript/Typescript</summary>
|
||||
<summary>Javascript/Typescript (<a href="https://github.com/googleapis/mcp-toolbox-sdk-js">Github</a>)</summary>
|
||||
<br>
|
||||
<blockquote>
|
||||
|
||||
@@ -368,7 +373,273 @@ For more detailed instructions on using the Toolbox Core SDK, see the
|
||||
</details>
|
||||
</details>
|
||||
</blockquote>
|
||||
|
||||
<details>
|
||||
<summary>Go (<a href="https://github.com/googleapis/mcp-toolbox-sdk-go">Github</a>)</summary>
|
||||
<br>
|
||||
<blockquote>
|
||||
|
||||
<details open>
|
||||
<summary>Core</summary>
|
||||
|
||||
1. Install [Toolbox Go SDK][toolbox-go]:
|
||||
|
||||
```bash
|
||||
go get github.com/googleapis/mcp-toolbox-sdk-go
|
||||
```
|
||||
|
||||
1. Load tools:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"context"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Make sure to add the error checks
|
||||
// update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000";
|
||||
ctx := context.Background()
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
|
||||
// Framework agnostic tools
|
||||
tools, err := client.LoadToolset("toolsetName", ctx)
|
||||
}
|
||||
```
|
||||
|
||||
For more detailed instructions on using the Toolbox Go SDK, see the
|
||||
[project's README][toolbox-core-go-readme].
|
||||
|
||||
[toolbox-go]: https://pkg.go.dev/github.com/googleapis/mcp-toolbox-sdk-go/core
|
||||
[toolbox-core-go-readme]: https://github.com/googleapis/mcp-toolbox-sdk-go/blob/main/core/README.md
|
||||
|
||||
</details>
|
||||
<details>
|
||||
<summary>LangChain Go</summary>
|
||||
|
||||
1. Install [Toolbox Go SDK][toolbox-go]:
|
||||
|
||||
```bash
|
||||
go get github.com/googleapis/mcp-toolbox-sdk-go
|
||||
```
|
||||
|
||||
2. Load tools:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"github.com/tmc/langchaingo/llms"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Make sure to add the error checks
|
||||
// update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000"
|
||||
ctx := context.Background()
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
|
||||
// Framework agnostic tool
|
||||
tool, err := client.LoadTool("toolName", ctx)
|
||||
|
||||
// Fetch the tool's input schema
|
||||
inputschema, err := tool.InputSchema()
|
||||
|
||||
var paramsSchema map[string]any
|
||||
_ = json.Unmarshal(inputschema, ¶msSchema)
|
||||
|
||||
// Use this tool with LangChainGo
|
||||
langChainTool := llms.Tool{
|
||||
Type: "function",
|
||||
Function: &llms.FunctionDefinition{
|
||||
Name: tool.Name(),
|
||||
Description: tool.Description(),
|
||||
Parameters: paramsSchema,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
</details>
|
||||
<details>
|
||||
<summary>Genkit</summary>
|
||||
|
||||
1. Install [Toolbox Go SDK][toolbox-go]:
|
||||
|
||||
```bash
|
||||
go get github.com/googleapis/mcp-toolbox-sdk-go
|
||||
```
|
||||
|
||||
2. Load tools:
|
||||
|
||||
```go
|
||||
package main
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
|
||||
"github.com/firebase/genkit/go/ai"
|
||||
"github.com/firebase/genkit/go/genkit"
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"github.com/invopop/jsonschema"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Make sure to add the error checks
|
||||
// Update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000"
|
||||
ctx := context.Background()
|
||||
g, err := genkit.Init(ctx)
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
|
||||
// Framework agnostic tool
|
||||
tool, err := client.LoadTool("toolName", ctx)
|
||||
|
||||
// Fetch the tool's input schema
|
||||
inputschema, err := tool.InputSchema()
|
||||
|
||||
var schema *jsonschema.Schema
|
||||
_ = json.Unmarshal(inputschema, &schema)
|
||||
|
||||
executeFn := func(ctx *ai.ToolContext, input any) (string, error) {
|
||||
result, err := tool.Invoke(ctx, input.(map[string]any))
|
||||
if err != nil {
|
||||
// Propagate errors from the tool invocation.
|
||||
return "", err
|
||||
}
|
||||
|
||||
return result.(string), nil
|
||||
}
|
||||
|
||||
// Use this tool with Genkit Go
|
||||
genkitTool := genkit.DefineToolWithInputSchema(
|
||||
g,
|
||||
tool.Name(),
|
||||
tool.Description(),
|
||||
schema,
|
||||
executeFn,
|
||||
)
|
||||
}
|
||||
```
|
||||
|
||||
</details>
|
||||
<details>
|
||||
<summary>Go GenAI</summary>
|
||||
|
||||
1. Install [Toolbox Go SDK][toolbox-go]:
|
||||
|
||||
```bash
|
||||
go get github.com/googleapis/mcp-toolbox-sdk-go
|
||||
```
|
||||
|
||||
2. Load tools:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"google.golang.org/genai"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Make sure to add the error checks
|
||||
// Update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000"
|
||||
ctx := context.Background()
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
|
||||
// Framework agnostic tool
|
||||
tool, err := client.LoadTool("toolName", ctx)
|
||||
|
||||
// Fetch the tool's input schema
|
||||
inputschema, err := tool.InputSchema()
|
||||
|
||||
var schema *genai.Schema
|
||||
_ = json.Unmarshal(inputschema, &schema)
|
||||
|
||||
funcDeclaration := &genai.FunctionDeclaration{
|
||||
Name: tool.Name(),
|
||||
Description: tool.Description(),
|
||||
Parameters: schema,
|
||||
}
|
||||
|
||||
// Use this tool with Go GenAI
|
||||
genAITool := &genai.Tool{
|
||||
FunctionDeclarations: []*genai.FunctionDeclaration{funcDeclaration},
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
</details>
|
||||
<details>
|
||||
<summary>OpenAI Go</summary>
|
||||
|
||||
1. Install [Toolbox Go SDK][toolbox-go]:
|
||||
|
||||
```bash
|
||||
go get github.com/googleapis/mcp-toolbox-sdk-go
|
||||
```
|
||||
|
||||
2. Load tools:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
openai "github.com/openai/openai-go"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Make sure to add the error checks
|
||||
// Update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000"
|
||||
ctx := context.Background()
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
|
||||
// Framework agnostic tool
|
||||
tool, err := client.LoadTool("toolName", ctx)
|
||||
|
||||
// Fetch the tool's input schema
|
||||
inputschema, err := tool.InputSchema()
|
||||
|
||||
var paramsSchema openai.FunctionParameters
|
||||
_ = json.Unmarshal(inputschema, ¶msSchema)
|
||||
|
||||
// Use this tool with OpenAI Go
|
||||
openAITool := openai.ChatCompletionToolParam{
|
||||
Function: openai.FunctionDefinitionParam{
|
||||
Name: tool.Name(),
|
||||
Description: openai.String(tool.Description()),
|
||||
Parameters: paramsSchema,
|
||||
},
|
||||
}
|
||||
|
||||
}
|
||||
```
|
||||
|
||||
</details>
|
||||
</details>
|
||||
</blockquote>
|
||||
</details>
|
||||
|
||||
## Configuration
|
||||
@@ -467,3 +738,7 @@ to get started.
|
||||
Please note that this project is released with a Contributor Code of Conduct.
|
||||
By participating in this project you agree to abide by its terms. See
|
||||
[Contributor Code of Conduct](CODE_OF_CONDUCT.md) for more information.
|
||||
|
||||
## Community
|
||||
|
||||
Join our [discord community](https://discord.gg/GQrFB3Ec3W) to connect with our developers!
|
||||
|
||||
239
cmd/root.go
239
cmd/root.go
@@ -19,20 +19,26 @@ import (
|
||||
_ "embed"
|
||||
"fmt"
|
||||
"io"
|
||||
"maps"
|
||||
"os"
|
||||
"os/signal"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"runtime"
|
||||
"slices"
|
||||
"strings"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"github.com/fsnotify/fsnotify"
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/auth"
|
||||
"github.com/googleapis/genai-toolbox/internal/log"
|
||||
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
"github.com/googleapis/genai-toolbox/internal/telemetry"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
|
||||
// Import tool packages for side effect of registration
|
||||
@@ -46,18 +52,25 @@ import (
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/bigtable"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/couchbase"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/dgraph"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoredeletedocuments"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetdocuments"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetrules"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorelistcollections"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequerycollection"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/http"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlsql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlsql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jcypher"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/redis"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannersql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlitesql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/utility/wait"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/tools/valkey"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
@@ -70,6 +83,7 @@ import (
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/couchbase"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/dgraph"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/firestore"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/http"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/mssql"
|
||||
_ "github.com/googleapis/genai-toolbox/internal/sources/mysql"
|
||||
@@ -178,6 +192,7 @@ func NewCommand(opts ...Option) *Command {
|
||||
flags.StringVar(&cmd.cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
|
||||
flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", "Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. Allowed: 'alloydb-postgres', 'bigquery', 'cloud-sql-mysql', 'cloud-sql-postgres', 'cloud-sql-mssql', 'postgres', 'spanner', 'spanner-postgres'.")
|
||||
flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
|
||||
flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
|
||||
|
||||
// wrap RunE command so that we have access to original Command object
|
||||
cmd.RunE = func(*cobra.Command, []string) error { return run(cmd) }
|
||||
@@ -347,7 +362,7 @@ func loadAndMergeToolsFolder(ctx context.Context, folderPath string) (ToolsFile,
|
||||
|
||||
// Combine both file lists
|
||||
allFiles := append(yamlFiles, ymlFiles...)
|
||||
|
||||
|
||||
if len(allFiles) == 0 {
|
||||
return ToolsFile{}, fmt.Errorf("no YAML files found in directory %q", folderPath)
|
||||
}
|
||||
@@ -356,6 +371,178 @@ func loadAndMergeToolsFolder(ctx context.Context, folderPath string) (ToolsFile,
|
||||
return loadAndMergeToolsFiles(ctx, allFiles)
|
||||
}
|
||||
|
||||
func handleDynamicReload(ctx context.Context, toolsFile ToolsFile, s *server.Server) error {
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
sourcesMap, authServicesMap, toolsMap, toolsetsMap, err := validateReloadEdits(ctx, toolsFile)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("unable to validate reloaded edits: %w", err)
|
||||
logger.WarnContext(ctx, errMsg.Error())
|
||||
return err
|
||||
}
|
||||
|
||||
s.ResourceMgr.SetResources(sourcesMap, authServicesMap, toolsMap, toolsetsMap)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// validateReloadEdits checks that the reloaded tools file configs can initialized without failing
|
||||
func validateReloadEdits(
|
||||
ctx context.Context, toolsFile ToolsFile,
|
||||
) (map[string]sources.Source, map[string]auth.AuthService, map[string]tools.Tool, map[string]tools.Toolset, error,
|
||||
) {
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
instrumentation, err := util.InstrumentationFromContext(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
logger.DebugContext(ctx, "Attempting to parse and validate reloaded tools file.")
|
||||
|
||||
ctx, span := instrumentation.Tracer.Start(ctx, "toolbox/server/reload")
|
||||
defer span.End()
|
||||
|
||||
reloadedConfig := server.ServerConfig{
|
||||
Version: versionString,
|
||||
SourceConfigs: toolsFile.Sources,
|
||||
AuthServiceConfigs: toolsFile.AuthServices,
|
||||
ToolConfigs: toolsFile.Tools,
|
||||
ToolsetConfigs: toolsFile.Toolsets,
|
||||
}
|
||||
|
||||
sourcesMap, authServicesMap, toolsMap, toolsetsMap, err := server.InitializeConfigs(ctx, reloadedConfig)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("unable to initialize reloaded configs: %w", err)
|
||||
logger.WarnContext(ctx, errMsg.Error())
|
||||
return nil, nil, nil, nil, err
|
||||
}
|
||||
|
||||
return sourcesMap, authServicesMap, toolsMap, toolsetsMap, nil
|
||||
}
|
||||
|
||||
// watchChanges checks for changes in the provided yaml tools file(s) or folder.
|
||||
func watchChanges(ctx context.Context, watchDirs map[string]bool, watchedFiles map[string]bool, s *server.Server) {
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
w, err := fsnotify.NewWatcher()
|
||||
if err != nil {
|
||||
logger.WarnContext(ctx, "error setting up new watcher %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
defer w.Close()
|
||||
|
||||
watchingFolder := false
|
||||
var folderToWatch string
|
||||
|
||||
// if watchedFiles is empty, indicates that user passed entire folder instead
|
||||
if len(watchedFiles) == 0 {
|
||||
watchingFolder = true
|
||||
|
||||
// validate that watchDirs only has single element
|
||||
if len(watchDirs) > 1 {
|
||||
logger.WarnContext(ctx, "error setting watcher, expected single tools folder if no file(s) are defined.")
|
||||
return
|
||||
}
|
||||
|
||||
for onlyKey := range watchDirs {
|
||||
folderToWatch = onlyKey
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
for dir := range watchDirs {
|
||||
err := w.Add(dir)
|
||||
if err != nil {
|
||||
logger.WarnContext(ctx, fmt.Sprintf("Error adding path %s to watcher: %s", dir, err))
|
||||
break
|
||||
}
|
||||
logger.DebugContext(ctx, fmt.Sprintf("Added directory %s to watcher.", dir))
|
||||
}
|
||||
|
||||
// debounce timer is used to prevent multiple writes triggering multiple reloads
|
||||
debounceDelay := 100 * time.Millisecond
|
||||
debounce := time.NewTimer(1 * time.Minute)
|
||||
debounce.Stop()
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
logger.DebugContext(ctx, "file watcher context cancelled")
|
||||
return
|
||||
case err, ok := <-w.Errors:
|
||||
if !ok {
|
||||
logger.WarnContext(ctx, "file watcher was closed unexpectedly")
|
||||
return
|
||||
}
|
||||
if err != nil {
|
||||
logger.WarnContext(ctx, "file watcher error %s", err)
|
||||
return
|
||||
}
|
||||
|
||||
case e, ok := <-w.Events:
|
||||
if !ok {
|
||||
logger.WarnContext(ctx, "file watcher already closed")
|
||||
return
|
||||
}
|
||||
|
||||
// only check for events which indicate user saved a new tools file
|
||||
// multiple operations checked due to various file update methods across editors
|
||||
if !e.Has(fsnotify.Write | fsnotify.Create | fsnotify.Rename) {
|
||||
continue
|
||||
}
|
||||
|
||||
cleanedFilename := filepath.Clean(e.Name)
|
||||
logger.DebugContext(ctx, fmt.Sprintf("%s event detected in %s", e.Op, cleanedFilename))
|
||||
|
||||
folderChanged := watchingFolder &&
|
||||
(strings.HasSuffix(cleanedFilename, ".yaml") || strings.HasSuffix(cleanedFilename, ".yml"))
|
||||
|
||||
if folderChanged || watchedFiles[cleanedFilename] {
|
||||
// indicates the write event is on a relevant file
|
||||
debounce.Reset(debounceDelay)
|
||||
}
|
||||
|
||||
case <-debounce.C:
|
||||
debounce.Stop()
|
||||
var reloadedToolsFile ToolsFile
|
||||
|
||||
if watchingFolder {
|
||||
logger.DebugContext(ctx, "Reloading tools folder.")
|
||||
reloadedToolsFile, err = loadAndMergeToolsFolder(ctx, folderToWatch)
|
||||
if err != nil {
|
||||
logger.WarnContext(ctx, "error loading tools folder %s", err)
|
||||
continue
|
||||
}
|
||||
} else {
|
||||
logger.DebugContext(ctx, "Reloading tools file(s).")
|
||||
reloadedToolsFile, err = loadAndMergeToolsFiles(ctx, slices.Collect(maps.Keys(watchedFiles)))
|
||||
if err != nil {
|
||||
logger.WarnContext(ctx, "error loading tools files %s", err)
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
err = handleDynamicReload(ctx, reloadedToolsFile, s)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("unable to parse reloaded tools file at %q: %w", reloadedToolsFile, err)
|
||||
logger.WarnContext(ctx, errMsg.Error())
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// updateLogLevel checks if Toolbox have to update the existing log level set by users.
|
||||
// stdio doesn't support "debug" and "info" logs.
|
||||
func updateLogLevel(stdio bool, logLevel string) bool {
|
||||
@@ -370,6 +557,33 @@ func updateLogLevel(stdio bool, logLevel string) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
func resolveWatcherInputs(toolsFile string, toolsFiles []string, toolsFolder string) (map[string]bool, map[string]bool) {
|
||||
var relevantFiles []string
|
||||
|
||||
// map for efficiently checking if a file is relevant
|
||||
watchedFiles := make(map[string]bool)
|
||||
|
||||
// dirs that will be added to watcher (fsnotify prefers watching directory then filtering for file)
|
||||
watchDirs := make(map[string]bool)
|
||||
|
||||
if len(toolsFiles) > 0 {
|
||||
relevantFiles = toolsFiles
|
||||
} else if toolsFolder != "" {
|
||||
watchDirs[filepath.Clean(toolsFolder)] = true
|
||||
} else {
|
||||
relevantFiles = []string{toolsFile}
|
||||
}
|
||||
|
||||
// extract parent dir for relevant files and dedup
|
||||
for _, f := range relevantFiles {
|
||||
cleanFile := filepath.Clean(f)
|
||||
watchedFiles[cleanFile] = true
|
||||
watchDirs[filepath.Dir(cleanFile)] = true
|
||||
}
|
||||
|
||||
return watchDirs, watchedFiles
|
||||
}
|
||||
|
||||
func run(cmd *Command) error {
|
||||
if updateLogLevel(cmd.cfg.Stdio, cmd.cfg.LogLevel.String()) {
|
||||
cmd.cfg.LogLevel = server.StringLevel(log.Warn)
|
||||
@@ -466,6 +680,7 @@ func run(cmd *Command) error {
|
||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
|
||||
// Use multiple tools files
|
||||
cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging %d tool configuration files", len(cmd.tools_files)))
|
||||
var err error
|
||||
@@ -481,6 +696,7 @@ func run(cmd *Command) error {
|
||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
|
||||
// Use tools folder
|
||||
cmd.logger.InfoContext(ctx, fmt.Sprintf("Loading and merging all YAML files from directory: %s", cmd.tools_folder))
|
||||
var err error
|
||||
@@ -494,6 +710,7 @@ func run(cmd *Command) error {
|
||||
if cmd.tools_file == "" {
|
||||
cmd.tools_file = "tools.yaml"
|
||||
}
|
||||
|
||||
// Read single tool file contents
|
||||
buf, err := os.ReadFile(cmd.tools_file)
|
||||
if err != nil {
|
||||
@@ -517,8 +734,17 @@ func run(cmd *Command) error {
|
||||
cmd.cfg.AuthServiceConfigs = authSourceConfigs
|
||||
}
|
||||
|
||||
instrumentation, err := telemetry.CreateTelemetryInstrumentation(versionString)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("unable to create telemetry instrumentation: %w", err)
|
||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
|
||||
ctx = util.WithInstrumentation(ctx, instrumentation)
|
||||
|
||||
// start server
|
||||
s, err := server.NewServer(ctx, cmd.cfg, cmd.logger)
|
||||
s, err := server.NewServer(ctx, cmd.cfg)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("toolbox failed to initialize: %w", err)
|
||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||
@@ -553,6 +779,13 @@ func run(cmd *Command) error {
|
||||
}()
|
||||
}
|
||||
|
||||
watchDirs, watchedFiles := resolveWatcherInputs(cmd.tools_file, cmd.tools_files, cmd.tools_folder)
|
||||
|
||||
if !cmd.cfg.DisableReload {
|
||||
// start watching the file(s) or folder for changes to trigger dynamic reloading
|
||||
go watchChanges(ctx, watchDirs, watchedFiles, s)
|
||||
}
|
||||
|
||||
// wait for either the server to error out or the command's context to be canceled
|
||||
select {
|
||||
case err := <-srvErr:
|
||||
|
||||
206
cmd/root_test.go
206
cmd/root_test.go
@@ -16,23 +16,33 @@ package cmd
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
_ "embed"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"runtime"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
|
||||
"github.com/googleapis/genai-toolbox/internal/auth/google"
|
||||
"github.com/googleapis/genai-toolbox/internal/log"
|
||||
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
cloudsqlpgsrc "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg"
|
||||
httpsrc "github.com/googleapis/genai-toolbox/internal/sources/http"
|
||||
"github.com/googleapis/genai-toolbox/internal/telemetry"
|
||||
"github.com/googleapis/genai-toolbox/internal/testutils"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/http"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
@@ -174,6 +184,13 @@ func TestServerConfigFlags(t *testing.T) {
|
||||
Stdio: true,
|
||||
}),
|
||||
},
|
||||
{
|
||||
desc: "disable reload",
|
||||
args: []string{"--disable-reload"},
|
||||
want: withDefaults(server.ServerConfig{
|
||||
DisableReload: true,
|
||||
}),
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
@@ -965,12 +982,191 @@ func TestEnvVarReplacement(t *testing.T) {
|
||||
|
||||
}
|
||||
|
||||
// normalizeFilepaths is a helper function to allow same filepath formats for Mac and Windows.
|
||||
// this prevents needing multiple "want" cases for TestResolveWatcherInputs
|
||||
func normalizeFilepaths(m map[string]bool) map[string]bool {
|
||||
newMap := make(map[string]bool)
|
||||
for k, v := range m {
|
||||
newMap[filepath.ToSlash(k)] = v
|
||||
}
|
||||
return newMap
|
||||
}
|
||||
|
||||
func TestResolveWatcherInputs(t *testing.T) {
|
||||
tcs := []struct {
|
||||
description string
|
||||
toolsFile string
|
||||
toolsFiles []string
|
||||
toolsFolder string
|
||||
wantWatchDirs map[string]bool
|
||||
wantWatchedFiles map[string]bool
|
||||
}{
|
||||
{
|
||||
description: "single tools file",
|
||||
toolsFile: "tools_folder/example_tools.yaml",
|
||||
toolsFiles: []string{},
|
||||
toolsFolder: "",
|
||||
wantWatchDirs: map[string]bool{"tools_folder": true},
|
||||
wantWatchedFiles: map[string]bool{"tools_folder/example_tools.yaml": true},
|
||||
},
|
||||
{
|
||||
description: "default tools file (root dir)",
|
||||
toolsFile: "tools.yaml",
|
||||
toolsFiles: []string{},
|
||||
toolsFolder: "",
|
||||
wantWatchDirs: map[string]bool{".": true},
|
||||
wantWatchedFiles: map[string]bool{"tools.yaml": true},
|
||||
},
|
||||
{
|
||||
description: "multiple files in different folders",
|
||||
toolsFile: "",
|
||||
toolsFiles: []string{"tools_folder/example_tools.yaml", "tools_folder2/example_tools.yaml"},
|
||||
toolsFolder: "",
|
||||
wantWatchDirs: map[string]bool{"tools_folder": true, "tools_folder2": true},
|
||||
wantWatchedFiles: map[string]bool{
|
||||
"tools_folder/example_tools.yaml": true,
|
||||
"tools_folder2/example_tools.yaml": true,
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "multiple files in same folder",
|
||||
toolsFile: "",
|
||||
toolsFiles: []string{"tools_folder/example_tools.yaml", "tools_folder/example_tools2.yaml"},
|
||||
toolsFolder: "",
|
||||
wantWatchDirs: map[string]bool{"tools_folder": true},
|
||||
wantWatchedFiles: map[string]bool{
|
||||
"tools_folder/example_tools.yaml": true,
|
||||
"tools_folder/example_tools2.yaml": true,
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "multiple files in different levels",
|
||||
toolsFile: "",
|
||||
toolsFiles: []string{
|
||||
"tools_folder/example_tools.yaml",
|
||||
"tools_folder/special_tools/example_tools2.yaml"},
|
||||
toolsFolder: "",
|
||||
wantWatchDirs: map[string]bool{"tools_folder": true, "tools_folder/special_tools": true},
|
||||
wantWatchedFiles: map[string]bool{
|
||||
"tools_folder/example_tools.yaml": true,
|
||||
"tools_folder/special_tools/example_tools2.yaml": true,
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "tools folder",
|
||||
toolsFile: "",
|
||||
toolsFiles: []string{},
|
||||
toolsFolder: "tools_folder",
|
||||
wantWatchDirs: map[string]bool{"tools_folder": true},
|
||||
wantWatchedFiles: map[string]bool{},
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.description, func(t *testing.T) {
|
||||
gotWatchDirs, gotWatchedFiles := resolveWatcherInputs(tc.toolsFile, tc.toolsFiles, tc.toolsFolder)
|
||||
|
||||
normalizedGotWatchDirs := normalizeFilepaths(gotWatchDirs)
|
||||
normalizedGotWatchedFiles := normalizeFilepaths(gotWatchedFiles)
|
||||
|
||||
if diff := cmp.Diff(tc.wantWatchDirs, normalizedGotWatchDirs); diff != "" {
|
||||
t.Errorf("incorrect watchDirs: diff %v", diff)
|
||||
}
|
||||
if diff := cmp.Diff(tc.wantWatchedFiles, normalizedGotWatchedFiles); diff != "" {
|
||||
t.Errorf("incorrect watchedFiles: diff %v", diff)
|
||||
}
|
||||
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// helper function for testing file detection in dynamic reloading
|
||||
func tmpFileWithCleanup(content []byte) (string, func(), error) {
|
||||
f, err := os.CreateTemp("", "*")
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
cleanup := func() { os.Remove(f.Name()) }
|
||||
|
||||
if _, err := f.Write(content); err != nil {
|
||||
cleanup()
|
||||
return "", nil, err
|
||||
}
|
||||
if err := f.Close(); err != nil {
|
||||
cleanup()
|
||||
return "", nil, err
|
||||
}
|
||||
return f.Name(), cleanup, err
|
||||
}
|
||||
|
||||
func TestSingleEdit(t *testing.T) {
|
||||
ctx, cancelCtx := context.WithTimeout(context.Background(), time.Minute)
|
||||
defer cancelCtx()
|
||||
|
||||
pr, pw := io.Pipe()
|
||||
defer pw.Close()
|
||||
defer pr.Close()
|
||||
|
||||
fileToWatch, cleanup, err := tmpFileWithCleanup([]byte("initial content"))
|
||||
if err != nil {
|
||||
t.Fatalf("error editing tools file %s", err)
|
||||
}
|
||||
defer cleanup()
|
||||
|
||||
logger, err := log.NewStdLogger(pw, pw, "DEBUG")
|
||||
if err != nil {
|
||||
t.Fatalf("failed to setup logger %s", err)
|
||||
}
|
||||
ctx = util.WithLogger(ctx, logger)
|
||||
|
||||
instrumentation, err := telemetry.CreateTelemetryInstrumentation(versionString)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to setup instrumentation %s", err)
|
||||
}
|
||||
ctx = util.WithInstrumentation(ctx, instrumentation)
|
||||
|
||||
mockServer := &server.Server{}
|
||||
|
||||
cleanFileToWatch := filepath.Clean(fileToWatch)
|
||||
watchDir := filepath.Dir(cleanFileToWatch)
|
||||
|
||||
watchedFiles := map[string]bool{cleanFileToWatch: true}
|
||||
watchDirs := map[string]bool{watchDir: true}
|
||||
|
||||
go watchChanges(ctx, watchDirs, watchedFiles, mockServer)
|
||||
|
||||
// escape backslash so regex doesn't fail on windows filepaths
|
||||
regexEscapedPathFile := strings.ReplaceAll(cleanFileToWatch, `\`, `\\\\*\\`)
|
||||
regexEscapedPathFile = path.Clean(regexEscapedPathFile)
|
||||
|
||||
regexEscapedPathDir := strings.ReplaceAll(watchDir, `\`, `\\\\*\\`)
|
||||
regexEscapedPathDir = path.Clean(regexEscapedPathDir)
|
||||
|
||||
begunWatchingDir := regexp.MustCompile(fmt.Sprintf(`DEBUG "Added directory %s to watcher."`, regexEscapedPathDir))
|
||||
_, err = testutils.WaitForString(ctx, begunWatchingDir, pr)
|
||||
if err != nil {
|
||||
t.Fatalf("timeout or error waiting for watcher to start: %s", err)
|
||||
}
|
||||
|
||||
err = os.WriteFile(fileToWatch, []byte("modification"), 0777)
|
||||
if err != nil {
|
||||
t.Fatalf("error writing to file: %v", err)
|
||||
}
|
||||
|
||||
// only check substring of DEBUG message due to some OS/editors firing different operations
|
||||
detectedFileChange := regexp.MustCompile(fmt.Sprintf(`event detected in %s"`, regexEscapedPathFile))
|
||||
_, err = testutils.WaitForString(ctx, detectedFileChange, pr)
|
||||
if err != nil {
|
||||
t.Fatalf("timeout or error waiting for file to detect write: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestPrebuiltTools(t *testing.T) {
|
||||
alloydb_config, _ := prebuiltconfigs.Get("alloydb-postgres")
|
||||
bigquery_config, _ := prebuiltconfigs.Get("bigquery")
|
||||
cloudsqlpg_config, _ := prebuiltconfigs.Get("cloud-sql-postgres")
|
||||
cloudsqlmysql_config, _ := prebuiltconfigs.Get("cloud-sql-mysql")
|
||||
cloudsqlmssql_config, _ := prebuiltconfigs.Get("cloud-sql-mssql")
|
||||
firestoreconfig, _ := prebuiltconfigs.Get("firestore")
|
||||
postgresconfig, _ := prebuiltconfigs.Get("postgres")
|
||||
spanner_config, _ := prebuiltconfigs.Get("spanner")
|
||||
spannerpg_config, _ := prebuiltconfigs.Get("spanner-postgres")
|
||||
@@ -1033,6 +1229,16 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "firestore prebuilt tools",
|
||||
in: firestoreconfig,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"firestore-database-tools": tools.ToolsetConfig{
|
||||
Name: "firestore-database-tools",
|
||||
ToolNames: []string{"firestore-get-documents", "firestore-list-collections", "firestore-delete-documents", "firestore-query-collection", "firestore-get-rules", "firestore-validate-rules"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "postgres prebuilt tools",
|
||||
in: postgresconfig,
|
||||
|
||||
@@ -1 +1 @@
|
||||
0.8.0
|
||||
0.9.0
|
||||
|
||||
663
docs/en/getting-started/JS Quickstart (Local).md
Normal file
663
docs/en/getting-started/JS Quickstart (Local).md
Normal file
@@ -0,0 +1,663 @@
|
||||
---
|
||||
title: "JS Quickstart (Local)"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
How to get started running Toolbox locally with JavaScript, PostgreSQL, and orchestration frameworks such as [LangChain](https://js.langchain.com/docs/introduction/), [LlamaIndex](https://ts.llamaindex.ai/), or [GenkitJS](https://genkit.dev/docs/get-started/).
|
||||
---
|
||||
|
||||
## Before you begin
|
||||
|
||||
This guide assumes you have already done the following:
|
||||
|
||||
1. Installed [Node.js (v18 or higher)].
|
||||
2. Installed [PostgreSQL 16+ and the `psql` client][install-postgres].
|
||||
|
||||
### Cloud Setup (Optional)
|
||||
|
||||
If you plan to use **Google Cloud’s Vertex AI** with your agent (e.g., using Gemini or PaLM models), follow these one-time setup steps:
|
||||
|
||||
> - [Install the Google Cloud CLI]
|
||||
> - [Set up Application Default Credentials (ADC)]
|
||||
|
||||
#### Set your project and enable Vertex AI
|
||||
|
||||
```bash
|
||||
gcloud config set project YOUR_PROJECT_ID
|
||||
gcloud services enable aiplatform.googleapis.com
|
||||
```
|
||||
|
||||
[Node.js (v18 or higher)]: https://nodejs.org/
|
||||
[install-postgres]: https://www.postgresql.org/download/
|
||||
[Install the Google Cloud CLI]: https://cloud.google.com/sdk/docs/install
|
||||
[Set up Application Default Credentials (ADC)]: https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment
|
||||
|
||||
|
||||
## Step 1: Set up your database
|
||||
|
||||
In this section, we will create a database, insert some data that needs to be
|
||||
accessed by our agent, and create a database user for Toolbox to connect with.
|
||||
|
||||
1. Connect to postgres using the `psql` command:
|
||||
|
||||
```bash
|
||||
psql -h 127.0.0.1 -U postgres
|
||||
```
|
||||
|
||||
Here, `postgres` denotes the default postgres superuser.
|
||||
|
||||
{{< notice info >}}
|
||||
|
||||
#### **Having trouble connecting?**
|
||||
|
||||
* **Password Prompt:** If you are prompted for a password for the `postgres`
|
||||
user and do not know it (or a blank password doesn't work), your PostgreSQL
|
||||
installation might require a password or a different authentication method.
|
||||
* **`FATAL: role "postgres" does not exist`:** This error means the default
|
||||
`postgres` superuser role isn't available under that name on your system.
|
||||
* **`Connection refused`:** Ensure your PostgreSQL server is actually running.
|
||||
You can typically check with `sudo systemctl status postgresql` and start it
|
||||
with `sudo systemctl start postgresql` on Linux systems.
|
||||
|
||||
<br/>
|
||||
|
||||
#### **Common Solution**
|
||||
|
||||
For password issues or if the `postgres` role seems inaccessible directly, try
|
||||
switching to the `postgres` operating system user first. This user often has
|
||||
permission to connect without a password for local connections (this is called
|
||||
peer authentication).
|
||||
|
||||
```bash
|
||||
sudo -i -u postgres
|
||||
psql -h 127.0.0.1
|
||||
```
|
||||
|
||||
Once you are in the `psql` shell using this method, you can proceed with the
|
||||
database creation steps below. Afterwards, type `\q` to exit `psql`, and then
|
||||
`exit` to return to your normal user shell.
|
||||
|
||||
If desired, once connected to `psql` as the `postgres` OS user, you can set a
|
||||
password for the `postgres` *database* user using: `ALTER USER postgres WITH
|
||||
PASSWORD 'your_chosen_password';`. This would allow direct connection with `-U
|
||||
postgres` and a password next time.
|
||||
{{< /notice >}}
|
||||
|
||||
1. Create a new database and a new user:
|
||||
|
||||
{{< notice tip >}}
|
||||
For a real application, it's best to follow the principle of least permission
|
||||
and only grant the privileges your application needs.
|
||||
{{< /notice >}}
|
||||
|
||||
```sql
|
||||
CREATE USER toolbox_user WITH PASSWORD 'my-password';
|
||||
|
||||
CREATE DATABASE toolbox_db;
|
||||
GRANT ALL PRIVILEGES ON DATABASE toolbox_db TO toolbox_user;
|
||||
|
||||
ALTER DATABASE toolbox_db OWNER TO toolbox_user;
|
||||
```
|
||||
|
||||
1. End the database session:
|
||||
|
||||
```bash
|
||||
\q
|
||||
```
|
||||
|
||||
(If you used `sudo -i -u postgres` and then `psql`, remember you might also
|
||||
need to type `exit` after `\q` to leave the `postgres` user's shell
|
||||
session.)
|
||||
|
||||
1. Connect to your database with your new user:
|
||||
|
||||
```bash
|
||||
psql -h 127.0.0.1 -U toolbox_user -d toolbox_db
|
||||
```
|
||||
|
||||
1. Create a table using the following command:
|
||||
|
||||
```sql
|
||||
CREATE TABLE hotels(
|
||||
id INTEGER NOT NULL PRIMARY KEY,
|
||||
name VARCHAR NOT NULL,
|
||||
location VARCHAR NOT NULL,
|
||||
price_tier VARCHAR NOT NULL,
|
||||
checkin_date DATE NOT NULL,
|
||||
checkout_date DATE NOT NULL,
|
||||
booked BIT NOT NULL
|
||||
);
|
||||
```
|
||||
|
||||
1. Insert data into the table.
|
||||
|
||||
```sql
|
||||
INSERT INTO hotels(id, name, location, price_tier, checkin_date, checkout_date, booked)
|
||||
VALUES
|
||||
(1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-22', '2024-04-20', B'0'),
|
||||
(2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', B'0'),
|
||||
(3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', B'0'),
|
||||
(4, 'Radisson Blu Lucerne', 'Lucerne', 'Midscale', '2024-04-24', '2024-04-05', B'0'),
|
||||
(5, 'Best Western Bern', 'Bern', 'Upper Midscale', '2024-04-23', '2024-04-01', B'0'),
|
||||
(6, 'InterContinental Geneva', 'Geneva', 'Luxury', '2024-04-23', '2024-04-28', B'0'),
|
||||
(7, 'Sheraton Zurich', 'Zurich', 'Upper Upscale', '2024-04-27', '2024-04-02', B'0'),
|
||||
(8, 'Holiday Inn Basel', 'Basel', 'Upper Midscale', '2024-04-24', '2024-04-09', B'0'),
|
||||
(9, 'Courtyard Zurich', 'Zurich', 'Upscale', '2024-04-03', '2024-04-13', B'0'),
|
||||
(10, 'Comfort Inn Bern', 'Bern', 'Midscale', '2024-04-04', '2024-04-16', B'0');
|
||||
```
|
||||
|
||||
1. End the database session:
|
||||
|
||||
```bash
|
||||
\q
|
||||
```
|
||||
|
||||
## Step 2: Install and configure Toolbox
|
||||
|
||||
In this section, we will download Toolbox, configure our tools in a
|
||||
`tools.yaml`, and then run the Toolbox server.
|
||||
|
||||
1. Download the latest version of Toolbox as a binary:
|
||||
|
||||
{{< notice tip >}}
|
||||
Select the
|
||||
[correct binary](https://github.com/googleapis/genai-toolbox/releases)
|
||||
corresponding to your OS and CPU architecture.
|
||||
{{< /notice >}}
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.9.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Write the following into a `tools.yaml` file. Be sure to update any fields
|
||||
such as `user`, `password`, or `database` that you may have customized in the
|
||||
previous step.
|
||||
|
||||
{{< notice tip >}}
|
||||
In practice, use environment variable replacement with the format ${ENV_NAME}
|
||||
instead of hardcoding your secrets into the configuration file.
|
||||
{{< /notice >}}
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-pg-source:
|
||||
kind: postgres
|
||||
host: 127.0.0.1
|
||||
port: 5432
|
||||
database: toolbox_db
|
||||
user: ${USER_NAME}
|
||||
password: ${PASSWORD}
|
||||
tools:
|
||||
search-hotels-by-name:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: Search for hotels based on name.
|
||||
parameters:
|
||||
- name: name
|
||||
type: string
|
||||
description: The name of the hotel.
|
||||
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
|
||||
search-hotels-by-location:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: Search for hotels based on location.
|
||||
parameters:
|
||||
- name: location
|
||||
type: string
|
||||
description: The location of the hotel.
|
||||
statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%';
|
||||
book-hotel:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: >-
|
||||
Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.
|
||||
parameters:
|
||||
- name: hotel_id
|
||||
type: string
|
||||
description: The ID of the hotel to book.
|
||||
statement: UPDATE hotels SET booked = B'1' WHERE id = $1;
|
||||
update-hotel:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: >-
|
||||
Update a hotel's check-in and check-out dates by its ID. Returns a message
|
||||
indicating whether the hotel was successfully updated or not.
|
||||
parameters:
|
||||
- name: hotel_id
|
||||
type: string
|
||||
description: The ID of the hotel to update.
|
||||
- name: checkin_date
|
||||
type: string
|
||||
description: The new check-in date of the hotel.
|
||||
- name: checkout_date
|
||||
type: string
|
||||
description: The new check-out date of the hotel.
|
||||
statement: >-
|
||||
UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3
|
||||
as date) WHERE id = $1;
|
||||
cancel-hotel:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: Cancel a hotel by its ID.
|
||||
parameters:
|
||||
- name: hotel_id
|
||||
type: string
|
||||
description: The ID of the hotel to cancel.
|
||||
statement: UPDATE hotels SET booked = B'0' WHERE id = $1;
|
||||
toolsets:
|
||||
my-toolset:
|
||||
- search-hotels-by-name
|
||||
- search-hotels-by-location
|
||||
- book-hotel
|
||||
- update-hotel
|
||||
- cancel-hotel
|
||||
```
|
||||
|
||||
For more info on tools, check out the `Resources` section of the docs.
|
||||
|
||||
1. Run the Toolbox server, pointing to the `tools.yaml` file created earlier:
|
||||
|
||||
```bash
|
||||
./toolbox --tools-file "tools.yaml"
|
||||
```
|
||||
{{< notice note >}}
|
||||
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
|
||||
{{< /notice >}}
|
||||
|
||||
## Step 3: Connect your agent to Toolbox
|
||||
|
||||
In this section, we will write and run an agent that will load the Tools
|
||||
from Toolbox.
|
||||
|
||||
First let's create a new folder for your project, initialize it with `npm`, and install the required dependencies.
|
||||
|
||||
1. Create a new folder for your project and navigate into it:
|
||||
|
||||
```bash
|
||||
mkdir my-agent-app
|
||||
cd my-agent-app
|
||||
```
|
||||
|
||||
1. Initialize a new Node.js project:
|
||||
|
||||
```bash
|
||||
npm init -y
|
||||
```
|
||||
|
||||
1. Create a new file in the root directory:
|
||||
|
||||
```bash
|
||||
touch index.js
|
||||
```
|
||||
|
||||
4. Next, depending on which orchestration framework you want to use, install the relevant dependencies:
|
||||
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="LangChain" lang="bash" >}}
|
||||
npm install langchain @genai-toolbox/sdk @langchain/google-vertexai dotenv
|
||||
{{< /tab >}}
|
||||
{{< tab header="LlamaIndex" lang="bash" >}}
|
||||
npm install @llamaindex/core @llamaindex/llms-google-genai @genai-toolbox/sdk dotenv
|
||||
{{< /tab >}}
|
||||
{{< tab header="GenkitJS" lang="bash" >}}
|
||||
npm install @toolbox-sdk/core genkit @genkit-ai/vertexai dotenv
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
5. Now copy the below code in your `index.js` file based on your orchestration framework.
|
||||
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="LangChain" lang="js" >}}
|
||||
|
||||
import "dotenv/config";
|
||||
import { ChatVertexAI } from "@langchain/google-vertexai";
|
||||
import { ToolboxClient } from "@toolbox-sdk/core";
|
||||
import { tool } from "@langchain/core/tools";
|
||||
import { HumanMessage, ToolMessage } from "@langchain/core/messages";
|
||||
|
||||
const prompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
];
|
||||
|
||||
async function runApplication() {
|
||||
console.log("Starting hotel agent...");
|
||||
|
||||
const model = new ChatVertexAI({
|
||||
model: "gemini-2.0-flash-001",
|
||||
temperature: 0,
|
||||
});
|
||||
|
||||
const client = new ToolboxClient("http://127.0.0.1:5000");
|
||||
const toolboxTools = await client.loadToolset("my-toolset");
|
||||
|
||||
console.log(`Loaded ${toolboxTools.length} tools from Toolbox`);
|
||||
|
||||
const tools = toolboxTools
|
||||
.map((t) => {
|
||||
return tool(t, {
|
||||
name: t.toolName,
|
||||
description: t.description,
|
||||
schema: t.params,
|
||||
});
|
||||
})
|
||||
.filter(Boolean);
|
||||
|
||||
const modelWithTools = model.bindTools(tools);
|
||||
|
||||
let messages = [new HumanMessage(prompt)];
|
||||
|
||||
for (const query of queries) {
|
||||
console.log(`\nUser: ${query}`);
|
||||
|
||||
messages.push(new HumanMessage(query));
|
||||
|
||||
for (let step = 0; step < 5; step++) {
|
||||
const response = await modelWithTools.invoke(messages);
|
||||
|
||||
if (!response.tool_calls || response.tool_calls.length === 0) {
|
||||
console.log("Agent:", response.content);
|
||||
messages.push(response);
|
||||
break;
|
||||
}
|
||||
|
||||
console.log("Agent decided to use tools:", response.tool_calls);
|
||||
messages.push(response);
|
||||
|
||||
const toolMessages = await Promise.all(
|
||||
response.tool_calls.map(async (call) => {
|
||||
const toolToCall = tools.find((t) => t.name === call.name);sources:
|
||||
my-valkey-instance:
|
||||
kind: valkey
|
||||
address:
|
||||
- 10.128.0.2:6379
|
||||
useGCPIAM: true
|
||||
|
||||
if (!toolToCall) {
|
||||
return new ToolMessage({
|
||||
content: `Error: Tool ${call.name} not found`,
|
||||
tool_call_id: call.id,
|
||||
});
|
||||
}
|
||||
try {
|
||||
const result = await toolToCall.invoke(call.args);
|
||||
return new ToolMessage({
|
||||
content: JSON.stringify(result ?? "No result returned."),
|
||||
tool_call_id: call.id,
|
||||
});
|
||||
} catch (e) {
|
||||
return new ToolMessage({
|
||||
content: `Error: ${e.message}`,
|
||||
tool_call_id: call.id,
|
||||
});
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
messages.push(...toolMessages);
|
||||
}
|
||||
}
|
||||
if (client.close) {
|
||||
await client.close();
|
||||
}
|
||||
}
|
||||
|
||||
runApplication()
|
||||
.catch(console.error)
|
||||
.finally(() => console.log("\nApplication finished."));
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="LlamaIndex" lang="js" >}}
|
||||
|
||||
import "dotenv/config";
|
||||
import { LlamaIndexAgent } from "@llamaindex/core";
|
||||
import { GoogleGenAI } from "@llamaindex/llms-google-genai";
|
||||
import { ToolboxClient } from "@genai-toolbox/sdk";
|
||||
|
||||
// Sample prompt and queries
|
||||
const prompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and cancellations.
|
||||
... (same as above) ...
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
// ...more queries...
|
||||
];
|
||||
|
||||
async function runApplication() {
|
||||
const llm = new GoogleGenAI({
|
||||
model: "gemini-2.0-flash-001",
|
||||
// Add any required config here
|
||||
});
|
||||
|
||||
const client = new ToolboxClient("http://127.0.0.1:5000");
|
||||
const tools = await client.loadToolset("my-toolset");
|
||||
|
||||
const agent = new LlamaIndexAgent({
|
||||
llm,
|
||||
tools,
|
||||
systemPrompt: prompt,
|
||||
});
|
||||
|
||||
for (const query of queries) {
|
||||
const response = await agent.run(query);
|
||||
console.log(response);
|
||||
}
|
||||
|
||||
if (client.close) await client.close();
|
||||
}
|
||||
|
||||
runApplication().catch(console.error);
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="GenkitJS" lang="js" >}}
|
||||
import { ToolboxClient } from "@toolbox-sdk/core";
|
||||
import { genkit } from "genkit";
|
||||
import { vertexAI } from "@genkit-ai/vertexai";
|
||||
import { z } from "zod";
|
||||
|
||||
const toolboxClient = new ToolboxClient("http://127.0.0.1:5000");
|
||||
|
||||
const ai = genkit({
|
||||
plugins: [vertexAI({ location: "us-central1", projectId: process.env.PROJECT_ID })],
|
||||
});
|
||||
|
||||
const systemPrompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking and cancellations.
|
||||
When the user searches for a hotel, mention its name, ID, location and price tier.
|
||||
Always mention hotel ID while performing any operations. This is very important for any operations.
|
||||
For any bookings or cancellations, please provide the appropriate confirmation.
|
||||
Be sure to update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels in Bern with Bern in it's name.",
|
||||
"Please book the hotel Best Western Bern for me.",
|
||||
"This is too expensive. Please cancel it.",
|
||||
"Please book Comfort Inn Bern for me",
|
||||
"My check in dates for my booking would be from April 10, 2024 to April 19, 2024.",
|
||||
];
|
||||
|
||||
async function run() {
|
||||
let tools;
|
||||
try {
|
||||
tools = await toolboxClient.loadToolset("my-toolset");
|
||||
} catch (e) {
console.error("Failed to load toolset from Toolbox:", e);
|
||||
return;
|
||||
}
|
||||
|
||||
const toolboxTools = tools; // Reuse the toolset loaded above instead of fetching it twice.
|
||||
|
||||
const toolMap = {};
|
||||
for (const tool of toolboxTools) {
|
||||
let inputSchema;
|
||||
switch (tool.getName()) {
|
||||
case "search-hotels-by-name":
|
||||
inputSchema = z.object({ name: z.string() });
|
||||
break;
|
||||
case "search-hotels-by-location":
|
||||
inputSchema = z.object({ location: z.string() });
|
||||
break;
|
||||
case "book-hotel":
|
||||
inputSchema = z.object({ hotel_id: z.string() });
|
||||
break;
|
||||
case "update-hotel":
|
||||
inputSchema = z.object({
|
||||
hotel_id: z.string(),
|
||||
checkin_date: z.string(),
|
||||
checkout_date: z.string(),
|
||||
});
|
||||
break;
|
||||
case "cancel-hotel":
|
||||
inputSchema = z.object({ hotel_id: z.string() });
|
||||
break;
|
||||
default:
|
||||
inputSchema = z.object({});
|
||||
}
|
||||
|
||||
const definedTool = ai.defineTool(
|
||||
{
|
||||
name: tool.getName(),
|
||||
description: tool.getDescription(),
|
||||
inputSchema,
|
||||
},
|
||||
tool
|
||||
);
|
||||
|
||||
toolMap[tool.getName()] = definedTool;
|
||||
}
|
||||
|
||||
let conversationHistory = [
|
||||
{
|
||||
role: "system",
|
||||
content: [{ text: systemPrompt }],
|
||||
},
|
||||
];
|
||||
|
||||
for (const userQuery of queries) {
|
||||
console.log(`\n👤 User: "${userQuery}"`);
|
||||
conversationHistory.push({
|
||||
role: "user",
|
||||
content: [{ text: userQuery }],
|
||||
});
|
||||
|
||||
const response = await ai.generate({
|
||||
model: vertexAI.model("gemini-2.5-flash"),
|
||||
messages: conversationHistory,
|
||||
tools: Object.values(toolMap),
|
||||
});
|
||||
|
||||
let content = [],
|
||||
functionCalls = [];
|
||||
|
||||
if (response.toolRequests?.length) {
|
||||
functionCalls = response.toolRequests;
|
||||
content = response.content || [{ text: response.text || "" }];
|
||||
} else if (response.candidates?.length) {
|
||||
content = response.candidates[0].content;
|
||||
functionCalls = content.filter((part) => part.functionCall);
|
||||
} else {
|
||||
content = [{ text: response.text || response.output || "No response text found" }];
|
||||
}
|
||||
|
||||
conversationHistory.push({
|
||||
role: "model",
|
||||
content,
|
||||
});
|
||||
|
||||
if (functionCalls.length > 0) {
|
||||
for (const call of functionCalls) {
|
||||
const toolName =
|
||||
call.functionCall?.name || call.toolRequest?.name || call.name;
|
||||
const toolArgs =
|
||||
call.functionCall?.args || call.toolRequest?.input || call.input;
|
||||
|
||||
const tool = toolMap[toolName];
|
||||
if (!tool) continue;
|
||||
|
||||
try {
|
||||
const toolResponse = await tool.invoke(toolArgs);
|
||||
|
||||
conversationHistory.push({
|
||||
role: "function",
|
||||
content: [
|
||||
{
|
||||
functionResponse: {
|
||||
name: toolName,
|
||||
response: toolResponse,
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
if (toolName.includes("search-hotels")) {
|
||||
if (Array.isArray(toolResponse) && toolResponse.length > 0) {
|
||||
const hotelList = toolResponse
|
||||
.map(
|
||||
(h) =>
|
||||
`* Hotel Name: ${h.name}, ID: ${h.hotel_id}, Location: ${h.location}, Price Tier: ${h.price_tier}`
|
||||
)
|
||||
.join("\n");
|
||||
console.log(`🤖 Hotel Agent: I found these hotels in Bern:\n\n${hotelList}`);
|
||||
} else {
|
||||
console.log("🤖 Hotel Agent: No hotels found.");
|
||||
}
|
||||
}
|
||||
} catch (e) {
console.error(`Error invoking ${toolName}:`, e);
}
|
||||
}
|
||||
|
||||
const finalResponse = await ai.generate({
|
||||
model: vertexAI.model("gemini-2.5-flash"),
|
||||
messages: conversationHistory,
|
||||
tools: Object.values(toolMap),
|
||||
});
|
||||
|
||||
const finalMessage =
|
||||
finalResponse.text || finalResponse.output || "No final response";
|
||||
|
||||
conversationHistory.push({
|
||||
role: "model",
|
||||
content: [{ text: finalMessage }],
|
||||
});
|
||||
|
||||
console.log(`🤖 Hotel Agent: ${finalMessage}`);
|
||||
} else {
|
||||
const message =
|
||||
response.text || response.output || content[0]?.text || "No response";
|
||||
console.log(`🤖 Hotel Agent: ${message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
run();
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
6. Make sure your Toolbox server is running (`./toolbox --tools-file "tools.yaml"`), then run your agent from the project's root directory:
|
||||
|
||||
```sh
|
||||
node index.js
|
||||
```
|
||||
@@ -1,9 +1,9 @@
|
||||
---
|
||||
title: "Quickstart (Local)"
|
||||
title: "Python Quickstart (Local)"
|
||||
type: docs
|
||||
weight: 2
|
||||
description: >
|
||||
How to get started running Toolbox locally with Python, PostgreSQL, and [Agent Development Kit](https://google.github.io/adk-docs/),
|
||||
How to get started running Toolbox locally with [Python](https://github.com/googleapis/mcp-toolbox-sdk-python), PostgreSQL, and [Agent Development Kit](https://google.github.io/adk-docs/),
|
||||
[LangGraph](https://www.langchain.com/langgraph), [LlamaIndex](https://www.llamaindex.ai/) or [GoogleGenAI](https://pypi.org/project/google-genai/).
|
||||
---
|
||||
|
||||
@@ -14,8 +14,21 @@ description: >
|
||||
This guide assumes you have already done the following:
|
||||
|
||||
1. Installed [Python 3.9+][install-python] (including [pip][install-pip] and
|
||||
your preferred virtual environment tool for managing dependencies e.g. [venv][install-venv])
|
||||
1. Installed [PostgreSQL 16+ and the `psql` client][install-postgres]
|
||||
your preferred virtual environment tool for managing dependencies e.g. [venv][install-venv]).
|
||||
1. Installed [PostgreSQL 16+ and the `psql` client][install-postgres].
|
||||
|
||||
### Cloud Setup (Optional)
|
||||
|
||||
If you plan to use **Google Cloud’s Vertex AI** with your agent (e.g., using `vertexai=True` or a Google GenAI model), follow these one-time setup steps for local development:
|
||||
|
||||
1. [Install the Google Cloud CLI](https://cloud.google.com/sdk/docs/install)
|
||||
1. [Set up Application Default Credentials (ADC)](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment)
|
||||
1. Set your project and enable Vertex AI
|
||||
|
||||
```bash
|
||||
gcloud config set project YOUR_PROJECT_ID
|
||||
gcloud services enable aiplatform.googleapis.com
|
||||
```
|
||||
|
||||
[install-python]: https://wiki.python.org/moin/BeginnersGuide/Download
|
||||
[install-pip]: https://pip.pypa.io/en/stable/installation/
|
||||
@@ -141,6 +154,7 @@ postgres` and a password next time.
|
||||
\q
|
||||
```
|
||||
|
||||
|
||||
## Step 2: Install and configure Toolbox
|
||||
|
||||
In this section, we will download Toolbox, configure our tools in a
|
||||
@@ -156,7 +170,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.8.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.9.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -257,6 +271,9 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
```bash
|
||||
./toolbox --tools-file "tools.yaml"
|
||||
```
|
||||
{{< notice note >}}
|
||||
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
|
||||
{{< /notice >}}
|
||||
|
||||
## Step 3: Connect your agent to Toolbox
|
||||
|
||||
@@ -629,7 +646,7 @@ asyncio.run(run_application())
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
{{< tabpane text=true persist=header >}}
|
||||
{{< tabpane text=true persist=header >}}
|
||||
{{% tab header="ADK" lang="en" %}}
|
||||
To learn more about Agent Development Kit, check out the [ADK
|
||||
documentation.](https://google.github.io/adk-docs/)
|
||||
@@ -650,8 +667,12 @@ Documentation](https://github.com/googleapis/python-genai?tab=readme-ov-file#man
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
1. Run your agent, and observe the results:
|
||||
4. Run your agent, and observe the results:
|
||||
|
||||
```sh
|
||||
python hotel_agent.py
|
||||
```
|
||||
|
||||
{{< notice info >}}
|
||||
For more information, visit the [Python SDK repo](https://github.com/googleapis/mcp-toolbox-sdk-python).
|
||||
{{</ notice >}}
|
||||
@@ -190,6 +190,18 @@
|
||||
"!sudo lsof -i :5432"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"## Optional: Enable Vertex AI API for Google Cloud\n",
|
||||
"\n",
|
||||
"If you're using a model hosted on **Vertex AI**, run the following command to enable the API:\n",
|
||||
"\n",
|
||||
"```bash\n",
|
||||
"!gcloud services enable aiplatform.googleapis.com\n"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {
|
||||
@@ -222,7 +234,7 @@
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"version = \"0.8.0\" # x-release-please-version\n",
|
||||
"version = \"0.9.0\" # x-release-please-version\n",
|
||||
"! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
|
||||
"\n",
|
||||
"# Make the binary executable\n",
|
||||
|
||||
@@ -86,7 +86,7 @@ To install Toolbox as a binary:
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.8.0
|
||||
export VERSION=0.9.0
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v$VERSION/linux/amd64/toolbox
|
||||
chmod +x toolbox
|
||||
```
|
||||
@@ -97,7 +97,7 @@ You can also install Toolbox as a container:
|
||||
|
||||
```sh
|
||||
# see releases page for other versions
|
||||
export VERSION=0.8.0
|
||||
export VERSION=0.9.0
|
||||
docker pull us-central1-docker.pkg.dev/database-toolbox/toolbox/toolbox:$VERSION
|
||||
```
|
||||
|
||||
@@ -108,7 +108,7 @@ To install from source, ensure you have the latest version of
|
||||
[Go installed](https://go.dev/doc/install), and then run the following command:
|
||||
|
||||
```sh
|
||||
go install github.com/googleapis/genai-toolbox@v0.8.0
|
||||
go install github.com/googleapis/genai-toolbox@v0.9.0
|
||||
```
|
||||
|
||||
{{% /tab %}}
|
||||
@@ -123,6 +123,9 @@ execute `toolbox` to start the server:
|
||||
```sh
|
||||
./toolbox --tools-file "tools.yaml"
|
||||
```
|
||||
{{< notice note >}}
|
||||
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
|
||||
{{< /notice >}}
|
||||
|
||||
You can use `toolbox help` for a full list of flags! To stop the server, send a
|
||||
terminate signal (`ctrl+c` on most platforms).
|
||||
@@ -313,4 +316,273 @@ const tools = toolboxTools.map(getTool);
|
||||
{{< /tabpane >}}
|
||||
|
||||
For more detailed instructions on using the Toolbox Core SDK, see the
|
||||
[project's README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
|
||||
[project's README](https://github.com/googleapis/mcp-toolbox-sdk-js/blob/main/packages/toolbox-core/README.md).
|
||||
|
||||
#### Go
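
If you haven't added the SDK to your Go module yet, it can typically be installed with `go get` (module path inferred from the import statements in the examples below):

```sh
go get github.com/googleapis/mcp-toolbox-sdk-go/core
```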
|
||||
|
||||
Once you've installed the [Toolbox Go
|
||||
SDK](https://pkg.go.dev/github.com/googleapis/mcp-toolbox-sdk-go/core), you can load
|
||||
tools:
|
||||
|
||||
{{< tabpane text=true persist=header >}}
|
||||
{{% tab header="Core" lang="en" %}}
|
||||
|
||||
{{< highlight go >}}
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000"
|
||||
ctx := context.Background()
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Framework agnostic tools
|
||||
tools, err := client.LoadToolset("toolsetName", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tools: %v", err)
|
||||
}
|
||||
}
|
||||
{{< /highlight >}}
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="LangChain Go" lang="en" %}}
|
||||
|
||||
{{< highlight go >}}
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"log"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"github.com/tmc/langchaingo/llms"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Make sure to add the error checks
|
||||
// update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000"
|
||||
ctx := context.Background()
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Framework agnostic tool
|
||||
tool, err := client.LoadTool("toolName", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tools: %v", err)
|
||||
}
|
||||
|
||||
// Fetch the tool's input schema
|
||||
inputschema, err := tool.InputSchema()
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to fetch inputSchema: %v", err)
|
||||
}
|
||||
|
||||
var paramsSchema map[string]any
|
||||
_ = json.Unmarshal(inputschema, &paramsSchema)
|
||||
|
||||
// Use this tool with LangChainGo
|
||||
langChainTool := llms.Tool{
|
||||
Type: "function",
|
||||
Function: &llms.FunctionDefinition{
|
||||
Name: tool.Name(),
|
||||
Description: tool.Description(),
|
||||
Parameters: paramsSchema,
|
||||
},
|
||||
}
|
||||
}
|
||||
{{< /highlight >}}
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Genkit Go" lang="en" %}}
|
||||
|
||||
{{< highlight go >}}
|
||||
package main
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"log"
|
||||
|
||||
"github.com/firebase/genkit/go/ai"
|
||||
"github.com/firebase/genkit/go/genkit"
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"github.com/invopop/jsonschema"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Make sure to add the error checks
|
||||
// Update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000"
|
||||
ctx := context.Background()
|
||||
g, err := genkit.Init(ctx)
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Framework agnostic tool
|
||||
tool, err := client.LoadTool("toolName", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tools: %v", err)
|
||||
}
|
||||
|
||||
// Fetch the tool's input schema
|
||||
inputschema, err := tool.InputSchema()
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to fetch inputSchema: %v", err)
|
||||
}
|
||||
|
||||
var schema *jsonschema.Schema
|
||||
_ = json.Unmarshal(inputschema, &schema)
|
||||
|
||||
executeFn := func(ctx *ai.ToolContext, input any) (string, error) {
|
||||
result, err := tool.Invoke(ctx, input.(map[string]any))
|
||||
if err != nil {
|
||||
// Propagate errors from the tool invocation.
|
||||
return "", err
|
||||
}
|
||||
|
||||
return result.(string), nil
|
||||
}
|
||||
|
||||
// Use this tool with Genkit Go
|
||||
genkitTool := genkit.DefineToolWithInputSchema(
|
||||
g,
|
||||
tool.Name(),
|
||||
tool.Description(),
|
||||
schema,
|
||||
executeFn,
|
||||
)
|
||||
}
|
||||
{{< /highlight >}}
|
||||
|
||||
{{% /tab %}}
|
||||
{{% tab header="Go GenAI" lang="en" %}}
|
||||
|
||||
{{< highlight go >}}
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"log"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
"google.golang.org/genai"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Make sure to add the error checks
|
||||
// Update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000"
|
||||
ctx := context.Background()
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Framework agnostic tool
|
||||
tool, err := client.LoadTool("toolName", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tools: %v", err)
|
||||
}
|
||||
|
||||
// Fetch the tool's input schema
|
||||
inputschema, err := tool.InputSchema()
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to fetch inputSchema: %v", err)
|
||||
}
|
||||
|
||||
var schema *genai.Schema
|
||||
_ = json.Unmarshal(inputschema, &schema)
|
||||
|
||||
funcDeclaration := &genai.FunctionDeclaration{
|
||||
Name: tool.Name(),
|
||||
Description: tool.Description(),
|
||||
Parameters: schema,
|
||||
}
|
||||
|
||||
// Use this tool with Go GenAI
|
||||
genAITool := &genai.Tool{
|
||||
FunctionDeclarations: []*genai.FunctionDeclaration{funcDeclaration},
|
||||
}
|
||||
}
|
||||
{{< /highlight >}}
|
||||
|
||||
{{% /tab %}}
|
||||
|
||||
{{% tab header="OpenAI Go" lang="en" %}}
|
||||
|
||||
{{< highlight go >}}
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"log"
|
||||
|
||||
"github.com/googleapis/mcp-toolbox-sdk-go/core"
|
||||
openai "github.com/openai/openai-go"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Make sure to add the error checks
|
||||
// Update the url to point to your server
|
||||
URL := "http://127.0.0.1:5000"
|
||||
ctx := context.Background()
|
||||
|
||||
client, err := core.NewToolboxClient(URL)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to create Toolbox client: %v", err)
|
||||
}
|
||||
|
||||
// Framework agnostic tool
|
||||
tool, err := client.LoadTool("toolName", ctx)
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load tools: %v", err)
|
||||
}
|
||||
|
||||
// Fetch the tool's input schema
|
||||
inputschema, err := tool.InputSchema()
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to fetch inputSchema: %v", err)
|
||||
}
|
||||
|
||||
var paramsSchema openai.FunctionParameters
|
||||
_ = json.Unmarshal(inputschema, &paramsSchema)
|
||||
|
||||
// Use this tool with OpenAI Go
|
||||
openAITool := openai.ChatCompletionToolParam{
|
||||
Function: openai.FunctionDefinitionParam{
|
||||
Name: tool.Name(),
|
||||
Description: openai.String(tool.Description()),
|
||||
Parameters: paramsSchema,
|
||||
},
|
||||
}
|
||||
}
|
||||
{{< /highlight >}}
|
||||
|
||||
{{% /tab %}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
For more detailed instructions on using the Toolbox Go SDK, see the
|
||||
[project's README](https://github.com/googleapis/mcp-toolbox-sdk-go/blob/main/core/README.md).
|
||||
|
||||
For end-to-end samples on using the Toolbox Go SDK with orchestration frameworks, see the [project's samples](https://github.com/googleapis/mcp-toolbox-sdk-go/tree/main/core/samples)
|
||||
490 docs/en/getting-started/local_quickstart_js.md (new file)
@@ -0,0 +1,490 @@
|
||||
---
|
||||
title: "JS Quickstart (Local)"
|
||||
type: docs
|
||||
weight: 3
|
||||
description: >
|
||||
How to get started running Toolbox locally with [JavaScript](https://github.com/googleapis/mcp-toolbox-sdk-js), PostgreSQL, and orchestration frameworks such as [LangChain](https://js.langchain.com/docs/introduction/) and [GenkitJS](https://genkit.dev/docs/get-started/).
|
||||
---
|
||||
|
||||
## Before you begin
|
||||
|
||||
This guide assumes you have already done the following:
|
||||
|
||||
1. Installed [Node.js (v18 or higher)].
|
||||
1. Installed [PostgreSQL 16+ and the `psql` client][install-postgres].
|
||||
|
||||
### Cloud Setup (Optional)
|
||||
|
||||
If you plan to use **Google Cloud’s Vertex AI** with your agent (e.g., using Gemini or PaLM models), follow these one-time setup steps:
|
||||
|
||||
1. [Install the Google Cloud CLI]
|
||||
1. [Set up Application Default Credentials (ADC)]
|
||||
1. Set your project and enable Vertex AI
|
||||
|
||||
```bash
|
||||
gcloud config set project YOUR_PROJECT_ID
|
||||
gcloud services enable aiplatform.googleapis.com
|
||||
```
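
For local development, the ADC setup in step 2 usually amounts to a single command (shown here for convenience; see the linked guide for other environments):

```bash
gcloud auth application-default login
```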
|
||||
|
||||
[Node.js (v18 or higher)]: https://nodejs.org/
|
||||
[install-postgres]: https://www.postgresql.org/download/
|
||||
[Install the Google Cloud CLI]: https://cloud.google.com/sdk/docs/install
|
||||
[Set up Application Default Credentials (ADC)]: https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment
|
||||
|
||||
|
||||
## Step 1: Set up your database
|
||||
|
||||
In this section, we will create a database, insert some data that needs to be
|
||||
accessed by our agent, and create a database user for Toolbox to connect with.
|
||||
|
||||
1. Connect to postgres using the `psql` command:
|
||||
|
||||
```bash
|
||||
psql -h 127.0.0.1 -U postgres
|
||||
```
|
||||
|
||||
Here, `postgres` denotes the default postgres superuser.
|
||||
|
||||
{{< notice info >}}
|
||||
|
||||
#### **Having trouble connecting?**
|
||||
|
||||
* **Password Prompt:** If you are prompted for a password for the `postgres`
|
||||
user and do not know it (or a blank password doesn't work), your PostgreSQL
|
||||
installation might require a password or a different authentication method.
|
||||
* **`FATAL: role "postgres" does not exist`:** This error means the default
|
||||
`postgres` superuser role isn't available under that name on your system.
|
||||
* **`Connection refused`:** Ensure your PostgreSQL server is actually running.
|
||||
You can typically check with `sudo systemctl status postgresql` and start it
|
||||
with `sudo systemctl start postgresql` on Linux systems.
|
||||
|
||||
<br/>
|
||||
|
||||
#### **Common Solution**
|
||||
|
||||
For password issues or if the `postgres` role seems inaccessible directly, try
|
||||
switching to the `postgres` operating system user first. This user often has
|
||||
permission to connect without a password for local connections (this is called
|
||||
peer authentication).
|
||||
|
||||
```bash
|
||||
sudo -i -u postgres
|
||||
psql -h 127.0.0.1
|
||||
```
|
||||
|
||||
Once you are in the `psql` shell using this method, you can proceed with the
|
||||
database creation steps below. Afterwards, type `\q` to exit `psql`, and then
|
||||
`exit` to return to your normal user shell.
|
||||
|
||||
If desired, once connected to `psql` as the `postgres` OS user, you can set a
|
||||
password for the `postgres` *database* user using: `ALTER USER postgres WITH
|
||||
PASSWORD 'your_chosen_password';`. This would allow direct connection with `-U
|
||||
postgres` and a password next time.
|
||||
{{< /notice >}}
|
||||
|
||||
1. Create a new database and a new user:
|
||||
|
||||
{{< notice tip >}}
|
||||
For a real application, it's best to follow the principle of least permission
|
||||
and only grant the privileges your application needs.
|
||||
{{< /notice >}}
|
||||
|
||||
```sql
|
||||
CREATE USER toolbox_user WITH PASSWORD 'my-password';
|
||||
|
||||
CREATE DATABASE toolbox_db;
|
||||
GRANT ALL PRIVILEGES ON DATABASE toolbox_db TO toolbox_user;
|
||||
|
||||
ALTER DATABASE toolbox_db OWNER TO toolbox_user;
|
||||
```
|
||||
|
||||
1. End the database session:
|
||||
|
||||
```bash
|
||||
\q
|
||||
```
|
||||
|
||||
(If you used `sudo -i -u postgres` and then `psql`, remember you might also
|
||||
need to type `exit` after `\q` to leave the `postgres` user's shell
|
||||
session.)
|
||||
|
||||
1. Connect to your database with your new user:
|
||||
|
||||
```bash
|
||||
psql -h 127.0.0.1 -U toolbox_user -d toolbox_db
|
||||
```
|
||||
|
||||
1. Create a table using the following command:
|
||||
|
||||
```sql
|
||||
CREATE TABLE hotels(
|
||||
id INTEGER NOT NULL PRIMARY KEY,
|
||||
name VARCHAR NOT NULL,
|
||||
location VARCHAR NOT NULL,
|
||||
price_tier VARCHAR NOT NULL,
|
||||
checkin_date DATE NOT NULL,
|
||||
checkout_date DATE NOT NULL,
|
||||
booked BIT NOT NULL
|
||||
);
|
||||
```
|
||||
|
||||
1. Insert data into the table.
|
||||
|
||||
```sql
|
||||
INSERT INTO hotels(id, name, location, price_tier, checkin_date, checkout_date, booked)
|
||||
VALUES
|
||||
(1, 'Hilton Basel', 'Basel', 'Luxury', '2024-04-22', '2024-04-20', B'0'),
|
||||
(2, 'Marriott Zurich', 'Zurich', 'Upscale', '2024-04-14', '2024-04-21', B'0'),
|
||||
(3, 'Hyatt Regency Basel', 'Basel', 'Upper Upscale', '2024-04-02', '2024-04-20', B'0'),
|
||||
(4, 'Radisson Blu Lucerne', 'Lucerne', 'Midscale', '2024-04-24', '2024-04-05', B'0'),
|
||||
(5, 'Best Western Bern', 'Bern', 'Upper Midscale', '2024-04-23', '2024-04-01', B'0'),
|
||||
(6, 'InterContinental Geneva', 'Geneva', 'Luxury', '2024-04-23', '2024-04-28', B'0'),
|
||||
(7, 'Sheraton Zurich', 'Zurich', 'Upper Upscale', '2024-04-27', '2024-04-02', B'0'),
|
||||
(8, 'Holiday Inn Basel', 'Basel', 'Upper Midscale', '2024-04-24', '2024-04-09', B'0'),
|
||||
(9, 'Courtyard Zurich', 'Zurich', 'Upscale', '2024-04-03', '2024-04-13', B'0'),
|
||||
(10, 'Comfort Inn Bern', 'Bern', 'Midscale', '2024-04-04', '2024-04-16', B'0');
|
||||
```
|
||||
|
||||
1. End the database session:
|
||||
|
||||
```bash
|
||||
\q
|
||||
```
|
||||
|
||||
## Step 2: Install and configure Toolbox
|
||||
|
||||
In this section, we will download Toolbox, configure our tools in a
|
||||
`tools.yaml`, and then run the Toolbox server.
|
||||
|
||||
1. Download the latest version of Toolbox as a binary:
|
||||
|
||||
{{< notice tip >}}
|
||||
Select the
|
||||
[correct binary](https://github.com/googleapis/genai-toolbox/releases)
|
||||
corresponding to your OS and CPU architecture.
|
||||
{{< /notice >}}
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.9.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
1. Make the binary executable:
|
||||
|
||||
```bash
|
||||
chmod +x toolbox
|
||||
```
|
||||
|
||||
1. Write the following into a `tools.yaml` file. Be sure to update any fields
|
||||
such as `user`, `password`, or `database` that you may have customized in the
|
||||
previous step.
|
||||
|
||||
{{< notice tip >}}
|
||||
In practice, use environment variable replacement with the format ${ENV_NAME}
|
||||
instead of hardcoding your secrets into the configuration file.
|
||||
{{< /notice >}}
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-pg-source:
|
||||
kind: postgres
|
||||
host: 127.0.0.1
|
||||
port: 5432
|
||||
database: toolbox_db
|
||||
user: ${USER_NAME}
|
||||
password: ${PASSWORD}
|
||||
tools:
|
||||
search-hotels-by-name:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: Search for hotels based on name.
|
||||
parameters:
|
||||
- name: name
|
||||
type: string
|
||||
description: The name of the hotel.
|
||||
statement: SELECT * FROM hotels WHERE name ILIKE '%' || $1 || '%';
|
||||
search-hotels-by-location:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: Search for hotels based on location.
|
||||
parameters:
|
||||
- name: location
|
||||
type: string
|
||||
description: The location of the hotel.
|
||||
statement: SELECT * FROM hotels WHERE location ILIKE '%' || $1 || '%';
|
||||
book-hotel:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: >-
|
||||
Book a hotel by its ID. If the hotel is successfully booked, returns a NULL, raises an error if not.
|
||||
parameters:
|
||||
- name: hotel_id
|
||||
type: string
|
||||
description: The ID of the hotel to book.
|
||||
statement: UPDATE hotels SET booked = B'1' WHERE id = $1;
|
||||
update-hotel:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: >-
|
||||
Update a hotel's check-in and check-out dates by its ID. Returns a message
|
||||
indicating whether the hotel was successfully updated or not.
|
||||
parameters:
|
||||
- name: hotel_id
|
||||
type: string
|
||||
description: The ID of the hotel to update.
|
||||
- name: checkin_date
|
||||
type: string
|
||||
description: The new check-in date of the hotel.
|
||||
- name: checkout_date
|
||||
type: string
|
||||
description: The new check-out date of the hotel.
|
||||
statement: >-
|
||||
UPDATE hotels SET checkin_date = CAST($2 as date), checkout_date = CAST($3
|
||||
as date) WHERE id = $1;
|
||||
cancel-hotel:
|
||||
kind: postgres-sql
|
||||
source: my-pg-source
|
||||
description: Cancel a hotel by its ID.
|
||||
parameters:
|
||||
- name: hotel_id
|
||||
type: string
|
||||
description: The ID of the hotel to cancel.
|
||||
statement: UPDATE hotels SET booked = B'0' WHERE id = $1;
|
||||
toolsets:
|
||||
my-toolset:
|
||||
- search-hotels-by-name
|
||||
- search-hotels-by-location
|
||||
- book-hotel
|
||||
- update-hotel
|
||||
- cancel-hotel
|
||||
```
|
||||
|
||||
For more info on tools, check out the `Resources` section of the docs.
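
If you kept the `${USER_NAME}` and `${PASSWORD}` placeholders in `tools.yaml`, one simple way to supply them is to export the matching environment variables in the shell that will run the server (values here match the user created in Step 1):

```bash
export USER_NAME=toolbox_user
export PASSWORD=my-password
```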
|
||||
|
||||
1. Run the Toolbox server, pointing to the `tools.yaml` file created earlier:
|
||||
|
||||
```bash
|
||||
./toolbox --tools-file "tools.yaml"
|
||||
```
|
||||
{{< notice note >}}
|
||||
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
|
||||
{{< /notice >}}
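
For example, to start the server with dynamic reloading turned off (flag name taken from the note above):

```bash
./toolbox --tools-file "tools.yaml" --disable-reload
```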
|
||||
|
||||
## Step 3: Connect your agent to Toolbox
|
||||
|
||||
In this section, we will write and run an agent that will load the Tools
|
||||
from Toolbox.
|
||||
|
||||
1. (Optional) Initialize a Node.js project:
|
||||
|
||||
```bash
|
||||
npm init -y
|
||||
```
|
||||
|
||||
1. In a new terminal, install the [SDK](https://www.npmjs.com/package/@toolbox-sdk/core).
|
||||
|
||||
```bash
|
||||
npm install langchain @toolbox-sdk/core
|
||||
```
|
||||
|
||||
1. Install other required dependencies:
|
||||
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="LangChain" lang="bash" >}}
|
||||
npm install langchain @langchain/google-vertexai
|
||||
{{< /tab >}}
|
||||
{{< tab header="GenkitJS" lang="bash" >}}
|
||||
npm install genkit @genkit-ai/vertexai
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
1. Create a new file named `hotelAgent.js` and copy the following code to create an agent:
|
||||
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="LangChain" lang="js" >}}
|
||||
|
||||
import { ChatVertexAI } from "@langchain/google-vertexai";
|
||||
import { ToolboxClient } from "@toolbox-sdk/core";
|
||||
import { tool } from "@langchain/core/tools";
|
||||
import { createReactAgent } from "@langchain/langgraph/prebuilt";
|
||||
import { MemorySaver } from "@langchain/langgraph";
|
||||
|
||||
// Replace it with your API key
|
||||
process.env.GOOGLE_API_KEY = 'your-api-key';
|
||||
|
||||
const prompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
];
|
||||
|
||||
async function runApplication() {
|
||||
const model = new ChatVertexAI({
|
||||
model: "gemini-2.0-flash",
|
||||
});
|
||||
|
||||
|
||||
const client = new ToolboxClient("http://127.0.0.1:5000");
|
||||
const toolboxTools = await client.loadToolset("my-toolset");
|
||||
|
||||
// Define the basics of the tool: name, description, schema and core logic
|
||||
const getTool = (toolboxTool) => tool(toolboxTool, {
|
||||
name: toolboxTool.getName(),
|
||||
description: toolboxTool.getDescription(),
|
||||
schema: toolboxTool.getParamSchema()
|
||||
});
|
||||
const tools = toolboxTools.map(getTool);
|
||||
|
||||
const agent = createReactAgent({
|
||||
llm: model,
|
||||
tools: tools,
|
||||
checkpointer: new MemorySaver(),
|
||||
systemPrompt: prompt,
|
||||
});
|
||||
|
||||
const langGraphConfig = {
|
||||
configurable: {
|
||||
thread_id: "test-thread",
|
||||
},
|
||||
};
|
||||
|
||||
|
||||
for (const query of queries) {
|
||||
const agentOutput = await agent.invoke(
|
||||
{
|
||||
messages: [
|
||||
{
|
||||
role: "user",
|
||||
content: query,
|
||||
},
|
||||
],
|
||||
verbose: true,
|
||||
},
|
||||
langGraphConfig
|
||||
);
|
||||
const response = agentOutput.messages[agentOutput.messages.length - 1].content;
|
||||
console.log(response);
|
||||
}
|
||||
}
|
||||
|
||||
runApplication()
|
||||
.catch(console.error)
|
||||
.finally(() => console.log("\nApplication finished."));
|
||||
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="GenkitJS" lang="js" >}}
|
||||
|
||||
import { ToolboxClient } from "@toolbox-sdk/core";
|
||||
import { genkit } from "genkit";
|
||||
import { googleAI } from '@genkit-ai/googleai';
|
||||
|
||||
// Replace it with your API key
|
||||
process.env.GOOGLE_API_KEY = 'your-api-key';
|
||||
|
||||
const systemPrompt = `
|
||||
You're a helpful hotel assistant. You handle hotel searching, booking, and
|
||||
cancellations. When the user searches for a hotel, mention its name, id,
|
||||
location and price tier. Always mention hotel ids while performing any
|
||||
searches. This is very important for any operations. For any bookings or
|
||||
cancellations, please provide the appropriate confirmation. Be sure to
|
||||
update checkin or checkout dates if mentioned by the user.
|
||||
Don't ask for confirmations from the user.
|
||||
`;
|
||||
|
||||
const queries = [
|
||||
"Find hotels in Basel with Basel in its name.",
|
||||
"Can you book the Hilton Basel for me?",
|
||||
"Oh wait, this is too expensive. Please cancel it and book the Hyatt Regency instead.",
|
||||
"My check in dates would be from April 10, 2024 to April 19, 2024.",
|
||||
];
|
||||
|
||||
async function run() {
|
||||
const toolboxClient = new ToolboxClient("http://127.0.0.1:5000");
|
||||
|
||||
const ai = genkit({
|
||||
plugins: [
|
||||
googleAI({
|
||||
apiKey: process.env.GEMINI_API_KEY || process.env.GOOGLE_API_KEY
|
||||
})
|
||||
],
|
||||
model: googleAI.model('gemini-2.0-flash'),
|
||||
});
|
||||
|
||||
const toolboxTools = await toolboxClient.loadToolset("my-toolset");
|
||||
const toolMap = Object.fromEntries(
|
||||
toolboxTools.map((tool) => {
|
||||
const definedTool = ai.defineTool(
|
||||
{
|
||||
name: tool.getName(),
|
||||
description: tool.getDescription(),
|
||||
inputSchema: tool.getParamSchema(),
|
||||
},
|
||||
tool
|
||||
);
|
||||
return [tool.getName(), definedTool];
|
||||
})
|
||||
);
|
||||
const tools = Object.values(toolMap);
|
||||
|
||||
let conversationHistory = [{ role: "system", content: [{ text: systemPrompt }] }];
|
||||
|
||||
for (const query of queries) {
|
||||
conversationHistory.push({ role: "user", content: [{ text: query }] });
|
||||
let response = await ai.generate({
|
||||
messages: conversationHistory,
|
||||
tools: tools,
|
||||
});
|
||||
conversationHistory.push(response.message);
|
||||
|
||||
const toolRequests = response.toolRequests;
|
||||
if (toolRequests?.length > 0) {
|
||||
// Execute tools concurrently and collect their responses.
|
||||
const toolResponses = await Promise.all(
|
||||
toolRequests.map(async (call) => {
|
||||
try {
|
||||
const toolOutput = await toolMap[call.name].invoke(call.input);
|
||||
return { role: "tool", content: [{ toolResponse: { name: call.name, output: toolOutput } }] };
|
||||
} catch (e) {
|
||||
console.error(`Error executing tool ${call.name}:`, e);
|
||||
return { role: "tool", content: [{ toolResponse: { name: call.name, output: { error: e.message } } }] };
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
conversationHistory.push(...toolResponses);
|
||||
|
||||
// Call the AI again with the tool results.
|
||||
response = await ai.generate({ messages: conversationHistory, tools });
|
||||
conversationHistory.push(response.message);
|
||||
}
|
||||
|
||||
console.log(response.text);
|
||||
}
|
||||
}
|
||||
|
||||
run();
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
|
||||
1. Run your agent, and observe the results:
|
||||
|
||||
```sh
|
||||
node hotelAgent.js
|
||||
```
|
||||
|
||||
{{< notice info >}}
|
||||
For more information, visit the [JS SDK repo](https://github.com/googleapis/mcp-toolbox-sdk-js).
|
||||
{{</ notice >}}
|
||||
@@ -105,7 +105,7 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
<!-- {x-release-please-start-version} -->
|
||||
```bash
|
||||
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.8.0/$OS/toolbox
|
||||
curl -O https://storage.googleapis.com/genai-toolbox/v0.9.0/$OS/toolbox
|
||||
```
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -218,17 +218,25 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
|
||||
1. Type `y` when it asks to install the inspector package.
|
||||
|
||||
1. It should show the following when the MCP Inspector is up and running:
|
||||
1. It should show the following when the MCP Inspector is up and running (please take note of `<YOUR_SESSION_TOKEN>`):
|
||||
|
||||
```bash
|
||||
🔍 MCP Inspector is up and running at http://127.0.0.1:5173 🚀
|
||||
Starting MCP inspector...
|
||||
⚙️ Proxy server listening on localhost:6277
|
||||
🔑 Session token: <YOUR_SESSION_TOKEN>
|
||||
Use this token to authenticate requests or set DANGEROUSLY_OMIT_AUTH=true to disable auth
|
||||
|
||||
🚀 MCP Inspector is up and running at:
|
||||
http://localhost:6274/?MCP_PROXY_AUTH_TOKEN=<YOUR_SESSION_TOKEN>
|
||||
```
|
||||
|
||||
1. Open the above link in your browser.
|
||||
|
||||
1. For `Transport Type`, select `SSE`.
|
||||
1. For `Transport Type`, select `Streamable HTTP`.
|
||||
|
||||
1. For `URL`, type in `http://127.0.0.1:5000/mcp/sse`.
|
||||
1. For `URL`, type in `http://127.0.0.1:5000/mcp`.
|
||||
|
||||
1. For `Configuration` -> `Proxy Session Token`, make sure `<YOUR_SESSION_TOKEN>` is present.
|
||||
|
||||
1. Click Connect.
|
||||
|
||||
@@ -238,4 +246,4 @@ In this section, we will download Toolbox, configure our tools in a
|
||||
|
||||

|
||||
|
||||
1. Test out your tools here!
|
||||
1. Test out your tools here!
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 22 KiB After Width: | Height: | Size: 32 KiB |
@@ -52,19 +52,19 @@ Omni](https://cloud.google.com/alloydb/omni/current/docs/overview).
|
||||
<!-- {x-release-please-start-version} -->
|
||||
{{< tabpane persist=header >}}
|
||||
{{< tab header="linux/amd64" lang="bash" >}}
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.8.0/linux/amd64/toolbox>
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.9.0/linux/amd64/toolbox>
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/arm64" lang="bash" >}}
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.8.0/darwin/arm64/toolbox>
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.9.0/darwin/arm64/toolbox>
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="darwin/amd64" lang="bash" >}}
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.8.0/darwin/amd64/toolbox>
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.9.0/darwin/amd64/toolbox>
|
||||
{{< /tab >}}
|
||||
|
||||
{{< tab header="windows/amd64" lang="bash" >}}
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.8.0/windows/amd64/toolbox>
|
||||
curl -O <https://storage.googleapis.com/genai-toolbox/v0.9.0/windows/amd64/toolbox>
|
||||
{{< /tab >}}
|
||||
{{< /tabpane >}}
|
||||
<!-- {x-release-please-end} -->
|
||||
|
||||
@@ -20,18 +20,16 @@ The native SDKs can be combined with MCP clients in many cases.
|
||||
|
||||
Toolbox currently supports the following versions of MCP specification:
|
||||
|
||||
* [2024-11-05](https://spec.modelcontextprotocol.io/specification/2024-11-05/)
|
||||
* [2025-06-18](https://modelcontextprotocol.io/specification/2025-06-18)
|
||||
* [2025-03-26](https://modelcontextprotocol.io/specification/2025-03-26)
|
||||
* [2024-11-05](https://modelcontextprotocol.io/specification/2024-11-05)
|
||||
|
||||
### Features Not Supported by MCP
|
||||
### Toolbox AuthZ/AuthN Not Supported by MCP
|
||||
|
||||
Toolbox has several features that are not yet supported in the MCP specification:
|
||||
|
||||
* **AuthZ/AuthN:** There are no auth implementation in the `2024-11-05`
|
||||
specification. This includes:
|
||||
The auth implementation in Toolbox is not supported in MCP's auth specification.
|
||||
This includes:
|
||||
* [Authenticated Parameters](../resources/tools/_index.md#authenticated-parameters)
|
||||
* [Authorized Invocations](../resources/tools/_index.md#authorized-invocations)
|
||||
* **Notifications:** Currently, editing Toolbox Tools requires a server restart.
|
||||
Clients should reload tools on disconnect to get the latest version.
|
||||
|
||||
## Connecting to Toolbox with an MCP client
|
||||
|
||||
@@ -63,11 +61,15 @@ When running with stdio, Toolbox will listen via stdio instead of acting as a
|
||||
remote HTTP server. Logs will be set to the `warn` level by default. `debug` and
|
||||
`info` logs are not supported with stdio.
|
||||
|
||||
{{< notice note >}}
|
||||
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
|
||||
{{< /notice >}}
|
||||
|
||||
### Connecting via HTTP
|
||||
|
||||
Toolbox supports the HTTP transport protocol with and without SSE.
|
||||
|
||||
{{< tabpane text=true >}} {{% tab header="HTTP with SSE" lang="en" %}}
|
||||
{{< tabpane text=true >}} {{% tab header="HTTP with SSE (deprecated)" lang="en" %}}
|
||||
Add the following configuration to your MCP client configuration:
|
||||
|
||||
```bash
|
||||
@@ -83,11 +85,25 @@ Add the following configuration to your MCP client configuration:
|
||||
|
||||
If you would like to connect to a specific toolset, replace `url` with
|
||||
`"http://127.0.0.1:5000/mcp/{toolset_name}/sse"`.
|
||||
{{% /tab %}} {{% tab header="HTTP POST" lang="en" %}}
|
||||
Connect to Toolbox HTTP POST via `http://127.0.0.1:5000/mcp`.
|
||||
|
||||
If you would like to connect to a specific toolset, connect via
|
||||
`http://127.0.0.1:5000/mcp/{toolset_name}`.
|
||||
HTTP with SSE is only supported in version `2024-11-05` and is currently
|
||||
deprecated.
|
||||
{{% /tab %}} {{% tab header="Streamable HTTP" lang="en" %}}
|
||||
Add the following configuration to your MCP client configuration:
|
||||
|
||||
```bash
|
||||
{
|
||||
"mcpServers": {
|
||||
"toolbox": {
|
||||
"type": "http",
|
||||
"url": "http://127.0.0.1:5000/mcp",
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
If you would like to connect to a specific toolset, replace `url` with
|
||||
`"http://127.0.0.1:5000/mcp/{toolset_name}"`.
|
||||
{{% /tab %}} {{< /tabpane >}}
|
||||
|
||||
### Using the MCP Inspector with Toolbox
|
||||
@@ -114,7 +130,7 @@ testing and debugging Toolbox server.
|
||||
1. Click the `Connect` button. It might take awhile to spin up Toolbox. Voila!
|
||||
You should be able to inspect your toolbox tools!
|
||||
{{% /tab %}}
|
||||
{{% tab header="HTTP with SSE" lang="en" %}}
|
||||
{{% tab header="HTTP with SSE (deprecated)" lang="en" %}}
|
||||
1. [Run Toolbox](../getting-started/introduction/_index.md#running-the-server).
|
||||
|
||||
1. In a separate terminal, run Inspector directly through `npx`:
|
||||
@@ -128,6 +144,23 @@ testing and debugging Toolbox server.
|
||||
1. For `URL`, type in `http://127.0.0.1:5000/mcp/sse` to use all tools or
|
||||
`http://127.0.0.1:5000/mcp/{toolset_name}/sse` to use a specific toolset.
|
||||
|
||||
1. Click the `Connect` button. Voila! You should be able to inspect your toolbox
|
||||
tools!
|
||||
{{% /tab %}}
|
||||
{{% tab header="Streamable HTTP" lang="en" %}}
|
||||
1. [Run Toolbox](../getting-started/introduction/_index.md#running-the-server).
|
||||
|
||||
1. In a separate terminal, run Inspector directly through `npx`:
|
||||
|
||||
```bash
|
||||
npx @modelcontextprotocol/inspector
|
||||
```
|
||||
|
||||
1. For `Transport Type` dropdown menu, select `Streamable HTTP`.
|
||||
|
||||
1. For `URL`, type in `http://127.0.0.1:5000/mcp` to use all tools or
|
||||
`http://127.0.0.1:5000/mcp/{toolset_name}` to use a specific toolset.
|
||||
|
||||
1. Click the `Connect` button. Voila! You should be able to inspect your toolbox
|
||||
tools!
|
||||
{{% /tab %}} {{< /tabpane >}}
|
||||
|
||||
@@ -79,7 +79,7 @@ database are in the same VPC network.
|
||||
|
||||
Create a `tools.yaml` file that contains your configuration for Toolbox. For
|
||||
details, see the
|
||||
[configuration](https://github.com/googleapis/genai-toolbox/blob/main/README.md#configuration)
|
||||
[configuration](https://googleapis.github.io/genai-toolbox/resources/sources/)
|
||||
section.
|
||||
|
||||
## Deploy to Cloud Run
|
||||
@@ -133,22 +133,16 @@ section.
|
||||
|
||||
## Connecting with Toolbox Client SDK
|
||||
|
||||
You can connect to Toolbox Cloud Run instances directly through the SDK
|
||||
You can connect to Toolbox Cloud Run instances directly through the SDK.
|
||||
|
||||
1. [Set up `Cloud Run Invoker` role
|
||||
access](https://cloud.google.com/run/docs/securing/managing-access#service-add-principals)
|
||||
to your Cloud Run service.
|
||||
|
||||
1. Set up [Application Default
|
||||
1. (Only for local runs) Set up [Application Default
|
||||
Credentials](https://cloud.google.com/docs/authentication/set-up-adc-local-dev-environment)
|
||||
for the principal you set up the `Cloud Run Invoker` role access to.
|
||||
|
||||
{{< notice tip >}}
|
||||
If you're working in some other environment than local, set up [environment
|
||||
specific Default
|
||||
Credentials](https://cloud.google.com/docs/authentication/provide-credentials-adc).
|
||||
{{< /notice >}}
|
||||
|
||||
1. Run the following to retrieve a non-deterministic URL for the cloud run service:
|
||||
|
||||
```bash
|
||||
@@ -160,9 +154,11 @@ You can connect to Toolbox Cloud Run instances directly through the SDK
|
||||
```python
|
||||
from toolbox_core import ToolboxClient, auth_methods
|
||||
|
||||
auth_token_provider = auth_methods.aget_google_id_token # can also use sync method
|
||||
|
||||
# Replace with the Cloud Run service URL generated in the previous step.
|
||||
URL = "https://cloud-run-url.app"
|
||||
|
||||
auth_token_provider = auth_methods.aget_google_id_token(URL) # can also use sync method
|
||||
|
||||
async with ToolboxClient(
|
||||
URL,
|
||||
client_headers={"Authorization": auth_token_provider},
|
||||
|
||||
70 docs/en/resources/sources/firestore.md (new file)
@@ -0,0 +1,70 @@
|
||||
---
|
||||
title: "Firestore"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
Firestore is a NoSQL document database built for automatic scaling, high performance, and ease of application development. It's a fully managed, serverless database that supports mobile, web, and server development.
|
||||
|
||||
---
|
||||
|
||||
# Firestore Source
|
||||
|
||||
[Firestore][firestore-docs] is a NoSQL document database built for automatic
|
||||
scaling, high performance, and ease of application development. While the
|
||||
Firestore interface has many of the same features as traditional databases,
|
||||
as a NoSQL database it differs from them in the way it describes relationships
|
||||
between data objects.
|
||||
|
||||
If you are new to Firestore, you can [create a database and learn the
|
||||
basics][firestore-quickstart].
|
||||
|
||||
[firestore-docs]: https://cloud.google.com/firestore/docs
|
||||
[firestore-quickstart]: https://cloud.google.com/firestore/docs/quickstart-servers
|
||||
|
||||
## Requirements
|
||||
|
||||
### IAM Permissions
|
||||
|
||||
Firestore uses [Identity and Access Management (IAM)][iam-overview] to control
|
||||
user and group access to Firestore resources. Toolbox will use your [Application
|
||||
Default Credentials (ADC)][adc] to authorize and authenticate when interacting
|
||||
with [Firestore][firestore-docs].
|
||||
|
||||
In addition to [setting the ADC for your server][set-adc], you need to ensure
|
||||
the IAM identity has been given the correct IAM permissions for accessing
|
||||
Firestore. Common roles include:
|
||||
- `roles/datastore.user` - Read and write access to Firestore
|
||||
- `roles/datastore.viewer` - Read-only access to Firestore
|
||||
|
||||
See [Firestore access control][firestore-iam] for more information on
|
||||
applying IAM permissions and roles to an identity.
|
||||
|
||||
[iam-overview]: https://cloud.google.com/firestore/docs/security/iam
|
||||
[adc]: https://cloud.google.com/docs/authentication#adc
|
||||
[set-adc]: https://cloud.google.com/docs/authentication/provide-credentials-adc
|
||||
[firestore-iam]: https://cloud.google.com/firestore/docs/security/iam
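
For example, granting read/write access to the identity Toolbox runs as might look like the following (the project ID and service account are placeholders):

```bash
gcloud projects add-iam-policy-binding my-project-id \
  --member="serviceAccount:toolbox-sa@my-project-id.iam.gserviceaccount.com" \
  --role="roles/datastore.user"
```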
|
||||
|
||||
### Database Selection
|
||||
|
||||
Firestore allows you to create multiple databases within a single project. Each
|
||||
database is isolated from the others and has its own set of documents and
|
||||
collections. If you don't specify a database in your configuration, the default
|
||||
database named `(default)` will be used.
|
||||
|
||||
## Example
|
||||
|
||||
```yaml
|
||||
sources:
|
||||
my-firestore-source:
|
||||
kind: "firestore"
|
||||
project: "my-project-id"
|
||||
# database: "my-database" # Optional, defaults to "(default)"
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------|:--------:|:------------:|----------------------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "firestore". |
|
||||
| project | string | true | Id of the GCP project that contains the Firestore database (e.g. "my-project-id"). |
|
||||
| database | string | false | Name of the Firestore database to connect to. Defaults to "(default)" if not specified. |
|
||||
@@ -4,7 +4,6 @@ type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
MySQL is a relational database management system that stores and manages data.
|
||||
|
||||
---
|
||||
|
||||
## About
|
||||
@@ -35,6 +34,7 @@ sources:
|
||||
database: my_db
|
||||
user: ${USER_NAME}
|
||||
password: ${PASSWORD}
|
||||
queryTimeout: 30s # Optional: query timeout duration
|
||||
```
|
||||
|
||||
{{< notice tip >}}
|
||||
@@ -44,11 +44,12 @@ instead of hardcoding your secrets into the configuration file.
|
||||
|
||||
## Reference
|
||||
|
||||
| **field** | **type** | **required** | **description** |
|
||||
|-----------|:--------:|:------------:|---------------------------------------------------------------------------------------------|
|
||||
| kind | string | true | Must be "mysql". |
|
||||
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
|
||||
| port | string | true | Port to connect to (e.g. "3306"). |
|
||||
| database | string | true | Name of the MySQL database to connect to (e.g. "my_db"). |
|
||||
| user | string | true | Name of the MySQL user to connect as (e.g. "my-mysql-user"). |
|
||||
| password | string | true | Password of the MySQL user (e.g. "my-password"). |
|
||||
| **field** | **type** | **required** | **description** |
|
||||
| ------------ | :------: | :----------: | ----------------------------------------------------------------------------------------------- |
|
||||
| kind | string | true | Must be "mysql". |
|
||||
| host | string | true | IP address to connect to (e.g. "127.0.0.1"). |
|
||||
| port | string | true | Port to connect to (e.g. "3306"). |
|
||||
| database | string | true | Name of the MySQL database to connect to (e.g. "my_db"). |
|
||||
| user | string | true | Name of the MySQL user to connect as (e.g. "my-mysql-user"). |
|
||||
| password | string | true | Password of the MySQL user (e.g. "my-password"). |
|
||||
| queryTimeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, no timeout is applied. |
|
||||
|
||||
@@ -33,7 +33,7 @@ sources:
|
||||
my-redis-instance:
|
||||
kind: redis
|
||||
address:
|
||||
- 127.0.0.1
|
||||
- 127.0.0.1:6379
|
||||
username: ${MY_USER_NAME}
|
||||
password: ${MY_AUTH_STRING} # Omit this field if you don't have a password.
|
||||
# database: 0
|
||||
@@ -58,7 +58,7 @@ sources:
|
||||
my-redis-cluster-instance:
|
||||
kind: memorystore-redis
|
||||
address:
|
||||
- 127.0.0.1
|
||||
- 127.0.0.1:6379
|
||||
password: ${MY_AUTH_STRING}
|
||||
# useGCPIAM: false
|
||||
# clusterEnabled: false
|
||||
@@ -74,7 +74,8 @@ using IAM authentication:
|
||||
sources:
|
||||
my-redis-cluster-instance:
|
||||
kind: memorystore-redis
|
||||
address: 127.0.0.1
|
||||
address:
|
||||
- 127.0.0.1:6379
|
||||
useGCPIAM: true
|
||||
clusterEnabled: true
|
||||
```
|
||||
|
||||
@@ -17,7 +17,7 @@ sets, sorted sets with range queries, bitmaps, hyperloglogs, and geospatial
|
||||
indexes with radius queries.
|
||||
|
||||
If you're new to Valkey, you can find installation and getting started guides on
|
||||
the [official Valkey website](https://valkey.io/docs/getting-started/).
|
||||
the [official Valkey website](https://valkey.io/topics/quickstart/).
|
||||
|
||||
## Example
|
||||
|
||||
@@ -26,7 +26,7 @@ sources:
|
||||
my-valkey-instance:
|
||||
kind: valkey
|
||||
address:
|
||||
- 127.0.0.1
|
||||
- 127.0.0.1:6379
|
||||
username: ${YOUR_USERNAME}
|
||||
password: ${YOUR_PASSWORD}
|
||||
# database: 0
|
||||
@@ -50,7 +50,7 @@ sources:
|
||||
my-valkey-instance:
|
||||
kind: valkey
|
||||
address:
|
||||
- 127.0.0.1
|
||||
- 127.0.0.1:6379
|
||||
useGCPIAM: true
|
||||
```
|
||||
|
||||
|
||||
@@ -81,8 +81,9 @@ the parameter.
|
||||
|-------------|:---------------:|:------------:|-----------------------------------------------------------------------------|
|
||||
| name | string | true | Name of the parameter. |
|
||||
| type        | string          | true         | Must be one of "string", "integer", "float", "boolean", "array".             |
|
||||
| default | parameter type | false | Default value of the parameter. If provided, the parameter is not required. |
|
||||
| description | string | true | Natural language description of the parameter to describe it to the agent. |
|
||||
| default | parameter type | false | Default value of the parameter. If provided, `required` will be `false`. |
|
||||
| required | bool | false | Indicate if the parameter is required. Default to `true`. |
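
As a sketch, an optional parameter using these fields might look like this (tool and parameter names are illustrative):

```yaml
parameters:
  - name: location
    type: string
    description: The location to search in.
    default: "Basel"   # because a default is provided, the parameter is not required
  - name: price_tier
    type: string
    description: Optional price tier filter.
    required: false
```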
|
||||
|
||||
### Array Parameters

@@ -107,14 +108,47 @@ in the list using the items field:
|-------------|:----------------:|:------------:|-----------------------------------------------------------------------------|
| name | string | true | Name of the parameter. |
| type | string | true | Must be "array" |
| default | parameter type | false | Default value of the parameter. If provided, the parameter is not required. |
| description | string | true | Natural language description of the parameter to describe it to the agent. |
| default | parameter type | false | Default value of the parameter. If provided, `required` will be `false`. |
| required | bool | false | Indicate if the parameter is required. Default to `true`. |
| items | parameter object | true | Specify a Parameter object for the type of the values in the array. |

{{< notice note >}}
Items in array should not have a default value. If provided, it will be ignored.
Items in array should not have a `default` or `required` value. If provided, it will be ignored.
{{< /notice >}}
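A minimal sketch of an array parameter with its nested `items` object (names are illustrative):

```yaml
parameters:
  - name: user_ids           # hypothetical array parameter
    type: array
    description: List of user IDs to look up.
    items:
      name: user_id
      type: string
      description: A single user ID.
```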

### Map Parameters

The map type is a collection of key-value pairs. It can be configured in two ways:

- Generic Map: By default, it accepts values of any primitive type (string, number, boolean), allowing for mixed data.
- Typed Map: By setting the valueType field, you can enforce that all values
  within the map must be of the same specified type.

#### Generic Map (Mixed Value Types)

This is the default behavior when valueType is omitted. It's useful for passing a flexible group of settings.

```yaml
parameters:
  - name: execution_context
    type: map
    description: A flexible set of key-value pairs for the execution environment.
```

#### Typed Map

Specify valueType to ensure all values in the map are of the same type. An error
will be thrown in case of value type mismatch.

```yaml
parameters:
  - name: user_scores
    type: map
    description: A map of user IDs to their scores. All scores must be integers.
    valueType: integer # This enforces the value type for all entries.
```

### Authenticated Parameters

Authenticated parameters are automatically populated with user
@@ -15,8 +15,10 @@ It's compatible with the following sources:

- [bigquery](../sources/bigquery.md)

bigquery-get-dataset-info takes a dataset parameter to specify the dataset
on the given source.
`bigquery-get-dataset-info` takes a `dataset` parameter to specify the dataset
on the given source. It also optionally accepts a `project` parameter to
define the Google Cloud project ID. If the `project` parameter is not provided,
the tool defaults to using the project defined in the source configuration.

## Example
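A hedged sketch of how such a tool entry might look in `tools.yaml` (the tool and source names are placeholders; the kind follows the doc title):

```yaml
tools:
  get_dataset_info:                  # hypothetical tool name
    kind: bigquery-get-dataset-info
    source: my-bigquery-source       # hypothetical BigQuery source
    description: Get metadata about a BigQuery dataset.
    # The agent supplies `dataset`, and optionally `project`; without `project`,
    # the project configured on the source is used.
```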
@@ -15,8 +15,10 @@ It's compatible with the following sources:

- [bigquery](../sources/bigquery.md)

bigquery-get-table-info takes dataset and table parameters to specify
the target table.
`bigquery-get-table-info` takes `dataset` and `table` parameters to specify
the target table. It also optionally accepts a `project` parameter to define
the Google Cloud project ID. If the `project` parameter is not provided, the
tool defaults to using the project defined in the source configuration.

## Example

@@ -15,8 +15,9 @@ It's compatible with the following sources:

- [bigquery](../sources/bigquery.md)

bigquery-list-dataset-ids requires no input parameters beyond the configured
source.
`bigquery-list-dataset-ids` optionally accepts a `project` parameter to define
the Google Cloud project ID. If the `project` parameter is not provided, the
tool defaults to using the project defined in the source configuration.

## Example

@@ -15,8 +15,10 @@ It's compatible with the following sources:

- [bigquery](../sources/bigquery.md)

bigquery-get-dataset-info takes a dataset parameter to specify the dataset
from which to list table IDs.
`bigquery-get-dataset-info` takes a required `dataset` parameter to specify the dataset
from which to list table IDs. It also optionally accepts a `project` parameter to
define the Google Cloud project ID. If the `project` parameter is not provided, the
tool defaults to using the project defined in the source configuration.

## Example

@@ -20,8 +20,11 @@ instance. It's compatible with any of the following sources:

Bigtable supports SQL queries. The integration with Toolbox supports `googlesql`
dialect, the specified SQL statement is executed as a [data manipulation
language (DML)][bigtable-googlesql] statements, and specified parameters will
inserted according to their name: e.g. `@name`.
language (DML)][bigtable-googlesql] statements, and specified parameters will inserted according to their name: e.g. `@name`.

{{<notice note>}}
Bigtable's GoogleSQL support for DML statements might be limited to certain query types. For detailed information on supported DML statements and use cases, refer to the [Bigtable GoogleSQL use cases](https://cloud.google.com/bigtable/docs/googlesql-overview#use-cases).
{{</notice>}}

[bigtable-googlesql]: https://cloud.google.com/bigtable/docs/googlesql-overview

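As a sketch of how a named `@` parameter ties a statement to a parameter definition (the `bigtable-sql` kind and every name below are assumptions for illustration, not taken from this diff):

```yaml
tools:
  search_users_by_name:            # hypothetical tool name
    kind: bigtable-sql             # assumed kind for the Bigtable SQL tool
    source: my-bigtable-source     # hypothetical source
    description: Search users by name.
    statement: SELECT * FROM users WHERE name = @name
    parameters:
      - name: name                 # bound to @name in the statement
        type: string
        description: Name to search for.
```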
7
docs/en/resources/tools/firestore/_index.md
Normal file
@@ -0,0 +1,7 @@
---
title: "Firestore"
type: docs
weight: 1
description: >
  Tools that work with Firestore Sources.
---
@@ -0,0 +1,38 @@
---
title: "firestore-delete-documents"
type: docs
weight: 1
description: >
  A "firestore-delete-documents" tool deletes multiple documents from Firestore by their paths.
aliases:
- /resources/tools/firestore-delete-documents
---

## About

A `firestore-delete-documents` tool deletes multiple documents from Firestore by their paths.
It's compatible with the following sources:

- [firestore](../sources/firestore.md)

`firestore-delete-documents` takes one input parameter `documentPaths` which is an array of
document paths to delete. The tool uses Firestore's BulkWriter for efficient batch deletion
and returns the success status for each document.

## Example

```yaml
tools:
  delete_user_documents:
    kind: firestore-delete-documents
    source: my-firestore-source
    description: Use this tool to delete multiple documents from Firestore.
```
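When an agent calls the tool defined above, the `documentPaths` argument is simply an array of document paths; a hypothetical invocation (paths made up) might send:

```yaml
documentPaths:
  - users/alice-123
  - users/bob-456
```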

## Reference

| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "firestore-delete-documents". |
| source | string | true | Name of the Firestore source to delete documents from. |
| description | string | true | Description of the tool that is passed to the LLM. |
38
docs/en/resources/tools/firestore/firestore-get-documents.md
Normal file
@@ -0,0 +1,38 @@
---
title: "firestore-get-documents"
type: docs
weight: 1
description: >
  A "firestore-get-documents" tool retrieves multiple documents from Firestore by their paths.
aliases:
- /resources/tools/firestore-get-documents
---

## About

A `firestore-get-documents` tool retrieves multiple documents from Firestore by their paths.
It's compatible with the following sources:

- [firestore](../sources/firestore.md)

`firestore-get-documents` takes one input parameter `documentPaths` which is an array of
document paths, and returns the documents' data along with metadata such as existence status,
creation time, update time, and read time.

## Example

```yaml
tools:
  get_user_documents:
    kind: firestore-get-documents
    source: my-firestore-source
    description: Use this tool to retrieve multiple documents from Firestore.
```

## Reference

| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "firestore-get-documents". |
| source | string | true | Name of the Firestore source to retrieve documents from. |
| description | string | true | Description of the tool that is passed to the LLM. |
37
docs/en/resources/tools/firestore/firestore-get-rules.md
Normal file
@@ -0,0 +1,37 @@
---
title: "firestore-get-rules"
type: docs
weight: 1
description: >
  A "firestore-get-rules" tool retrieves the active Firestore security rules for the current project.
aliases:
- /resources/tools/firestore-get-rules
---

## About

A `firestore-get-rules` tool retrieves the active [Firestore security rules](https://firebase.google.com/docs/firestore/security/get-started) for the current project.
It's compatible with the following sources:

- [firestore](../sources/firestore.md)

`firestore-get-rules` takes no input parameters and returns the security rules content along with metadata
such as the ruleset name, and timestamps.

## Example

```yaml
tools:
  get_firestore_rules:
    kind: firestore-get-rules
    source: my-firestore-source
    description: Use this tool to retrieve the active Firestore security rules.
```

## Reference

| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "firestore-get-rules". |
| source | string | true | Name of the Firestore source to retrieve rules from. |
| description | string | true | Description of the tool that is passed to the LLM. |
@@ -0,0 +1,38 @@
---
title: "firestore-list-collections"
type: docs
weight: 1
description: >
  A "firestore-list-collections" tool lists collections in Firestore, either at the root level or as subcollections of a document.
aliases:
- /resources/tools/firestore-list-collections
---

## About

A `firestore-list-collections` tool lists [collections](https://firebase.google.com/docs/firestore/data-model#collections) in Firestore, either at the root level or as [subcollections](https://firebase.google.com/docs/firestore/data-model#subcollections) of a specific document.
It's compatible with the following sources:

- [firestore](../sources/firestore.md)

`firestore-list-collections` takes an optional `parentPath` parameter to specify a document
path. If provided, it lists all subcollections of that document. If not provided, it lists
all root-level collections in the database.

## Example

```yaml
tools:
  list_firestore_collections:
    kind: firestore-list-collections
    source: my-firestore-source
    description: Use this tool to list collections in Firestore.
```
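To list the subcollections of one document instead of the root-level collections, the agent would pass the optional `parentPath` argument; a hypothetical call might send:

```yaml
parentPath: users/alice-123   # omit this argument to list root-level collections
```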

## Reference

| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "firestore-list-collections". |
| source | string | true | Name of the Firestore source to list collections from. |
| description | string | true | Description of the tool that is passed to the LLM. |
198
docs/en/resources/tools/firestore/firestore-query-collection.md
Normal file
@@ -0,0 +1,198 @@
# firestore-query-collection

The `firestore-query-collection` tool allows you to query Firestore collections with filters, ordering, and limit capabilities.

## Configuration

To use this tool, you need to configure it in your YAML configuration file:

```yaml
sources:
  my-firestore:
    kind: firestore
    config:
      project: my-gcp-project
      database: "(default)"

tools:
  query_collection:
    kind: firestore-query-collection
    source: my-firestore
    description: Query Firestore collections with advanced filtering
```

## Parameters

| Parameter | Type | Required | Default | Description |
|-----------|------|----------|---------|-------------|
| `collectionPath` | string | Yes | - | The path to the Firestore collection to query |
| `filters` | array | No | - | Array of filter objects (as JSON strings) to apply to the query |
| `orderBy` | string | No | - | JSON string specifying field and direction to order results |
| `limit` | integer | No | 100 | Maximum number of documents to return |
| `analyzeQuery` | boolean | No | false | If true, returns query explain metrics including execution statistics |

### Filter Format

Each filter in the `filters` array should be a JSON string with the following structure:

```json
{
  "field": "fieldName",
  "op": "operator",
  "value": "compareValue"
}
```

Supported operators:
- `<` - Less than
- `<=` - Less than or equal to
- `>` - Greater than
- `>=` - Greater than or equal to
- `==` - Equal to
- `!=` - Not equal to
- `array-contains` - Array contains a specific value
- `array-contains-any` - Array contains any of the specified values
- `in` - Field value is in the specified array
- `not-in` - Field value is not in the specified array

Value types supported:
- String: `"value": "text"`
- Number: `"value": 123` or `"value": 45.67`
- Boolean: `"value": true` or `"value": false`
- Array: `"value": ["item1", "item2"]` (for `in`, `not-in`, `array-contains-any` operators)

### OrderBy Format

The `orderBy` parameter should be a JSON string with the following structure:

```json
{
  "field": "fieldName",
  "direction": "ASCENDING"
}
```

Direction values:
- `ASCENDING`
- `DESCENDING`

## Example Usage

### Query with filters

```json
{
  "collectionPath": "users",
  "filters": [
    "{\"field\": \"age\", \"op\": \">\", \"value\": 18}",
    "{\"field\": \"status\", \"op\": \"==\", \"value\": \"active\"}"
  ],
  "orderBy": "{\"field\": \"createdAt\", \"direction\": \"DESCENDING\"}",
  "limit": 50
}
```

### Query with array contains filter

```json
{
  "collectionPath": "products",
  "filters": [
    "{\"field\": \"categories\", \"op\": \"array-contains\", \"value\": \"electronics\"}",
    "{\"field\": \"price\", \"op\": \"<\", \"value\": 1000}"
  ],
  "orderBy": "{\"field\": \"price\", \"direction\": \"ASCENDING\"}",
  "limit": 20
}
```

### Query with IN operator

```json
{
  "collectionPath": "orders",
  "filters": [
    "{\"field\": \"status\", \"op\": \"in\", \"value\": [\"pending\", \"processing\"]}"
  ],
  "limit": 100
}
```

### Query with explain metrics

```json
{
  "collectionPath": "users",
  "filters": [
    "{\"field\": \"age\", \"op\": \">=\", \"value\": 21}",
    "{\"field\": \"active\", \"op\": \"==\", \"value\": true}"
  ],
  "orderBy": "{\"field\": \"lastLogin\", \"direction\": \"DESCENDING\"}",
  "limit": 25,
  "analyzeQuery": true
}
```

## Response Format

### Standard Response (analyzeQuery = false)

The tool returns an array of documents, where each document includes:

```json
{
  "id": "documentId",
  "path": "collection/documentId",
  "data": {
    // Document fields
  },
  "createTime": "2025-01-07T12:00:00Z",
  "updateTime": "2025-01-07T12:00:00Z",
  "readTime": "2025-01-07T12:00:00Z"
}
```

### Response with Query Analysis (analyzeQuery = true)

When `analyzeQuery` is set to true, the tool returns a single object containing documents and explain metrics:

```json
{
  "documents": [
    // Array of document objects as shown above
  ],
  "explainMetrics": {
    "planSummary": {
      "indexesUsed": [
        {
          "query_scope": "Collection",
          "properties": "(field ASC, __name__ ASC)"
        }
      ]
    },
    "executionStats": {
      "resultsReturned": 50,
      "readOperations": 50,
      "executionDuration": "120ms",
      "debugStats": {
        "indexes_entries_scanned": "1000",
        "documents_scanned": "50",
        "billing_details": {
          "documents_billable": "50",
          "index_entries_billable": "1000",
          "min_query_cost": "0"
        }
      }
    }
  }
}
```

## Error Handling

The tool will return errors for:
- Invalid collection path
- Malformed filter JSON
- Unsupported operators
- Query execution failures
- Invalid orderBy format
115
docs/en/resources/tools/firestore/firestore-validate-rules.md
Normal file
@@ -0,0 +1,115 @@
---
title: firestore-validate-rules
weight: 6
date: 2025-01-07
---

## Overview

The `firestore-validate-rules` tool validates Firestore security rules syntax and semantic correctness without deploying them. It provides detailed error reporting with source positions and code snippets.

## Configuration

```yaml
tools:
  firestore-validate-rules:
    kind: firestore-validate-rules
    source: <firestore-source-name>
    description: "Checks the provided Firestore Rules source for syntax and validation errors"
```

## Authentication

This tool requires authentication if the source requires authentication.

## Parameters

| Parameter | Type | Required | Description |
|-----------|--------|----------|-------------|
| source | string | Yes | The Firestore Rules source code to validate |

## Response

The tool returns a `ValidationResult` object containing:

```json
{
  "valid": boolean,            // Whether the rules are valid
  "issueCount": number,        // Number of issues found
  "formattedIssues": string,   // Human-readable formatted issues
  "rawIssues": [               // Array of raw issue objects
    {
      "sourcePosition": {
        "fileName": string,
        "line": number,
        "column": number,
        "currentOffset": number,
        "endOffset": number
      },
      "description": string,
      "severity": string       // e.g., "ERROR", "WARNING"
    }
  ]
}
```

## Example Usage

### Validate simple rules

```json
{
  "source": "rules_version = '2';\nservice cloud.firestore {\n match /databases/{database}/documents {\n match /{document=**} {\n allow read, write: if true;\n }\n }\n}"
}
```

### Example response for valid rules

```json
{
  "valid": true,
  "issueCount": 0,
  "formattedIssues": "✓ No errors detected. Rules are valid."
}
```

### Example response with errors

```json
{
  "valid": false,
  "issueCount": 1,
  "formattedIssues": "Found 1 issue(s) in rules source:\n\nERROR: Unexpected token ';' [Ln 4, Col 32]\n```\n allow read, write: if true;;\n ^\n```",
  "rawIssues": [
    {
      "sourcePosition": {
        "line": 4,
        "column": 32,
        "currentOffset": 105,
        "endOffset": 106
      },
      "description": "Unexpected token ';'",
      "severity": "ERROR"
    }
  ]
}
```

## Error Handling

The tool will return errors for:
- Missing or empty `source` parameter
- API errors when calling the Firebase Rules service
- Network connectivity issues

## Use Cases

1. **Pre-deployment validation**: Validate rules before deploying to production
2. **CI/CD integration**: Integrate rules validation into your build pipeline
3. **Development workflow**: Quickly check rules syntax while developing
4. **Error debugging**: Get detailed error locations with code snippets

## Related Tools

- [firestore-get-rules]({{< ref "firestore-get-rules" >}}): Retrieve current active rules
- [firestore-query-collection]({{< ref "firestore-query-collection" >}}): Test rules by querying collections
37
docs/en/resources/tools/utility/wait.md
Normal file
@@ -0,0 +1,37 @@
---
title: "wait"
type: docs
weight: 1
description: >
  A "wait" tool pauses execution for a specified duration.
aliases:
- /resources/tools/utility/wait
---

## About

A `wait` tool pauses execution for a specified duration. This can be useful in workflows where a delay is needed between steps.

`wait` takes one input parameter `duration` which is a string representing the time to wait (e.g., "10s", "2m", "1h").

{{% notice info %}}
This tool is intended for developer assistant workflows with human-in-the-loop and shouldn't be used for production agents.
{{% /notice %}}

## Example

```yaml
tools:
  wait_for_tool:
    kind: wait
    description: Use this tool to pause execution for a specified duration.
    timeout: 30s
```
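A call to this tool just passes the `duration` argument, for example (hypothetical value):

```yaml
duration: "10s"   # any duration string such as "2m" or "1h"
```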

## Reference

| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "wait". |
| description | string | true | Description of the tool that is passed to the LLM. |
| timeout | string | true | The default duration the tool can wait for. |
@@ -220,7 +220,7 @@
    },
    "outputs": [],
    "source": [
     "version = \"0.8.0\" # x-release-please-version\n",
     "version = \"0.9.0\" # x-release-please-version\n",
     "! curl -O https://storage.googleapis.com/genai-toolbox/v{version}/linux/amd64/toolbox\n",
     "\n",
     "# Make the binary executable\n",
@@ -179,7 +179,7 @@ to use BigQuery, and then run the Toolbox server.
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.8.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.9.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
@@ -292,6 +292,9 @@ to use BigQuery, and then run the Toolbox server.
```bash
./toolbox --tools-file "tools.yaml"
```
{{< notice note >}}
Toolbox enables dynamic reloading by default. To disable, use the `--disable-reload` flag.
{{< /notice >}}

## Step 3: Connect your agent to Toolbox

@@ -98,7 +98,7 @@ In this section, we will download Toolbox, configure our tools in a
<!-- {x-release-please-start-version} -->
```bash
export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
curl -O https://storage.googleapis.com/genai-toolbox/v0.8.0/$OS/toolbox
curl -O https://storage.googleapis.com/genai-toolbox/v0.9.0/$OS/toolbox
```
<!-- {x-release-please-end} -->
@@ -208,17 +208,25 @@ In this section, we will download Toolbox, configure our tools in a

1. Type `y` when it asks to install the inspector package.

1. It should show the following when the MCP Inspector is up and running:
1. It should show the following when the MCP Inspector is up and running (please take note of `<YOUR_SESSION_TOKEN>`):

    ```bash
    🔍 MCP Inspector is up and running at http://127.0.0.1:5173 🚀
    Starting MCP inspector...
    ⚙️ Proxy server listening on localhost:6277
    🔑 Session token: <YOUR_SESSION_TOKEN>
    Use this token to authenticate requests or set DANGEROUSLY_OMIT_AUTH=true to disable auth

    🚀 MCP Inspector is up and running at:
    http://localhost:6274/?MCP_PROXY_AUTH_TOKEN=<YOUR_SESSION_TOKEN>
    ```

1. Open the above link in your browser.

1. For `Transport Type`, select `SSE`.
1. For `Transport Type`, select `Streamable HTTP`.

1. For `URL`, type in `http://127.0.0.1:5000/mcp/sse`.
1. For `URL`, type in `http://127.0.0.1:5000/mcp`.

1. For `Configuration` -> `Proxy Session Token`, make sure `<YOUR_SESSION_TOKEN>` is present.

1. Click Connect.

@@ -228,4 +236,4 @@ In this section, we will download Toolbox, configure our tools in a



1. Test out your tools here!

Binary file not shown. Before: 22 KiB → After: 32 KiB
7
docs/en/sdks/_index.md
Normal file
@@ -0,0 +1,7 @@
---
title: "SDKs"
type: docs
weight: 6
description: >
  Client SDKs to connect to the MCP Toolbox server.
---
15
docs/en/sdks/go-sdk.md
Normal file
@@ -0,0 +1,15 @@
---
title: "Go SDK"
weight: 2
description: Go lang client SDK
icon: fa-brands fa-golang
manualLink: "https://github.com/googleapis/mcp-toolbox-sdk-go"
manualLinkTarget: _blank
---

<html>
  <head>
    <link rel="canonical" href="https://github.com/googleapis/mcp-toolbox-sdk-go"/>
    <meta http-equiv="refresh" content="0;url=https://github.com/googleapis/mcp-toolbox-sdk-go"/>
  </head>
</html>
15
docs/en/sdks/js-sdk.md
Normal file
@@ -0,0 +1,15 @@
---
title: "JS SDK"
weight: 2
description: Javascript client SDK
icon: fa-brands fa-node-js
manualLink: "https://github.com/googleapis/mcp-toolbox-sdk-js"
manualLinkTarget: _blank
---

<html>
  <head>
    <link rel="canonical" href="https://github.com/googleapis/mcp-toolbox-sdk-js"/>
    <meta http-equiv="refresh" content="0;url=https://github.com/googleapis/mcp-toolbox-sdk-js"/>
  </head>
</html>
15
docs/en/sdks/python-sdk.md
Normal file
@@ -0,0 +1,15 @@
---
title: "Python SDK"
weight: 2
description: Python client SDK
icon: fa-brands fa-python
manualLink: "https://github.com/googleapis/mcp-toolbox-sdk-python"
manualLinkTarget: _blank
---

<html>
  <head>
    <link rel="canonical" href="https://github.com/googleapis/mcp-toolbox-sdk-python"/>
    <meta http-equiv="refresh" content="0;url=hhttps://github.com/googleapis/mcp-toolbox-sdk-python"/>
  </head>
</html>
62
go.mod
62
go.mod
@@ -2,18 +2,20 @@ module github.com/googleapis/genai-toolbox
|
||||
|
||||
go 1.23.8
|
||||
|
||||
toolchain go1.24.4
|
||||
toolchain go1.24.5
|
||||
|
||||
require (
|
||||
cloud.google.com/go/alloydbconn v1.15.3
|
||||
cloud.google.com/go/alloydbconn v1.15.4
|
||||
cloud.google.com/go/bigquery v1.69.0
|
||||
cloud.google.com/go/bigtable v1.38.0
|
||||
cloud.google.com/go/cloudsqlconn v1.17.2
|
||||
cloud.google.com/go/cloudsqlconn v1.17.3
|
||||
cloud.google.com/go/firestore v1.18.0
|
||||
cloud.google.com/go/spanner v1.83.0
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0
|
||||
github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.29.0
|
||||
github.com/couchbase/gocb/v2 v2.10.0
|
||||
github.com/couchbase/gocb/v2 v2.10.1
|
||||
github.com/couchbase/tools-common/http v1.0.9
|
||||
github.com/fsnotify/fsnotify v1.9.0
|
||||
github.com/go-chi/chi/v5 v5.2.2
|
||||
github.com/go-chi/httplog/v2 v2.1.1
|
||||
github.com/go-chi/render v1.0.3
|
||||
@@ -28,17 +30,17 @@ require (
|
||||
github.com/neo4j/neo4j-go-driver/v5 v5.28.1
|
||||
github.com/redis/go-redis/v9 v9.11.0
|
||||
github.com/spf13/cobra v1.9.1
|
||||
github.com/valkey-io/valkey-go v1.0.62
|
||||
go.opentelemetry.io/contrib/propagators/autoprop v0.61.0
|
||||
go.opentelemetry.io/otel v1.36.0
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.36.0
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.36.0
|
||||
go.opentelemetry.io/otel/metric v1.36.0
|
||||
go.opentelemetry.io/otel/sdk v1.36.0
|
||||
go.opentelemetry.io/otel/sdk/metric v1.36.0
|
||||
go.opentelemetry.io/otel/trace v1.36.0
|
||||
github.com/valkey-io/valkey-go v1.0.63
|
||||
go.opentelemetry.io/contrib/propagators/autoprop v0.62.0
|
||||
go.opentelemetry.io/otel v1.37.0
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.37.0
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.37.0
|
||||
go.opentelemetry.io/otel/metric v1.37.0
|
||||
go.opentelemetry.io/otel/sdk v1.37.0
|
||||
go.opentelemetry.io/otel/sdk/metric v1.37.0
|
||||
go.opentelemetry.io/otel/trace v1.37.0
|
||||
golang.org/x/oauth2 v0.30.0
|
||||
google.golang.org/api v0.240.0
|
||||
google.golang.org/api v0.242.0
|
||||
modernc.org/sqlite v1.38.0
|
||||
)
|
||||
|
||||
@@ -47,7 +49,7 @@ require golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0 // indirect
|
||||
require (
|
||||
cel.dev/expr v0.23.0 // indirect
|
||||
cloud.google.com/go v0.121.2 // indirect
|
||||
cloud.google.com/go/alloydb v1.16.1 // indirect
|
||||
cloud.google.com/go/alloydb v1.18.0 // indirect
|
||||
cloud.google.com/go/auth v0.16.2 // indirect
|
||||
cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
|
||||
cloud.google.com/go/compute/metadata v0.7.0 // indirect
|
||||
@@ -64,7 +66,7 @@ require (
|
||||
github.com/cenkalti/backoff/v5 v5.0.2 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f // indirect
|
||||
github.com/couchbase/gocbcore/v10 v10.7.0 // indirect
|
||||
github.com/couchbase/gocbcore/v10 v10.7.1 // indirect
|
||||
github.com/couchbase/gocbcoreps v0.1.3 // indirect
|
||||
github.com/couchbase/goprotostellar v1.0.2 // indirect
|
||||
github.com/couchbase/tools-common/errors v1.0.0 // indirect
|
||||
@@ -91,7 +93,7 @@ require (
|
||||
github.com/googleapis/gax-go/v2 v2.14.2 // indirect
|
||||
github.com/gorilla/websocket v1.5.3 // indirect
|
||||
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3 // indirect
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.1.0 // indirect
|
||||
github.com/jackc/pgpassfile v1.0.0 // indirect
|
||||
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
|
||||
@@ -116,26 +118,26 @@ require (
|
||||
go.opentelemetry.io/contrib/detectors/gcp v1.36.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 // indirect
|
||||
go.opentelemetry.io/contrib/propagators/aws v1.36.0 // indirect
|
||||
go.opentelemetry.io/contrib/propagators/b3 v1.36.0 // indirect
|
||||
go.opentelemetry.io/contrib/propagators/jaeger v1.36.0 // indirect
|
||||
go.opentelemetry.io/contrib/propagators/ot v1.36.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.36.0 // indirect
|
||||
go.opentelemetry.io/proto/otlp v1.6.0 // indirect
|
||||
go.opentelemetry.io/contrib/propagators/aws v1.37.0 // indirect
|
||||
go.opentelemetry.io/contrib/propagators/b3 v1.37.0 // indirect
|
||||
go.opentelemetry.io/contrib/propagators/jaeger v1.37.0 // indirect
|
||||
go.opentelemetry.io/contrib/propagators/ot v1.37.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.37.0 // indirect
|
||||
go.opentelemetry.io/proto/otlp v1.7.0 // indirect
|
||||
go.uber.org/multierr v1.11.0 // indirect
|
||||
go.uber.org/zap v1.27.0 // indirect
|
||||
golang.org/x/crypto v0.39.0 // indirect
|
||||
golang.org/x/crypto v0.40.0 // indirect
|
||||
golang.org/x/mod v0.25.0 // indirect
|
||||
golang.org/x/net v0.41.0 // indirect
|
||||
golang.org/x/sync v0.15.0 // indirect
|
||||
golang.org/x/sys v0.33.0 // indirect
|
||||
golang.org/x/text v0.26.0 // indirect
|
||||
golang.org/x/net v0.42.0 // indirect
|
||||
golang.org/x/sync v0.16.0 // indirect
|
||||
golang.org/x/sys v0.34.0 // indirect
|
||||
golang.org/x/text v0.27.0 // indirect
|
||||
golang.org/x/time v0.12.0 // indirect
|
||||
golang.org/x/tools v0.33.0 // indirect
|
||||
golang.org/x/tools v0.34.0 // indirect
|
||||
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect
|
||||
google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 // indirect
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250707201910-8d1bb00bc6a7 // indirect
|
||||
google.golang.org/grpc v1.73.0 // indirect
|
||||
google.golang.org/protobuf v1.36.6 // indirect
|
||||
modernc.org/libc v1.65.10 // indirect
|
||||
|
||||
120
go.sum
120
go.sum
@@ -53,10 +53,10 @@ cloud.google.com/go/aiplatform v1.27.0/go.mod h1:Bvxqtl40l0WImSb04d0hXFU7gDOiq9j
|
||||
cloud.google.com/go/aiplatform v1.35.0/go.mod h1:7MFT/vCaOyZT/4IIFfxH4ErVg/4ku6lKv3w0+tFTgXQ=
|
||||
cloud.google.com/go/aiplatform v1.36.1/go.mod h1:WTm12vJRPARNvJ+v6P52RDHCNe4AhvjcIZ/9/RRHy/k=
|
||||
cloud.google.com/go/aiplatform v1.37.0/go.mod h1:IU2Cv29Lv9oCn/9LkFiiuKfwrRTq+QQMbW+hPCxJGZw=
|
||||
cloud.google.com/go/alloydb v1.16.1 h1:pW4D0O2jAfAjoOEI1bgChPwMHWE8X8BjwSO0tfWkWvk=
|
||||
cloud.google.com/go/alloydb v1.16.1/go.mod h1:zeZuGJ5mEaQE70FMXEvZIp5hQLR9yrGnHo1YUOncWRY=
|
||||
cloud.google.com/go/alloydbconn v1.15.3 h1:j0Y0+LpVjdyUguX0uwsaeTtq4tQUZiFvsO52AH+yusY=
|
||||
cloud.google.com/go/alloydbconn v1.15.3/go.mod h1:9yrNzUeMr3wR/D4gTJrh5ph2VDW/19tAMV7TlNuyRfM=
|
||||
cloud.google.com/go/alloydb v1.18.0 h1:P+s1oek+sF3MlcumZuOj2ueUlusVwr3IT0R0vUbMA88=
|
||||
cloud.google.com/go/alloydb v1.18.0/go.mod h1:iB/PmQYLHwDXCGCc0weeL5ORP6GadFjXJlRZ9pE0vSY=
|
||||
cloud.google.com/go/alloydbconn v1.15.4 h1:RvtwKVq0YxYQFTKaW5jQWGPAVSvxO3ebqKj2oyl009A=
|
||||
cloud.google.com/go/alloydbconn v1.15.4/go.mod h1:m5Db60PJv75Hz9uIaIIJR7ZPQazVC4VGxlhxNqYCBjk=
|
||||
cloud.google.com/go/analytics v0.11.0/go.mod h1:DjEWCu41bVbYcKyvlws9Er60YE4a//bK6mnhWvQeFNI=
|
||||
cloud.google.com/go/analytics v0.12.0/go.mod h1:gkfj9h6XRf9+TS4bmuhPEShsh3hH8PAZzm/41OOhQd4=
|
||||
cloud.google.com/go/analytics v0.17.0/go.mod h1:WXFa3WSym4IZ+JiKmavYdJwGG/CvpqiqczmL59bTD9M=
|
||||
@@ -167,8 +167,8 @@ cloud.google.com/go/cloudbuild v1.9.0/go.mod h1:qK1d7s4QlO0VwfYn5YuClDGg2hfmLZEb
|
||||
cloud.google.com/go/clouddms v1.3.0/go.mod h1:oK6XsCDdW4Ib3jCCBugx+gVjevp2TMXFtgxvPSee3OM=
|
||||
cloud.google.com/go/clouddms v1.4.0/go.mod h1:Eh7sUGCC+aKry14O1NRljhjyrr0NFC0G2cjwX0cByRk=
|
||||
cloud.google.com/go/clouddms v1.5.0/go.mod h1:QSxQnhikCLUw13iAbffF2CZxAER3xDGNHjsTAkQJcQA=
|
||||
cloud.google.com/go/cloudsqlconn v1.17.2 h1:SxSt6ujMxK1KyxKAI2Z5raT2n3geN7ipu6bA8f7iR7E=
|
||||
cloud.google.com/go/cloudsqlconn v1.17.2/go.mod h1:l7NymuoD+hycOo+92SJEyETPtE05oRG4oXjcH3swftw=
|
||||
cloud.google.com/go/cloudsqlconn v1.17.3 h1:dAEgQmhj9NHVRqven4elTBCbOWOtFPSNjAqBoznJSpc=
|
||||
cloud.google.com/go/cloudsqlconn v1.17.3/go.mod h1:5AHAXT4hbs2+EbzNDBxPu9QU+tJwRZyWNPwwiE8MzRs=
|
||||
cloud.google.com/go/cloudtasks v1.5.0/go.mod h1:fD92REy1x5woxkKEkLdvavGnPJGEn8Uic9nWuLzqCpY=
|
||||
cloud.google.com/go/cloudtasks v1.6.0/go.mod h1:C6Io+sxuke9/KNRkbQpihnW93SWDU3uXt92nu85HkYI=
|
||||
cloud.google.com/go/cloudtasks v1.7.0/go.mod h1:ImsfdYWwlWNJbdgPIIGJWC+gemEGTBK/SunNQQNCAb4=
|
||||
@@ -293,6 +293,8 @@ cloud.google.com/go/filestore v1.4.0/go.mod h1:PaG5oDfo9r224f8OYXURtAsY+Fbyq/bLY
|
||||
cloud.google.com/go/filestore v1.5.0/go.mod h1:FqBXDWBp4YLHqRnVGveOkHDf8svj9r5+mUDLupOWEDs=
|
||||
cloud.google.com/go/filestore v1.6.0/go.mod h1:di5unNuss/qfZTw2U9nhFqo8/ZDSc466dre85Kydllg=
|
||||
cloud.google.com/go/firestore v1.9.0/go.mod h1:HMkjKHNTtRyZNiMzu7YAsLr9K3X2udY2AMwDaMEQiiE=
|
||||
cloud.google.com/go/firestore v1.18.0 h1:cuydCaLS7Vl2SatAeivXyhbhDEIR8BDmtn4egDhIn2s=
|
||||
cloud.google.com/go/firestore v1.18.0/go.mod h1:5ye0v48PhseZBdcl0qbl3uttu7FIEwEYVaWm0UIEOEU=
|
||||
cloud.google.com/go/functions v1.6.0/go.mod h1:3H1UA3qiIPRWD7PeZKLvHZ9SaQhR26XIJcC0A5GbvAk=
|
||||
cloud.google.com/go/functions v1.7.0/go.mod h1:+d+QBcWM+RsrgZfV9xo6KfA1GlzJfxcfZcRPEhDDfzg=
|
||||
cloud.google.com/go/functions v1.8.0/go.mod h1:RTZ4/HsQjIqIYP9a9YPbU+QFoQsAlYgrwOXJWHn1POY=
|
||||
@@ -710,10 +712,10 @@ github.com/cncf/xds/go v0.0.0-20230105202645-06c439db220b/go.mod h1:eXthEFrGJvWH
|
||||
github.com/cncf/xds/go v0.0.0-20230607035331-e9ce68804cb4/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
|
||||
github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f h1:C5bqEmzEPLsHm9Mv73lSE9e9bKV23aB1vxOsmZrkl3k=
|
||||
github.com/cncf/xds/go v0.0.0-20250326154945-ae57f3c0d45f/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8=
|
||||
github.com/couchbase/gocb/v2 v2.10.0 h1:NNxZ4okToU1Ylqp6F8tE41CEJQPhb2WjufryAkeubOk=
|
||||
github.com/couchbase/gocb/v2 v2.10.0/go.mod h1:OSbMfQkP7ltbKiDZhsT2mGDhkQNmvGXxptKcxAUJQ2Y=
|
||||
github.com/couchbase/gocbcore/v10 v10.7.0 h1:lAEi0PNeEGKOu8pWrPUdtLOT2oGr1J/UTdGHVPC3r/0=
|
||||
github.com/couchbase/gocbcore/v10 v10.7.0/go.mod h1:Q8JWVenMCEOuRgrDQKApHbzzPif38HzefGgRVe9apAI=
|
||||
github.com/couchbase/gocb/v2 v2.10.1 h1:5r1jngGxw3dTZdtq6Xmjq3pdU6hOwRvynvbVIp58T64=
|
||||
github.com/couchbase/gocb/v2 v2.10.1/go.mod h1:GGEJuYjrfnPHCQLcxTcIco+Puy63PS2p8QQd8FRw66I=
|
||||
github.com/couchbase/gocbcore/v10 v10.7.1 h1:6jsNDtqyfoQ8Xg6kv99rzccc3CrHbp7FjeY+ahWXTF4=
|
||||
github.com/couchbase/gocbcore/v10 v10.7.1/go.mod h1:Q8JWVenMCEOuRgrDQKApHbzzPif38HzefGgRVe9apAI=
|
||||
github.com/couchbase/gocbcoreps v0.1.3 h1:fILaKGCjxFIeCgAUG8FGmRDSpdrRggohOMKEgO9CUpg=
|
||||
github.com/couchbase/gocbcoreps v0.1.3/go.mod h1:hBFpDNPnRno6HH5cRXExhqXYRmTsFJlFHQx7vztcXPk=
|
||||
github.com/couchbase/goprotostellar v1.0.2 h1:yoPbAL9sCtcyZ5e/DcU5PRMOEFaJrF9awXYu3VPfGls=
|
||||
@@ -765,6 +767,8 @@ github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2
|
||||
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
|
||||
github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
|
||||
github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
|
||||
github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
|
||||
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
|
||||
github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM=
|
||||
github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8=
|
||||
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
|
||||
@@ -947,8 +951,8 @@ github.com/grpc-ecosystem/go-grpc-middleware v1.4.0/go.mod h1:g5qyo/la0ALbONm6Vb
|
||||
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4ZsPv9hVvWI6+ch50m39Pf2Ks=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3/go.mod h1:o//XUCC/F+yRGJoPO/VU0GSB0f8Nhgmxx0VIRUvaC0w=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3 h1:5ZPtiqj0JL5oKWmcsq4VMaAW5ukBEgSGXEN89zeH1Jo=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3/go.mod h1:ndYquD05frm2vACXE1nsccT4oJzjhw2arTS2cpUD1PI=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1 h1:X5VWvz21y3gzm9Nw/kaUeku/1+uBhcekkmy4IkffJww=
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1/go.mod h1:Zanoh4+gvIgluNqcfMVTJueD4wSS5hT7zTt4Mrutd90=
|
||||
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
||||
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
||||
github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
|
||||
@@ -1092,8 +1096,8 @@ github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o
|
||||
github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA=
|
||||
github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/valkey-io/valkey-go v1.0.62 h1:oQdPlQGRyxcQWL8fnu6J3SCaQwayc/hRZifjJIaJqu0=
|
||||
github.com/valkey-io/valkey-go v1.0.62/go.mod h1:bHmwjIEOrGq/ubOJfh5uMRs7Xj6mV3mQ/ZXUbmqpjqY=
|
||||
github.com/valkey-io/valkey-go v1.0.63 h1:LNlDTcUxy9jxrmGHSvd0s/NsgEmQbvREYvvBAHCIir0=
|
||||
github.com/valkey-io/valkey-go v1.0.63/go.mod h1:bHmwjIEOrGq/ubOJfh5uMRs7Xj6mV3mQ/ZXUbmqpjqY=
|
||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 h1:ilQV1hzziu+LLM3zUTJ0trRztfwgjqKnBWNtSRkbmwM=
|
||||
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78/go.mod h1:aL8wCCfTfSfmXjznFBSZNN13rSJjlIOI1fUNAtF7rmI=
|
||||
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
@@ -1126,37 +1130,37 @@ go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.6
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0/go.mod h1:snMWehoOh2wsEwnvvwtDyFCxVeDAODenXHtn5vzrKjo=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0 h1:F7Jx+6hwnZ41NSFTO5q4LYDtJRXBf2PD0rNBkeB/lus=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q=
|
||||
go.opentelemetry.io/contrib/propagators/autoprop v0.61.0 h1:cxOVDJ30qfzV27G5p9WMtJUB/3cXC0iL+u9EV1fSOws=
|
||||
go.opentelemetry.io/contrib/propagators/autoprop v0.61.0/go.mod h1:Y+xiUbWetg65vAroDZcIzJ5wyPNWRH32EoIV9rIaa0g=
|
||||
go.opentelemetry.io/contrib/propagators/aws v1.36.0 h1:Txhy/1LZIbbnutftc5pdU8Y9vOQuAkuIOFXuLsdDejs=
|
||||
go.opentelemetry.io/contrib/propagators/aws v1.36.0/go.mod h1:M3A0491jGFPNHU8b3zEW7r/gtsMpGOsFUO3WL+SZ1xw=
|
||||
go.opentelemetry.io/contrib/propagators/b3 v1.36.0 h1:xrAb/G80z/l5JL6XlmUMSD1i6W8vXkWrLfmkD3w/zZo=
|
||||
go.opentelemetry.io/contrib/propagators/b3 v1.36.0/go.mod h1:UREJtqioFu5awNaCR8aEx7MfJROFlAWb6lPaJFbHaG0=
|
||||
go.opentelemetry.io/contrib/propagators/jaeger v1.36.0 h1:SoCgXYF4ISDtNyfLUzsGDaaudZVTx2yJhOyBO0+/GYk=
|
||||
go.opentelemetry.io/contrib/propagators/jaeger v1.36.0/go.mod h1:VHu48l0YTRKSObdPQ+Sb8xMZvdnJlN7yhHuHoPgNqHM=
|
||||
go.opentelemetry.io/contrib/propagators/ot v1.36.0 h1:UBoZjbx483GslNKYK2YpfvePTJV4BHGeFd8+b7dexiM=
|
||||
go.opentelemetry.io/contrib/propagators/ot v1.36.0/go.mod h1:adDDRry19/n9WoA7mSCMjoVJcmzK/bZYzX9SR+g2+W4=
|
||||
go.opentelemetry.io/otel v1.36.0 h1:UumtzIklRBY6cI/lllNZlALOF5nNIzJVb16APdvgTXg=
|
||||
go.opentelemetry.io/otel v1.36.0/go.mod h1:/TcFMXYjyRNh8khOAO9ybYkqaDBb/70aVwkNML4pP8E=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.36.0 h1:gAU726w9J8fwr4qRDqu1GYMNNs4gXrU+Pv20/N1UpB4=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.36.0/go.mod h1:RboSDkp7N292rgu+T0MgVt2qgFGu6qa1RpZDOtpL76w=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.36.0 h1:dNzwXjZKpMpE2JhmO+9HsPl42NIXFIFSUSSs0fiqra0=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.36.0/go.mod h1:90PoxvaEB5n6AOdZvi+yWJQoE95U8Dhhw2bSyRqnTD0=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.36.0 h1:nRVXXvf78e00EwY6Wp0YII8ww2JVWshZ20HfTlE11AM=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.36.0/go.mod h1:r49hO7CgrxY9Voaj3Xe8pANWtr0Oq916d0XAmOoCZAQ=
|
||||
go.opentelemetry.io/otel/metric v1.36.0 h1:MoWPKVhQvJ+eeXWHFBOPoBOi20jh6Iq2CcCREuTYufE=
|
||||
go.opentelemetry.io/otel/metric v1.36.0/go.mod h1:zC7Ks+yeyJt4xig9DEw9kuUFe5C3zLbVjV2PzT6qzbs=
|
||||
go.opentelemetry.io/otel/sdk v1.36.0 h1:b6SYIuLRs88ztox4EyrvRti80uXIFy+Sqzoh9kFULbs=
|
||||
go.opentelemetry.io/otel/sdk v1.36.0/go.mod h1:+lC+mTgD+MUWfjJubi2vvXWcVxyr9rmlshZni72pXeY=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.36.0 h1:r0ntwwGosWGaa0CrSt8cuNuTcccMXERFwHX4dThiPis=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.36.0/go.mod h1:qTNOhFDfKRwX0yXOqJYegL5WRaW376QbB7P4Pb0qva4=
|
||||
go.opentelemetry.io/otel/trace v1.36.0 h1:ahxWNuqZjpdiFAyrIoQ4GIiAIhxAunQR6MUoKrsNd4w=
|
||||
go.opentelemetry.io/otel/trace v1.36.0/go.mod h1:gQ+OnDZzrybY4k4seLzPAWNwVBBVlF2szhehOBB/tGA=
|
||||
go.opentelemetry.io/contrib/propagators/autoprop v0.62.0 h1:1+EHlhAe/tukctfePZRrDruB9vn7MdwyC+rf36nUSPM=
|
||||
go.opentelemetry.io/contrib/propagators/autoprop v0.62.0/go.mod h1:skzESZBY3IYcqJgImc+fwXQWflvVe+jZxoA/uw60NaI=
|
||||
go.opentelemetry.io/contrib/propagators/aws v1.37.0 h1:cp8AFiM/qjBm10C/ATIRnEDXpD5MBknrA0ANw4T2/ss=
|
||||
go.opentelemetry.io/contrib/propagators/aws v1.37.0/go.mod h1:Cy8Hk2E2iSGEbsLnPUdeigrexaAOAGIAmBFK919EQs0=
|
||||
go.opentelemetry.io/contrib/propagators/b3 v1.37.0 h1:0aGKdIuVhy5l4GClAjl72ntkZJhijf2wg1S7b5oLoYA=
|
||||
go.opentelemetry.io/contrib/propagators/b3 v1.37.0/go.mod h1:nhyrxEJEOQdwR15zXrCKI6+cJK60PXAkJ/jRyfhr2mg=
|
||||
go.opentelemetry.io/contrib/propagators/jaeger v1.37.0 h1:pW+qDVo0jB0rLsNeaP85xLuz20cvsECUcN7TE+D8YTM=
|
||||
go.opentelemetry.io/contrib/propagators/jaeger v1.37.0/go.mod h1:x7bd+t034hxLTve1hF9Yn9qQJlO/pP8H5pWIt7+gsFM=
|
||||
go.opentelemetry.io/contrib/propagators/ot v1.37.0 h1:tVjnBF6EiTDMXoq2Xuc2vK0I7MTbEs05II/0j9mMK+E=
|
||||
go.opentelemetry.io/contrib/propagators/ot v1.37.0/go.mod h1:MQjyNXtxAC8PGN9gzPtO4GY5zuP+RI3XX53uWbCTvEQ=
|
||||
go.opentelemetry.io/otel v1.37.0 h1:9zhNfelUvx0KBfu/gb+ZgeAfAgtWrfHJZcAqFC228wQ=
|
||||
go.opentelemetry.io/otel v1.37.0/go.mod h1:ehE/umFRLnuLa/vSccNq9oS1ErUlkkK71gMcN34UG8I=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.37.0 h1:9PgnL3QNlj10uGxExowIDIZu66aVBwWhXmbOp1pa6RA=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlpmetric/otlpmetrichttp v1.37.0/go.mod h1:0ineDcLELf6JmKfuo0wvvhAVMuxWFYvkTin2iV4ydPQ=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.37.0 h1:Ahq7pZmv87yiyn3jeFz/LekZmPLLdKejuO3NcK9MssM=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.37.0/go.mod h1:MJTqhM0im3mRLw1i8uGHnCvUEeS7VwRyxlLC78PA18M=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.37.0 h1:bDMKF3RUSxshZ5OjOTi8rsHGaPKsAt76FaqgvIUySLc=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.37.0/go.mod h1:dDT67G/IkA46Mr2l9Uj7HsQVwsjASyV9SjGofsiUZDA=
|
||||
go.opentelemetry.io/otel/metric v1.37.0 h1:mvwbQS5m0tbmqML4NqK+e3aDiO02vsf/WgbsdpcPoZE=
|
||||
go.opentelemetry.io/otel/metric v1.37.0/go.mod h1:04wGrZurHYKOc+RKeye86GwKiTb9FKm1WHtO+4EVr2E=
|
||||
go.opentelemetry.io/otel/sdk v1.37.0 h1:ItB0QUqnjesGRvNcmAcU0LyvkVyGJ2xftD29bWdDvKI=
|
||||
go.opentelemetry.io/otel/sdk v1.37.0/go.mod h1:VredYzxUvuo2q3WRcDnKDjbdvmO0sCzOvVAiY+yUkAg=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.37.0 h1:90lI228XrB9jCMuSdA0673aubgRobVZFhbjxHHspCPc=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.37.0/go.mod h1:cNen4ZWfiD37l5NhS+Keb5RXVWZWpRE+9WyVCpbo5ps=
|
||||
go.opentelemetry.io/otel/trace v1.37.0 h1:HLdcFNbRQBE2imdSEgm/kwqmQj1Or1l/7bW6mxVK7z4=
|
||||
go.opentelemetry.io/otel/trace v1.37.0/go.mod h1:TlgrlQ+PtQO5XFerSPUYG0JSgGyryXewPGyayAWSBS0=
|
||||
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
|
||||
go.opentelemetry.io/proto/otlp v0.15.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U=
|
||||
go.opentelemetry.io/proto/otlp v0.19.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U=
|
||||
go.opentelemetry.io/proto/otlp v1.6.0 h1:jQjP+AQyTf+Fe7OKj/MfkDrmK4MNVtw2NpXsf9fefDI=
|
||||
go.opentelemetry.io/proto/otlp v1.6.0/go.mod h1:cicgGehlFuNdgZkcALOCh3VE6K/u2tAjzlRhDwmVpZc=
|
||||
go.opentelemetry.io/proto/otlp v1.7.0 h1:jX1VolD6nHuFzOYso2E73H85i92Mv8JQYk0K9vz09os=
|
||||
go.opentelemetry.io/proto/otlp v1.7.0/go.mod h1:fSKjH6YJ7HDlwzltzyMj036AJ3ejJLCgCSHGj4efDDo=
|
||||
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
|
||||
go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
@@ -1176,8 +1180,8 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh
|
||||
golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM=
|
||||
golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U=
|
||||
golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM=
|
||||
golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY=
|
||||
golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
@@ -1296,8 +1300,8 @@ golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
|
||||
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
|
||||
golang.org/x/net v0.41.0 h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw=
|
||||
golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA=
|
||||
golang.org/x/net v0.42.0 h1:jzkYrhi3YQWD6MLBJcsklgQsoAcw89EcZbJw8Z614hs=
|
||||
golang.org/x/net v0.42.0/go.mod h1:FF1RA5d3u7nAYA4z2TkclSCKh68eSXtiFwcWQpPXdt8=
|
||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
|
||||
@@ -1345,8 +1349,8 @@ golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJ
|
||||
golang.org/x/sync v0.0.0-20220819030929-7fc1605a5dde/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8=
|
||||
golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||
golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw=
|
||||
golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
|
||||
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
@@ -1426,8 +1430,8 @@ golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
|
||||
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/sys v0.34.0 h1:H5Y5sJ2L2JRdyv7ROF1he/lPdvFsd0mJHFw2ThKHxLA=
|
||||
golang.org/x/sys v0.34.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
|
||||
@@ -1452,8 +1456,8 @@ golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||
golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M=
|
||||
golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA=
|
||||
golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4=
|
||||
golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU=
|
||||
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
@@ -1526,8 +1530,8 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc
|
||||
golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k=
|
||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s=
|
||||
golang.org/x/tools v0.33.0 h1:4qz2S3zmRxbGIhDIAgjxvFutSvH5EfnsYrRBj0UI0bc=
|
||||
golang.org/x/tools v0.33.0/go.mod h1:CIJMaWEY88juyUfo7UbgPqbC8rU2OqfAV1h2Qp0oMYI=
|
||||
golang.org/x/tools v0.34.0 h1:qIpSLOxeCYGg9TrcJokLBG4KFA6d795g0xkBkiESGlo=
|
||||
golang.org/x/tools v0.34.0/go.mod h1:pAP9OwEaY1CAW3HOmg3hLZC5Z0CCmzjAF2UQMSqNARg=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
@@ -1605,8 +1609,8 @@ google.golang.org/api v0.108.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/
|
||||
google.golang.org/api v0.110.0/go.mod h1:7FC4Vvx1Mooxh8C5HWjzZHcavuS2f6pmJpZx60ca7iI=
|
||||
google.golang.org/api v0.111.0/go.mod h1:qtFHvU9mhgTJegR31csQ+rwxyUTHOKFqCKWp1J0fdw0=
|
||||
google.golang.org/api v0.114.0/go.mod h1:ifYI2ZsFK6/uGddGfAD5BMxlnkBqCmqHSDUVi45N5Yg=
|
||||
google.golang.org/api v0.240.0 h1:PxG3AA2UIqT1ofIzWV2COM3j3JagKTKSwy7L6RHNXNU=
|
||||
google.golang.org/api v0.240.0/go.mod h1:cOVEm2TpdAGHL2z+UwyS+kmlGr3bVWQQ6sYEqkKje50=
|
||||
google.golang.org/api v0.242.0 h1:7Lnb1nfnpvbkCiZek6IXKdJ0MFuAZNAJKQfA1ws62xg=
|
||||
google.golang.org/api v0.242.0/go.mod h1:cOVEm2TpdAGHL2z+UwyS+kmlGr3bVWQQ6sYEqkKje50=
|
||||
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
|
||||
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
@@ -1751,8 +1755,8 @@ google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2 h1:1tXaIXCracvtsRx
|
||||
google.golang.org/genproto v0.0.0-20250505200425-f936aa4a68b2/go.mod h1:49MsLSx0oWMOZqcpB3uL8ZOkAh1+TndpJ8ONoCBWiZk=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822 h1:oWVWY3NzT7KJppx2UKhKmzPq4SRe0LdCijVRwvGeikY=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20250603155806-513f23925822/go.mod h1:h3c4v36UTKzUiuaOKQ6gr3S+0hovBtUrXzTG/i3+XEc=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822 h1:fc6jSaCT0vBduLYZHYrBBNY4dsWuvgyff9noRNDdBeE=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250603155806-513f23925822/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250707201910-8d1bb00bc6a7 h1:pFyd6EwwL2TqFf8emdthzeX+gZE1ElRq3iM8pui4KBY=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20250707201910-8d1bb00bc6a7/go.mod h1:qQ0YXyHHx3XkvlzUtpXDkS29lDSafHMZBAZDc03LQ3A=
|
||||
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
||||
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
|
||||
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
|
||||
|
||||
@@ -28,6 +28,7 @@ func TestLoadPrebuiltToolYAMLs(t *testing.T) {
		"cloud-sql-mssql",
		"cloud-sql-mysql",
		"cloud-sql-postgres",
		"firestore",
		"postgres",
		"spanner-postgres",
		"spanner",
@@ -68,6 +69,7 @@ func TestGetPrebuiltTool(t *testing.T) {
	cloudsqlpg_config, _ := Get("cloud-sql-postgres")
	cloudsqlmysql_config, _ := Get("cloud-sql-mysql")
	cloudsqlmssql_config, _ := Get("cloud-sql-mssql")
	firestoreconfig, _ := Get("firestore")
	postgresconfig, _ := Get("postgres")
	spanner_config, _ := Get("spanner")
	spannerpg_config, _ := Get("spanner-postgres")
@@ -86,6 +88,9 @@ func TestGetPrebuiltTool(t *testing.T) {
	if len(cloudsqlmssql_config) <= 0 {
		t.Fatalf("unexpected error: could not fetch cloud sql mssql prebuilt tools yaml")
	}
	if len(firestoreconfig) <= 0 {
		t.Fatalf("unexpected error: could not fetch firestore prebuilt tools yaml")
	}
	if len(postgresconfig) <= 0 {
		t.Fatalf("unexpected error: could not fetch postgres prebuilt tools yaml")
	}

@@ -154,9 +154,10 @@ tools:
          ) USING utf8mb4) AS object_details
        FROM
          INFORMATION_SCHEMA.TABLES T
          CROSS JOIN (SELECT @table_names := ?) AS variables
        WHERE
          T.TABLE_SCHEMA NOT IN ('mysql', 'information_schema', 'performance_schema', 'sys')
          AND (NULLIF(TRIM(?), '') IS NULL OR FIND_IN_SET(T.TABLE_NAME, ?))
          AND (NULLIF(TRIM(@table_names), '') IS NULL OR FIND_IN_SET(T.TABLE_NAME, @table_names))
          AND T.TABLE_TYPE = 'BASE TABLE'
        ORDER BY
          T.TABLE_SCHEMA, T.TABLE_NAME;
@@ -164,6 +165,7 @@ tools:
      - name: table_names
        type: string
        description: "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed."
        default: ""
  toolsets:
    cloud-sql-mysql-database-tools:
      - execute_sql

42  internal/prebuiltconfigs/tools/firestore.yaml  Normal file
@@ -0,0 +1,42 @@
sources:
  firestore-source:
    kind: firestore
    project: ${FIRESTORE_PROJECT}
    database: ${FIRESTORE_DATABASE} # Optional, defaults to "(default)" if not specified

tools:
  firestore-get-documents:
    kind: firestore-get-documents
    source: firestore-source
    description: Gets multiple documents from Firestore by their paths
  firestore-list-collections:
    kind: firestore-list-collections
    source: firestore-source
    description: List Firestore collections for a given parent path
  firestore-delete-documents:
    kind: firestore-delete-documents
    source: firestore-source
    description: Delete multiple documents from Firestore
  firestore-query-collection:
    kind: firestore-query-collection
    source: firestore-source
    description: |
      Retrieves one or more Firestore documents from a collection in a database in the current project by a collection with a full document path.
      Use this if you know the exact path of a collection and the filtering clause you would like for the document.
  firestore-get-rules:
    kind: firestore-get-rules
    source: firestore-source
    description: Retrieves the active Firestore security rules for the current project
  firestore-validate-rules:
    kind: firestore-validate-rules
    source: firestore-source
    description: Checks the provided Firestore Rules source for syntax and validation errors. Provide the source code to validate.

toolsets:
  firestore-database-tools:
    - firestore-get-documents
    - firestore-list-collections
    - firestore-delete-documents
    - firestore-query-collection
    - firestore-get-rules
    - firestore-validate-rules

@@ -75,7 +75,7 @@ func toolsetHandler(s *Server, w http.ResponseWriter, r *http.Request) {
		)
	}()

	toolset, ok := s.toolsets[toolsetName]
	toolset, ok := s.ResourceMgr.GetToolset(toolsetName)
	if !ok {
		err = fmt.Errorf("toolset %q does not exist", toolsetName)
		s.logger.DebugContext(ctx, err.Error())
@@ -111,7 +111,7 @@ func toolGetHandler(s *Server, w http.ResponseWriter, r *http.Request) {
			metric.WithAttributes(attribute.String("toolbox.operation.status", status)),
		)
	}()
	tool, ok := s.tools[toolName]
	tool, ok := s.ResourceMgr.GetTool(toolName)
	if !ok {
		err = fmt.Errorf("invalid tool name: tool with name %q does not exist", toolName)
		s.logger.DebugContext(ctx, err.Error())
@@ -133,6 +133,7 @@ func toolGetHandler(s *Server, w http.ResponseWriter, r *http.Request) {
func toolInvokeHandler(s *Server, w http.ResponseWriter, r *http.Request) {
	ctx, span := s.instrumentation.Tracer.Start(r.Context(), "toolbox/server/tool/invoke")
	r = r.WithContext(ctx)
	ctx = util.WithLogger(r.Context(), s.logger)

	toolName := chi.URLParam(r, "toolName")
	s.logger.DebugContext(ctx, fmt.Sprintf("tool name: %s", toolName))
@@ -156,7 +157,7 @@ func toolInvokeHandler(s *Server, w http.ResponseWriter, r *http.Request) {
		)
	}()

	tool, ok := s.tools[toolName]
	tool, ok := s.ResourceMgr.GetTool(toolName)
	if !ok {
		err = fmt.Errorf("invalid tool name: tool with name %q does not exist", toolName)
		s.logger.DebugContext(ctx, err.Error())
@@ -167,7 +168,7 @@ func toolInvokeHandler(s *Server, w http.ResponseWriter, r *http.Request) {
	// Tool authentication
	// claimsFromAuth maps the name of the authservice to the claims retrieved from it.
	claimsFromAuth := make(map[string]map[string]any)
	for _, aS := range s.authServices {
	for _, aS := range s.ResourceMgr.GetAuthServiceMap() {
		claims, err := aS.GetClaimsFromHeader(ctx, r.Header)
		if err != nil {
			s.logger.DebugContext(ctx, err.Error())

@@ -42,7 +42,7 @@ type MockTool struct {
	manifest tools.Manifest
}

func (t MockTool) Invoke(context.Context, tools.ParamValues) ([]any, error) {
func (t MockTool) Invoke(context.Context, tools.ParamValues) (any, error) {
	mock := []any{t.Name}
	return mock, nil
}
@@ -147,14 +147,23 @@ func setUpServer(t *testing.T, router string, tools map[string]tools.Tool, tools
		t.Fatalf("unable to setup otel: %s", err)
	}

	instrumentation, err := CreateTelemetryInstrumentation(fakeVersionString)
	instrumentation, err := telemetry.CreateTelemetryInstrumentation(fakeVersionString)
	if err != nil {
		t.Fatalf("unable to create custom metrics: %s", err)
	}

	sseManager := newSseManager(ctx)

	server := Server{version: fakeVersionString, logger: testLogger, instrumentation: instrumentation, sseManager: sseManager, tools: tools, toolsets: toolsets}
	resourceManager := NewResourceManager(nil, nil, tools, toolsets)

	server := Server{
		version:         fakeVersionString,
		logger:          testLogger,
		instrumentation: instrumentation,
		sseManager:      sseManager,
		ResourceMgr:     resourceManager,
	}

	var r chi.Router
	switch router {
	case "api":

@@ -53,6 +53,8 @@ type ServerConfig struct {
	TelemetryServiceName string
	// Stdio indicates if Toolbox is listening via MCP stdio.
	Stdio bool
	// DisableReload indicates if the user has disabled dynamic reloading for Toolbox.
	DisableReload bool
}

type logFormat string

@@ -357,6 +357,17 @@ func httpHandler(s *Server, w http.ResponseWriter, r *http.Request) {
		protocolVersion = v20250326.PROTOCOL_VERSION
	}

	// check if the client has set the `MCP-Protocol-Version` header
	headerProtocolVersion := r.Header.Get("MCP-Protocol-Version")
	if headerProtocolVersion != "" {
		if !mcp.VerifyProtocolVersion(headerProtocolVersion) {
			err := fmt.Errorf("invalid protocol version: %s", headerProtocolVersion)
			_ = render.Render(w, r, newErrResponse(err, http.StatusBadRequest))
			return
		}
		protocolVersion = headerProtocolVersion
	}

	toolsetName := chi.URLParam(r, "toolsetName")
	s.logger.DebugContext(ctx, fmt.Sprintf("toolset name: %s", toolsetName))
	span.SetAttributes(attribute.String("toolset_name", toolsetName))
@@ -387,6 +398,7 @@ func httpHandler(s *Server, w http.ResponseWriter, r *http.Request) {
		id := uuid.New().String()
		s.logger.DebugContext(ctx, err.Error())
		render.JSON(w, r, jsonrpc.NewError(id, jsonrpc.PARSE_ERROR, err.Error(), nil))
		return
	}

	v, res, err := processMcpMessage(ctx, body, s, protocolVersion, toolsetName)
@@ -431,10 +443,6 @@ func processMcpMessage(ctx context.Context, body []byte, s *Server, protocolVers
		return "", jsonrpc.NewError("", jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
	}

	if protocolVersion == "" {
		protocolVersion = v20241105.PROTOCOL_VERSION
	}

	// Generic baseMessage could either be a JSONRPCNotification or JSONRPCRequest
	var baseMessage jsonrpc.BaseMessage
	if err = util.DecodeJSON(bytes.NewBuffer(body), &baseMessage); err != nil {
@@ -479,12 +487,12 @@ func processMcpMessage(ctx context.Context, body []byte, s *Server, protocolVers
		}
		return v, res, err
	default:
		toolset, ok := s.toolsets[toolsetName]
		toolset, ok := s.ResourceMgr.GetToolset(toolsetName)
		if !ok {
			err = fmt.Errorf("toolset does not exist")
			return "", jsonrpc.NewError(baseMessage.Id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
		}
		res, err := mcp.ProcessMethod(ctx, protocolVersion, baseMessage.Id, baseMessage.Method, toolset, s.tools, body)
		res, err := mcp.ProcessMethod(ctx, protocolVersion, baseMessage.Id, baseMessage.Method, toolset, s.ResourceMgr.GetToolsMap(), body)
		return "", res, err
	}
}

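For reference, a minimal client-side sketch (not part of the diff) of how the new header check can be exercised over HTTP. The endpoint path and port are assumptions for illustration; a request carrying a version outside SUPPORTED_PROTOCOL_VERSIONS should come back as 400 Bad Request.

package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	// Hypothetical MCP endpoint; adjust host, port, and path for your deployment.
	body := []byte(`{"jsonrpc":"2.0","id":"1","method":"tools/list"}`)
	req, err := http.NewRequest(http.MethodPost, "http://127.0.0.1:5000/mcp", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	// Version negotiated during initialize; an unknown value triggers the 400 path above.
	req.Header.Set("MCP-Protocol-Version", "2025-06-18")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}
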
@@ -24,15 +24,20 @@ import (
	mcputil "github.com/googleapis/genai-toolbox/internal/server/mcp/util"
	v20241105 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20241105"
	v20250326 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20250326"
	v20250618 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20250618"
	"github.com/googleapis/genai-toolbox/internal/tools"
)

// LATEST_PROTOCOL_VERSION is the latest version of the MCP protocol supported.
// Update the version used in InitializeResponse when this value is updated.
const LATEST_PROTOCOL_VERSION = v20250326.PROTOCOL_VERSION
const LATEST_PROTOCOL_VERSION = v20250618.PROTOCOL_VERSION

// SUPPORTED_PROTOCOL_VERSIONS is the MCP protocol versions that are supported.
var SUPPORTED_PROTOCOL_VERSIONS = []string{v20241105.PROTOCOL_VERSION, v20250326.PROTOCOL_VERSION}
var SUPPORTED_PROTOCOL_VERSIONS = []string{
	v20241105.PROTOCOL_VERSION,
	v20250326.PROTOCOL_VERSION,
	v20250618.PROTOCOL_VERSION,
}

// InitializeResponse runs capability negotiation and protocol version agreement.
// This is the Initialization phase of the lifecycle for MCP client-server connections.
@@ -61,7 +66,9 @@ func InitializeResponse(ctx context.Context, id jsonrpc.RequestId, body []byte,
			},
		},
		ServerInfo: mcputil.Implementation{
			Name: mcputil.SERVER_NAME,
			BaseMetadata: mcputil.BaseMetadata{
				Name: mcputil.SERVER_NAME,
			},
			Version: toolboxVersion,
		},
	}
@@ -88,12 +95,16 @@ func NotificationHandler(ctx context.Context, body []byte) error {
// This is the Operation phase of the lifecycle for MCP client-server connections.
func ProcessMethod(ctx context.Context, mcpVersion string, id jsonrpc.RequestId, method string, toolset tools.Toolset, tools map[string]tools.Tool, body []byte) (any, error) {
	switch mcpVersion {
	case v20250618.PROTOCOL_VERSION:
		return v20250618.ProcessMethod(ctx, id, method, toolset, tools, body)
	case v20250326.PROTOCOL_VERSION:
		return v20250326.ProcessMethod(ctx, id, method, toolset, tools, body)
	case v20241105.PROTOCOL_VERSION:
		return v20241105.ProcessMethod(ctx, id, method, toolset, tools, body)
	default:
		err := fmt.Errorf("invalid protocol version: %s", mcpVersion)
		return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
		return v20241105.ProcessMethod(ctx, id, method, toolset, tools, body)
	}
}

// VerifyProtocolVersion verifies if the version string is valid.
func VerifyProtocolVersion(version string) bool {
	return slices.Contains(SUPPORTED_PROTOCOL_VERSIONS, version)
}

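Taken together with the header check in httpHandler above, the negotiation boils down to a small amount of logic: use the session default unless the client sent a valid MCP-Protocol-Version header. A standalone sketch of that flow, using placeholder names (verify stands in for mcp.VerifyProtocolVersion, latest for mcp.LATEST_PROTOCOL_VERSION; neither is part of this diff):

package main

import "fmt"

// resolveProtocolVersion picks the effective MCP protocol version for a request.
func resolveProtocolVersion(header string, verify func(string) bool, latest string) (string, error) {
	if header == "" {
		// No header sent: fall back to the default for the session.
		return latest, nil
	}
	if !verify(header) {
		return "", fmt.Errorf("invalid protocol version: %s", header)
	}
	return header, nil
}

func main() {
	supported := map[string]bool{"2024-11-05": true, "2025-03-26": true, "2025-06-18": true}
	verify := func(v string) bool { return supported[v] }
	v, err := resolveProtocolVersion("2025-06-18", verify, "2025-06-18")
	fmt.Println(v, err)
}
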
@@ -89,8 +89,22 @@ type ServerCapabilities struct {
	Tools *ListChanged `json:"tools,omitempty"`
}

// Base interface for metadata with name (identifier) and title (display name) properties.
type BaseMetadata struct {
	// Intended for programmatic or logical use, but used as a display name in past specs
	// or fallback (if title isn't present).
	Name string `json:"name"`
	// Intended for UI and end-user contexts: optimized to be human-readable and easily understood,
	// even by those unfamiliar with domain-specific terminology.
	//
	// If not provided, the name should be used for display (except for Tool,
	// where `annotations.title` should be given precedence over using `name`,
	// if present).
	Title string `json:"title,omitempty"`
}

// Implementation describes the name and version of an MCP implementation.
type Implementation struct {
	Name string `json:"name"`
	BaseMetadata
	Version string `json:"version"`
}

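Because BaseMetadata is embedded, its fields are promoted when Implementation is marshalled, so the serverInfo wire format stays the same for clients that only read name and version. A small standalone sketch (not part of the diff; the version string is an arbitrary example value) showing the resulting JSON:

package main

import (
	"encoding/json"
	"fmt"
)

type BaseMetadata struct {
	Name  string `json:"name"`
	Title string `json:"title,omitempty"`
}

type Implementation struct {
	BaseMetadata
	Version string `json:"version"`
}

func main() {
	info := Implementation{BaseMetadata: BaseMetadata{Name: "Toolbox"}, Version: "0.0.0"}
	out, _ := json.Marshal(info)
	// Embedded fields are flattened into the top-level object:
	fmt.Println(string(out)) // {"name":"Toolbox","version":"0.0.0"}
}
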
@@ -122,7 +122,13 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, tools map[strin
	}

	content := make([]TextContent, 0)
	for _, d := range results {

	sliceRes, ok := results.([]any)
	if !ok {
		sliceRes = []any{results}
	}

	for _, d := range sliceRes {
		text := TextContent{Type: "text"}
		dM, err := json.Marshal(d)
		if err != nil {

@@ -122,7 +122,13 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, tools map[strin
	}

	content := make([]TextContent, 0)
	for _, d := range results {

	sliceRes, ok := results.([]any)
	if !ok {
		sliceRes = []any{results}
	}

	for _, d := range sliceRes {
		text := TextContent{Type: "text"}
		dM, err := json.Marshal(d)
		if err != nil {

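This hunk appears twice in the view, presumably once for each of the existing protocol handlers (the file headers are not shown here). The shape is the same in both because Tool.Invoke now returns any rather than []any (see the MockTool signature change above), so each handler normalizes scalar results into a slice before rendering text content. A standalone sketch of that normalization, using a placeholder helper name:

package main

import "fmt"

// normalizeResults wraps a non-slice tool result so downstream code can always
// iterate over []any. Illustrative helper only; the real handlers inline this logic.
func normalizeResults(results any) []any {
	if s, ok := results.([]any); ok {
		return s
	}
	return []any{results}
}

func main() {
	fmt.Println(normalizeResults("single value")) // [single value]
	fmt.Println(normalizeResults([]any{1, 2, 3})) // [1 2 3]
}
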
147  internal/server/mcp/v20250618/method.go  Normal file
@@ -0,0 +1,147 @@
|
||||
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package v20250618
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
|
||||
"github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
)
|
||||
|
||||
// ProcessMethod returns a response for the request.
|
||||
func ProcessMethod(ctx context.Context, id jsonrpc.RequestId, method string, toolset tools.Toolset, tools map[string]tools.Tool, body []byte) (any, error) {
|
||||
switch method {
|
||||
case TOOLS_LIST:
|
||||
return toolsListHandler(id, toolset, body)
|
||||
case TOOLS_CALL:
|
||||
return toolsCallHandler(ctx, id, tools, body)
|
||||
default:
|
||||
err := fmt.Errorf("invalid method %s", method)
|
||||
return jsonrpc.NewError(id, jsonrpc.METHOD_NOT_FOUND, err.Error(), nil), err
|
||||
}
|
||||
}
|
||||
|
||||
func toolsListHandler(id jsonrpc.RequestId, toolset tools.Toolset, body []byte) (any, error) {
|
||||
var req ListToolsRequest
|
||||
if err := json.Unmarshal(body, &req); err != nil {
|
||||
err = fmt.Errorf("invalid mcp tools list request: %w", err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
|
||||
result := ListToolsResult{
|
||||
Tools: toolset.McpManifest,
|
||||
}
|
||||
return jsonrpc.JSONRPCResponse{
|
||||
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||
Id: id,
|
||||
Result: result,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// toolsCallHandler generates a response for tools call.
|
||||
func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, tools map[string]tools.Tool, body []byte) (any, error) {
|
||||
// retrieve logger from context
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||
}
|
||||
|
||||
var req CallToolRequest
|
||||
if err = json.Unmarshal(body, &req); err != nil {
|
||||
err = fmt.Errorf("invalid mcp tools call request: %w", err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
|
||||
toolName := req.Params.Name
|
||||
toolArgument := req.Params.Arguments
|
||||
logger.DebugContext(ctx, fmt.Sprintf("tool name: %s", toolName))
|
||||
tool, ok := tools[toolName]
|
||||
if !ok {
|
||||
err = fmt.Errorf("invalid tool name: tool with name %q does not exist", toolName)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||
}
|
||||
|
||||
// marshal arguments and decode it using decodeJSON instead to prevent loss between floats/int.
|
||||
aMarshal, err := json.Marshal(toolArgument)
|
||||
if err != nil {
|
||||
err = fmt.Errorf("unable to marshal tools argument: %w", err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||
}
|
||||
|
||||
var data map[string]any
|
||||
if err = util.DecodeJSON(bytes.NewBuffer(aMarshal), &data); err != nil {
|
||||
err = fmt.Errorf("unable to decode tools argument: %w", err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||
}
|
||||
|
||||
// claimsFromAuth maps the name of the authservice to the claims retrieved from it.
|
||||
// Since MCP doesn't support auth, an empty map will be used every time.
|
||||
claimsFromAuth := make(map[string]map[string]any)
|
||||
|
||||
params, err := tool.ParseParams(data, claimsFromAuth)
|
||||
if err != nil {
|
||||
err = fmt.Errorf("provided parameters were invalid: %w", err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||
}
|
||||
logger.DebugContext(ctx, fmt.Sprintf("invocation params: %s", params))
|
||||
|
||||
if !tool.Authorized([]string{}) {
|
||||
err = fmt.Errorf("unauthorized Tool call: `authRequired` is set for the target Tool")
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
|
||||
// run tool invocation and generate response.
|
||||
results, err := tool.Invoke(ctx, params)
|
||||
if err != nil {
|
||||
text := TextContent{
|
||||
Type: "text",
|
||||
Text: err.Error(),
|
||||
}
|
||||
return jsonrpc.JSONRPCResponse{
|
||||
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||
Id: id,
|
||||
Result: CallToolResult{Content: []TextContent{text}, IsError: true},
|
||||
}, nil
|
||||
}
|
||||
|
||||
content := make([]TextContent, 0)
|
||||
|
||||
sliceRes, ok := results.([]any)
|
||||
if !ok {
|
||||
sliceRes = []any{results}
|
||||
}
|
||||
|
||||
for _, d := range sliceRes {
|
||||
text := TextContent{Type: "text"}
|
||||
dM, err := json.Marshal(d)
|
||||
if err != nil {
|
||||
text.Text = fmt.Sprintf("fail to marshal: %s, result: %s", err, d)
|
||||
} else {
|
||||
text.Text = string(dM)
|
||||
}
|
||||
content = append(content, text)
|
||||
}
|
||||
|
||||
return jsonrpc.JSONRPCResponse{
|
||||
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||
Id: id,
|
||||
Result: CallToolResult{Content: content},
|
||||
}, nil
|
||||
}
|
||||
180  internal/server/mcp/v20250618/types.go  Normal file
@@ -0,0 +1,180 @@
|
||||
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package v20250618
|
||||
|
||||
import (
|
||||
"github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
)
|
||||
|
||||
// SERVER_NAME is the server name used in Implementation.
|
||||
const SERVER_NAME = "Toolbox"
|
||||
|
||||
// PROTOCOL_VERSION is the version of the MCP protocol in this package.
|
||||
const PROTOCOL_VERSION = "2025-06-18"
|
||||
|
||||
// methods that are supported.
|
||||
const (
|
||||
TOOLS_LIST = "tools/list"
|
||||
TOOLS_CALL = "tools/call"
|
||||
)
|
||||
|
||||
/* Empty result */
|
||||
|
||||
// EmptyResult represents a response that indicates success but carries no data.
|
||||
type EmptyResult jsonrpc.Result
|
||||
|
||||
/* Pagination */
|
||||
|
||||
// Cursor is an opaque token used to represent a cursor for pagination.
|
||||
type Cursor string
|
||||
|
||||
type PaginatedRequest struct {
|
||||
jsonrpc.Request
|
||||
Params struct {
|
||||
// An opaque token representing the current pagination position.
|
||||
// If provided, the server should return results starting after this cursor.
|
||||
Cursor Cursor `json:"cursor,omitempty"`
|
||||
} `json:"params,omitempty"`
|
||||
}
|
||||
|
||||
type PaginatedResult struct {
|
||||
jsonrpc.Result
|
||||
// An opaque token representing the pagination position after the last returned result.
|
||||
// If present, there may be more results available.
|
||||
NextCursor Cursor `json:"nextCursor,omitempty"`
|
||||
}
|
||||
|
||||
/* Tools */
|
||||
|
||||
// Sent from the client to request a list of tools the server has.
|
||||
type ListToolsRequest struct {
|
||||
PaginatedRequest
|
||||
}
|
||||
|
||||
// The server's response to a tools/list request from the client.
|
||||
type ListToolsResult struct {
|
||||
PaginatedResult
|
||||
Tools []tools.McpManifest `json:"tools"`
|
||||
}
|
||||
|
||||
// Used by the client to invoke a tool provided by the server.
|
||||
type CallToolRequest struct {
|
||||
jsonrpc.Request
|
||||
Params struct {
|
||||
Name string `json:"name"`
|
||||
Arguments map[string]any `json:"arguments,omitempty"`
|
||||
} `json:"params,omitempty"`
|
||||
}
|
||||
|
||||
// The sender or recipient of messages and data in a conversation.
|
||||
type Role string
|
||||
|
||||
const (
|
||||
RoleUser Role = "user"
|
||||
RoleAssistant Role = "assistant"
|
||||
)
|
||||
|
||||
// Base for objects that include optional annotations for the client.
|
||||
// The client can use annotations to inform how objects are used or displayed
|
||||
type Annotated struct {
|
||||
Annotations *struct {
|
||||
// Describes who the intended customer of this object or data is.
|
||||
// It can include multiple entries to indicate content useful for multiple
|
||||
// audiences (e.g., `["user", "assistant"]`).
|
||||
Audience []Role `json:"audience,omitempty"`
|
||||
// Describes how important this data is for operating the server.
|
||||
//
|
||||
// A value of 1 means "most important," and indicates that the data is
|
||||
// effectively required, while 0 means "least important," and indicates that
|
||||
// the data is entirely optional.
|
||||
//
|
||||
// @TJS-type number
|
||||
// @minimum 0
|
||||
// @maximum 1
|
||||
Priority float64 `json:"priority,omitempty"`
|
||||
} `json:"annotations,omitempty"`
|
||||
}
|
||||
|
||||
// TextContent represents text provided to or from an LLM.
|
||||
type TextContent struct {
|
||||
Annotated
|
||||
Type string `json:"type"`
|
||||
// The text content of the message.
|
||||
Text string `json:"text"`
|
||||
}
|
||||
|
||||
// The server's response to a tool call.
|
||||
//
|
||||
// Any errors that originate from the tool SHOULD be reported inside the result
|
||||
// object, with `isError` set to true, _not_ as an MCP protocol-level error
|
||||
// response. Otherwise, the LLM would not be able to see that an error occurred
|
||||
// and self-correct.
|
||||
//
|
||||
// However, any errors in _finding_ the tool, an error indicating that the
|
||||
// server does not support tool calls, or any other exceptional conditions,
|
||||
// should be reported as an MCP error response.
|
||||
type CallToolResult struct {
|
||||
jsonrpc.Result
|
||||
// Could be either a TextContent, ImageContent, or EmbeddedResources
|
||||
// For Toolbox, we will only be sending TextContent
|
||||
Content []TextContent `json:"content"`
|
||||
// Whether the tool call ended in an error.
|
||||
// If not set, this is assumed to be false (the call was successful).
|
||||
//
|
||||
// Any errors that originate from the tool SHOULD be reported inside the result
|
||||
// object, with `isError` set to true, _not_ as an MCP protocol-level error
|
||||
// response. Otherwise, the LLM would not be able to see that an error occurred
|
||||
// and self-correct.
|
||||
//
|
||||
// However, any errors in _finding_ the tool, an error indicating that the
|
||||
// server does not support tool calls, or any other exceptional conditions,
|
||||
// should be reported as an MCP error response.
|
||||
IsError bool `json:"isError,omitempty"`
|
||||
// An optional JSON object that represents the structured result of the tool call.
|
||||
StructuredContent map[string]any `json:"structuredContent,omitempty"`
|
||||
}
|
||||
|
||||
// Additional properties describing a Tool to clients.
|
||||
//
|
||||
// NOTE: all properties in ToolAnnotations are **hints**.
|
||||
// They are not guaranteed to provide a faithful description of
|
||||
// tool behavior (including descriptive properties like `title`).
|
||||
//
|
||||
// Clients should never make tool use decisions based on ToolAnnotations
|
||||
// received from untrusted servers.
|
||||
type ToolAnnotations struct {
|
||||
// A human-readable title for the tool.
|
||||
Title string `json:"title,omitempty"`
|
||||
// If true, the tool does not modify its environment.
|
||||
// Default: false
|
||||
ReadOnlyHint bool `json:"readOnlyHint,omitempty"`
|
||||
// If true, the tool may perform destructive updates to its environment.
|
||||
// If false, the tool performs only additive updates.
|
||||
// (This property is meaningful only when `readOnlyHint == false`)
|
||||
// Default: true
|
||||
DestructiveHint bool `json:"destructiveHint,omitempty"`
|
||||
// If true, calling the tool repeatedly with the same arguments
|
||||
// will have no additional effect on its environment.
|
||||
// (This property is meaningful only when `readOnlyHint == false`)
|
||||
// Default: false
|
||||
IdempotentHint bool `json:"idempotentHint,omitempty"`
|
||||
// If true, this tool may interact with an "open world" of external
|
||||
// entities. If false, the tool's domain of interaction is closed.
|
||||
// For example, the world of a web search tool is open, whereas that
|
||||
// of a memory tool is not.
|
||||
// Default: true
|
||||
OpenWorldHint bool `json:"openWorldHint,omitempty"`
|
||||
}
|
||||
@@ -36,6 +36,7 @@ import (
|
||||
const jsonrpcVersion = "2.0"
|
||||
const protocolVersion20241105 = "2024-11-05"
|
||||
const protocolVersion20250326 = "2025-03-26"
|
||||
const protocolVersion20250618 = "2025-06-18"
|
||||
const serverName = "Toolbox"
|
||||
|
||||
var tool1InputSchema = map[string]any{
|
||||
@@ -254,7 +255,7 @@ func TestMcpEndpoint(t *testing.T) {
|
||||
initWant map[string]any
|
||||
}{
|
||||
{
|
||||
name: "verson 2024-11-05",
|
||||
name: "version 2024-11-05",
|
||||
protocol: protocolVersion20241105,
|
||||
idHeader: false,
|
||||
initWant: map[string]any{
|
||||
@@ -270,7 +271,7 @@ func TestMcpEndpoint(t *testing.T) {
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "verson 2025-03-26",
|
||||
name: "version 2025-03-26",
|
||||
protocol: protocolVersion20250326,
|
||||
idHeader: true,
|
||||
initWant: map[string]any{
|
||||
@@ -285,6 +286,22 @@ func TestMcpEndpoint(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "version 2025-06-18",
|
||||
protocol: protocolVersion20250618,
|
||||
idHeader: false,
|
||||
initWant: map[string]any{
|
||||
"jsonrpc": "2.0",
|
||||
"id": "mcp-initialize",
|
||||
"result": map[string]any{
|
||||
"protocolVersion": "2025-06-18",
|
||||
"capabilities": map[string]any{
|
||||
"tools": map[string]any{"listChanged": false},
|
||||
},
|
||||
"serverInfo": map[string]any{"name": serverName, "version": fakeVersionString},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, vtc := range versTestCases {
|
||||
t.Run(vtc.name, func(t *testing.T) {
|
||||
@@ -295,6 +312,10 @@ func TestMcpEndpoint(t *testing.T) {
|
||||
header["Mcp-Session-Id"] = sessionId
|
||||
}
|
||||
|
||||
if vtc.protocol == protocolVersion20250618 {
|
||||
header["MCP-Protocol-Version"] = vtc.protocol
|
||||
}
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
url string
|
||||
@@ -481,7 +502,7 @@ func TestMcpEndpoint(t *testing.T) {
|
||||
t.Fatalf("unexpected error during marshaling of body")
|
||||
}
|
||||
|
||||
if vtc.protocol == protocolVersion20250326 && len(header) == 0 {
|
||||
if vtc.protocol != protocolVersion20241105 && len(header) == 0 {
|
||||
t.Fatalf("header is missing")
|
||||
}
|
||||
|
||||
@@ -514,6 +535,33 @@ func TestMcpEndpoint(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestInvalidProtocolVersionHeader(t *testing.T) {
|
||||
toolsMap, toolsets := map[string]tools.Tool{}, map[string]tools.Toolset{}
|
||||
r, shutdown := setUpServer(t, "mcp", toolsMap, toolsets)
|
||||
defer shutdown()
|
||||
ts := runServer(r, false)
|
||||
defer ts.Close()
|
||||
|
||||
header := map[string]string{}
|
||||
header["MCP-Protocol-Version"] = "foo"
|
||||
|
||||
resp, body, err := runRequest(ts, http.MethodPost, "/", nil, header)
|
||||
if resp.Status != "400 Bad Request" {
|
||||
t.Fatalf("unexpected status: %s", resp.Status)
|
||||
}
|
||||
var got map[string]any
|
||||
if err := json.Unmarshal(body, &got); err != nil {
|
||||
t.Fatalf("unexpected error unmarshalling body: %s", err)
|
||||
}
|
||||
want := "invalid protocol version: foo"
|
||||
if got["error"] != want {
|
||||
t.Fatalf("unexpected error message: got %s, want %s", got["error"], want)
|
||||
}
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error during request: %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDeleteEndpoint(t *testing.T) {
|
||||
toolsMap, toolsets := map[string]tools.Tool{}, map[string]tools.Toolset{}
|
||||
r, shutdown := setUpServer(t, "mcp", toolsMap, toolsets)
|
||||
@@ -693,14 +741,22 @@ func TestStdioSession(t *testing.T) {
|
||||
}
|
||||
}()
|
||||
|
||||
instrumentation, err := CreateTelemetryInstrumentation(fakeVersionString)
|
||||
instrumentation, err := telemetry.CreateTelemetryInstrumentation(fakeVersionString)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to create custom metrics: %s", err)
|
||||
}
|
||||
|
||||
sseManager := newSseManager(ctx)
|
||||
|
||||
server := &Server{version: fakeVersionString, logger: testLogger, instrumentation: instrumentation, sseManager: sseManager, tools: toolsMap, toolsets: toolsets}
|
||||
resourceManager := NewResourceManager(nil, nil, toolsMap, toolsets)
|
||||
|
||||
server := &Server{
|
||||
version: fakeVersionString,
|
||||
logger: testLogger,
|
||||
instrumentation: instrumentation,
|
||||
sseManager: sseManager,
|
||||
ResourceMgr: resourceManager,
|
||||
}
|
||||
|
||||
in := bufio.NewReader(pr)
|
||||
stdioSession := NewStdioSession(server, in, pw)
|
||||
|
||||
@@ -21,6 +21,7 @@ import (
|
||||
"net"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/go-chi/chi/v5"
|
||||
@@ -29,6 +30,7 @@ import (
|
||||
"github.com/googleapis/genai-toolbox/internal/auth"
|
||||
"github.com/googleapis/genai-toolbox/internal/log"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
"github.com/googleapis/genai-toolbox/internal/telemetry"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
"go.opentelemetry.io/otel/attribute"
|
||||
@@ -42,26 +44,225 @@ type Server struct {
	listener net.Listener
	root     chi.Router
	logger   log.Logger
	instrumentation *Instrumentation
	instrumentation *telemetry.Instrumentation
	sseManager      *sseManager
	ResourceMgr     *ResourceManager
}

// ResourceManager contains available resources for the server. Should be initialized with NewResourceManager().
type ResourceManager struct {
	mu           sync.RWMutex
	sources      map[string]sources.Source
	authServices map[string]auth.AuthService
	tools        map[string]tools.Tool
	toolsets     map[string]tools.Toolset
}

// NewServer returns a Server object based on provided Config.
func NewServer(ctx context.Context, cfg ServerConfig, l log.Logger) (*Server, error) {
	instrumentation, err := CreateTelemetryInstrumentation(cfg.Version)
func NewResourceManager(
	sourcesMap map[string]sources.Source,
	authServicesMap map[string]auth.AuthService,
	toolsMap map[string]tools.Tool, toolsetsMap map[string]tools.Toolset,
) *ResourceManager {
	resourceMgr := &ResourceManager{
		mu:           sync.RWMutex{},
		sources:      sourcesMap,
		authServices: authServicesMap,
		tools:        toolsMap,
		toolsets:     toolsetsMap,
	}

	return resourceMgr
}

func (r *ResourceManager) GetSource(sourceName string) (sources.Source, bool) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	source, ok := r.sources[sourceName]
	return source, ok
}

func (r *ResourceManager) GetAuthService(authServiceName string) (auth.AuthService, bool) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	authService, ok := r.authServices[authServiceName]
	return authService, ok
}

func (r *ResourceManager) GetTool(toolName string) (tools.Tool, bool) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	tool, ok := r.tools[toolName]
	return tool, ok
}

func (r *ResourceManager) GetToolset(toolsetName string) (tools.Toolset, bool) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	toolset, ok := r.toolsets[toolsetName]
	return toolset, ok
}

func (r *ResourceManager) SetResources(sourcesMap map[string]sources.Source, authServicesMap map[string]auth.AuthService, toolsMap map[string]tools.Tool, toolsetsMap map[string]tools.Toolset) {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.sources = sourcesMap
	r.authServices = authServicesMap
	r.tools = toolsMap
	r.toolsets = toolsetsMap
}

func (r *ResourceManager) GetAuthServiceMap() map[string]auth.AuthService {
	r.mu.RLock()
	defer r.mu.RUnlock()
	return r.authServices
}

func (r *ResourceManager) GetToolsMap() map[string]tools.Tool {
	r.mu.RLock()
	defer r.mu.RUnlock()
	return r.tools
}

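These getters are what the HTTP handlers now call instead of reading the old s.tools and s.toolsets maps directly, and SetResources is what makes dynamic reloading possible: a reloaded configuration is swapped in under the write lock while lookups keep taking the read lock. A standalone sketch of that pattern, with a simplified value type standing in for the real Tool/Toolset interfaces (names here are illustrative, not the repository's):

package main

import (
	"fmt"
	"sync"
)

// registry mirrors the ResourceManager pattern: per-lookup read locks,
// whole-map swap on reload.
type registry struct {
	mu    sync.RWMutex
	tools map[string]string // name -> description, stand-in for map[string]tools.Tool
}

func (r *registry) GetTool(name string) (string, bool) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	t, ok := r.tools[name]
	return t, ok
}

func (r *registry) SetResources(tools map[string]string) {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.tools = tools
}

func main() {
	reg := &registry{tools: map[string]string{"execute_sql": "run a SQL statement"}}
	if t, ok := reg.GetTool("execute_sql"); ok {
		fmt.Println("before reload:", t)
	}
	// Simulate a config reload: subsequent lookups see only the new tool set.
	reg.SetResources(map[string]string{"firestore-get-documents": "get documents by path"})
	_, ok := reg.GetTool("execute_sql")
	fmt.Println("execute_sql still present:", ok)
}
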
func InitializeConfigs(ctx context.Context, cfg ServerConfig) (
|
||||
map[string]sources.Source,
|
||||
map[string]auth.AuthService,
|
||||
map[string]tools.Tool,
|
||||
map[string]tools.Toolset,
|
||||
error,
|
||||
) {
|
||||
ctx = util.WithUserAgent(ctx, cfg.Version)
|
||||
instrumentation, err := util.InstrumentationFromContext(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to create telemetry instrumentation: %w", err)
|
||||
panic(err)
|
||||
}
|
||||
|
||||
l, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// initialize and validate the sources from configs
|
||||
sourcesMap := make(map[string]sources.Source)
|
||||
for name, sc := range cfg.SourceConfigs {
|
||||
s, err := func() (sources.Source, error) {
|
||||
childCtx, span := instrumentation.Tracer.Start(
|
||||
ctx,
|
||||
"toolbox/server/source/init",
|
||||
trace.WithAttributes(attribute.String("source_kind", sc.SourceConfigKind())),
|
||||
trace.WithAttributes(attribute.String("source_name", name)),
|
||||
)
|
||||
defer span.End()
|
||||
s, err := sc.Initialize(childCtx, instrumentation.Tracer)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to initialize source %q: %w", name, err)
|
||||
}
|
||||
return s, nil
|
||||
}()
|
||||
if err != nil {
|
||||
return nil, nil, nil, nil, err
|
||||
}
|
||||
sourcesMap[name] = s
|
||||
}
|
||||
l.InfoContext(ctx, fmt.Sprintf("Initialized %d sources.", len(sourcesMap)))
|
||||
|
||||
// initialize and validate the auth services from configs
|
||||
authServicesMap := make(map[string]auth.AuthService)
|
||||
for name, sc := range cfg.AuthServiceConfigs {
|
||||
a, err := func() (auth.AuthService, error) {
|
||||
_, span := instrumentation.Tracer.Start(
|
||||
ctx,
|
||||
"toolbox/server/auth/init",
|
||||
trace.WithAttributes(attribute.String("auth_kind", sc.AuthServiceConfigKind())),
|
||||
trace.WithAttributes(attribute.String("auth_name", name)),
|
||||
)
|
||||
defer span.End()
|
||||
a, err := sc.Initialize()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to initialize auth service %q: %w", name, err)
|
||||
}
|
||||
return a, nil
|
||||
}()
|
||||
if err != nil {
|
||||
return nil, nil, nil, nil, err
|
||||
}
|
||||
authServicesMap[name] = a
|
||||
}
|
||||
l.InfoContext(ctx, fmt.Sprintf("Initialized %d authServices.", len(authServicesMap)))
|
||||
|
||||
// initialize and validate the tools from configs
|
||||
toolsMap := make(map[string]tools.Tool)
|
||||
for name, tc := range cfg.ToolConfigs {
|
||||
t, err := func() (tools.Tool, error) {
|
||||
_, span := instrumentation.Tracer.Start(
|
||||
ctx,
|
||||
"toolbox/server/tool/init",
|
||||
trace.WithAttributes(attribute.String("tool_kind", tc.ToolConfigKind())),
|
||||
trace.WithAttributes(attribute.String("tool_name", name)),
|
||||
)
|
||||
defer span.End()
|
||||
t, err := tc.Initialize(sourcesMap)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to initialize tool %q: %w", name, err)
|
||||
}
|
||||
return t, nil
|
||||
}()
|
||||
if err != nil {
|
||||
return nil, nil, nil, nil, err
|
||||
}
|
||||
toolsMap[name] = t
|
||||
}
|
||||
l.InfoContext(ctx, fmt.Sprintf("Initialized %d tools.", len(toolsMap)))
|
||||
|
||||
// create a default toolset that contains all tools
|
||||
allToolNames := make([]string, 0, len(toolsMap))
|
||||
for name := range toolsMap {
|
||||
allToolNames = append(allToolNames, name)
|
||||
}
|
||||
if cfg.ToolsetConfigs == nil {
|
||||
cfg.ToolsetConfigs = make(ToolsetConfigs)
|
||||
}
|
||||
cfg.ToolsetConfigs[""] = tools.ToolsetConfig{Name: "", ToolNames: allToolNames}
|
||||
|
||||
// initialize and validate the toolsets from configs
|
||||
toolsetsMap := make(map[string]tools.Toolset)
|
||||
for name, tc := range cfg.ToolsetConfigs {
|
||||
t, err := func() (tools.Toolset, error) {
|
||||
_, span := instrumentation.Tracer.Start(
|
||||
ctx,
|
||||
"toolbox/server/toolset/init",
|
||||
trace.WithAttributes(attribute.String("toolset_name", name)),
|
||||
)
|
||||
defer span.End()
|
||||
t, err := tc.Initialize(cfg.Version, toolsMap)
|
||||
if err != nil {
|
||||
return tools.Toolset{}, fmt.Errorf("unable to initialize toolset %q: %w", name, err)
|
||||
}
|
||||
return t, err
|
||||
}()
|
||||
if err != nil {
|
||||
return nil, nil, nil, nil, err
|
||||
}
|
||||
toolsetsMap[name] = t
|
||||
}
|
||||
l.InfoContext(ctx, fmt.Sprintf("Initialized %d toolsets.", len(toolsetsMap)))
|
||||
|
||||
return sourcesMap, authServicesMap, toolsMap, toolsetsMap, nil
|
||||
}
|
||||
|
||||
// NewServer returns a Server object based on provided Config.
|
||||
func NewServer(ctx context.Context, cfg ServerConfig) (*Server, error) {
|
||||
instrumentation, err := util.InstrumentationFromContext(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
ctx, span := instrumentation.Tracer.Start(ctx, "toolbox/server/init")
|
||||
defer span.End()
|
||||
|
||||
ctx = util.WithUserAgent(ctx, cfg.Version)
|
||||
l, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// set up http serving
|
||||
r := chi.NewRouter()
|
||||
@@ -97,116 +298,18 @@ func NewServer(ctx context.Context, cfg ServerConfig, l log.Logger) (*Server, er
|
||||
httpLogger := httplog.NewLogger("httplog", httpOpts)
|
||||
r.Use(httplog.RequestLogger(httpLogger))
|
||||
|
||||
// initialize and validate the sources from configs
|
||||
sourcesMap := make(map[string]sources.Source)
|
||||
for name, sc := range cfg.SourceConfigs {
|
||||
s, err := func() (sources.Source, error) {
|
||||
childCtx, span := instrumentation.Tracer.Start(
|
||||
ctx,
|
||||
"toolbox/server/source/init",
|
||||
trace.WithAttributes(attribute.String("source_kind", sc.SourceConfigKind())),
|
||||
trace.WithAttributes(attribute.String("source_name", name)),
|
||||
)
|
||||
defer span.End()
|
||||
s, err := sc.Initialize(childCtx, instrumentation.Tracer)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to initialize source %q: %w", name, err)
|
||||
}
|
||||
return s, nil
|
||||
}()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
sourcesMap[name] = s
|
||||
sourcesMap, authServicesMap, toolsMap, toolsetsMap, err := InitializeConfigs(ctx, cfg)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to initialize configs: %w", err)
|
||||
}
|
||||
l.InfoContext(ctx, fmt.Sprintf("Initialized %d sources.", len(sourcesMap)))
|
||||
|
||||
// initialize and validate the auth services from configs
|
||||
authServicesMap := make(map[string]auth.AuthService)
|
||||
for name, sc := range cfg.AuthServiceConfigs {
|
||||
a, err := func() (auth.AuthService, error) {
|
||||
_, span := instrumentation.Tracer.Start(
|
||||
ctx,
|
||||
"toolbox/server/auth/init",
|
||||
trace.WithAttributes(attribute.String("auth_kind", sc.AuthServiceConfigKind())),
|
||||
trace.WithAttributes(attribute.String("auth_name", name)),
|
||||
)
|
||||
defer span.End()
|
||||
a, err := sc.Initialize()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to initialize auth service %q: %w", name, err)
|
||||
}
|
||||
return a, nil
|
||||
}()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
authServicesMap[name] = a
|
||||
}
|
||||
l.InfoContext(ctx, fmt.Sprintf("Initialized %d authServices.", len(authServicesMap)))
|
||||
|
||||
// initialize and validate the tools from configs
|
||||
toolsMap := make(map[string]tools.Tool)
|
||||
for name, tc := range cfg.ToolConfigs {
|
||||
t, err := func() (tools.Tool, error) {
|
||||
_, span := instrumentation.Tracer.Start(
|
||||
ctx,
|
||||
"toolbox/server/tool/init",
|
||||
trace.WithAttributes(attribute.String("tool_kind", tc.ToolConfigKind())),
|
||||
trace.WithAttributes(attribute.String("tool_name", name)),
|
||||
)
|
||||
defer span.End()
|
||||
t, err := tc.Initialize(sourcesMap)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to initialize tool %q: %w", name, err)
|
||||
}
|
||||
return t, nil
|
||||
}()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
toolsMap[name] = t
|
||||
}
|
||||
l.InfoContext(ctx, fmt.Sprintf("Initialized %d tools.", len(toolsMap)))
|
||||
|
||||
// create a default toolset that contains all tools
|
||||
allToolNames := make([]string, 0, len(toolsMap))
|
||||
for name := range toolsMap {
|
||||
allToolNames = append(allToolNames, name)
|
||||
}
|
||||
if cfg.ToolsetConfigs == nil {
|
||||
cfg.ToolsetConfigs = make(ToolsetConfigs)
|
||||
}
|
||||
cfg.ToolsetConfigs[""] = tools.ToolsetConfig{Name: "", ToolNames: allToolNames}
|
||||
|
||||
// initialize and validate the toolsets from configs
|
||||
toolsetsMap := make(map[string]tools.Toolset)
|
||||
for name, tc := range cfg.ToolsetConfigs {
|
||||
t, err := func() (tools.Toolset, error) {
|
||||
_, span := instrumentation.Tracer.Start(
|
||||
ctx,
|
||||
"toolbox/server/toolset/init",
|
||||
trace.WithAttributes(attribute.String("toolset_name", name)),
|
||||
)
|
||||
defer span.End()
|
||||
t, err := tc.Initialize(cfg.Version, toolsMap)
|
||||
if err != nil {
|
||||
return tools.Toolset{}, fmt.Errorf("unable to initialize toolset %q: %w", name, err)
|
||||
}
|
||||
return t, err
|
||||
}()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
toolsetsMap[name] = t
|
||||
}
|
||||
l.InfoContext(ctx, fmt.Sprintf("Initialized %d toolsets.", len(toolsetsMap)))
|
||||
|
||||
addr := net.JoinHostPort(cfg.Address, strconv.Itoa(cfg.Port))
|
||||
srv := &http.Server{Addr: addr, Handler: r}
|
||||
|
||||
sseManager := newSseManager(ctx)
|
||||
|
||||
resourceManager := NewResourceManager(sourcesMap, authServicesMap, toolsMap, toolsetsMap)
|
||||
|
||||
s := &Server{
|
||||
version: cfg.Version,
|
||||
srv: srv,
|
||||
@@ -214,11 +317,7 @@ func NewServer(ctx context.Context, cfg ServerConfig, l log.Logger) (*Server, er
|
||||
logger: l,
|
||||
instrumentation: instrumentation,
|
||||
sseManager: sseManager,
|
||||
|
||||
sources: sourcesMap,
|
||||
authServices: authServicesMap,
|
||||
tools: toolsMap,
|
||||
toolsets: toolsetsMap,
|
||||
ResourceMgr: resourceManager,
|
||||
}
|
||||
// control plane
|
||||
apiR, err := apiRouter(s)
|
||||
|
||||
@@ -23,9 +23,16 @@ import (
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/googleapis/genai-toolbox/internal/auth"
|
||||
"github.com/googleapis/genai-toolbox/internal/log"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources/alloydbpg"
|
||||
"github.com/googleapis/genai-toolbox/internal/telemetry"
|
||||
"github.com/googleapis/genai-toolbox/internal/testutils"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
)
|
||||
|
||||
func TestServe(t *testing.T) {
|
||||
@@ -54,8 +61,16 @@ func TestServe(t *testing.T) {
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
ctx = util.WithLogger(ctx, testLogger)
|
||||
|
||||
s, err := server.NewServer(ctx, cfg, testLogger)
|
||||
instrumentation, err := telemetry.CreateTelemetryInstrumentation(cfg.Version)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
|
||||
ctx = util.WithInstrumentation(ctx, instrumentation)
|
||||
|
||||
s, err := server.NewServer(ctx, cfg)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to initialize server: %v", err)
|
||||
}
|
||||
@@ -93,3 +108,67 @@ func TestServe(t *testing.T) {
|
||||
t.Fatalf("version missing from output: %q", got)
|
||||
}
|
||||
}
|
||||
|
||||
func TestUpdateServer(t *testing.T) {
|
||||
ctx, err := testutils.ContextWithNewLogger()
|
||||
if err != nil {
|
||||
t.Fatalf("error setting up logger: %s", err)
|
||||
}
|
||||
|
||||
addr, port := "127.0.0.1", 5000
|
||||
cfg := server.ServerConfig{
|
||||
Version: "0.0.0",
|
||||
Address: addr,
|
||||
Port: port,
|
||||
}
|
||||
|
||||
instrumentation, err := telemetry.CreateTelemetryInstrumentation(cfg.Version)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
|
||||
ctx = util.WithInstrumentation(ctx, instrumentation)
|
||||
|
||||
s, err := server.NewServer(ctx, cfg)
|
||||
if err != nil {
|
||||
t.Fatalf("error setting up server: %s", err)
|
||||
}
|
||||
|
||||
newSources := map[string]sources.Source{
|
||||
"example-source": &alloydbpg.Source{
|
||||
Name: "example-alloydb-source",
|
||||
Kind: "alloydb-postgres",
|
||||
},
|
||||
}
|
||||
newAuth := map[string]auth.AuthService{"example-auth": nil}
|
||||
newTools := map[string]tools.Tool{"example-tool": nil}
|
||||
newToolsets := map[string]tools.Toolset{
|
||||
"example-toolset": {
|
||||
Name: "example-toolset", Tools: []*tools.Tool{},
|
||||
},
|
||||
}
|
||||
s.ResourceMgr.SetResources(newSources, newAuth, newTools, newToolsets)
|
||||
if err != nil {
|
||||
t.Errorf("error updating server: %s", err)
|
||||
}
|
||||
|
||||
gotSource, _ := s.ResourceMgr.GetSource("example-source")
|
||||
if diff := cmp.Diff(gotSource, newSources["example-source"]); diff != "" {
|
||||
t.Errorf("error updating server, sources (-want +got):\n%s", diff)
|
||||
}
|
||||
|
||||
gotAuthService, _ := s.ResourceMgr.GetAuthService("example-auth")
|
||||
if diff := cmp.Diff(gotAuthService, newAuth["example-auth"]); diff != "" {
|
||||
t.Errorf("error updating server, authServices (-want +got):\n%s", diff)
|
||||
}
|
||||
|
||||
gotTool, _ := s.ResourceMgr.GetTool("example-tool")
|
||||
if diff := cmp.Diff(gotTool, newTools["example-tool"]); diff != "" {
|
||||
t.Errorf("error updating server, tools (-want +got):\n%s", diff)
|
||||
}
|
||||
|
||||
gotToolset, _ := s.ResourceMgr.GetToolset("example-toolset")
|
||||
if diff := cmp.Diff(gotToolset, newToolsets["example-toolset"]); diff != "" {
|
||||
t.Errorf("error updating server, toolset (-want +got):\n%s", diff)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -24,6 +24,7 @@ import (
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
"golang.org/x/oauth2/google"
|
||||
bigqueryrestapi "google.golang.org/api/bigquery/v2"
|
||||
"google.golang.org/api/option"
|
||||
)
|
||||
|
||||
@@ -61,15 +62,17 @@ func (r Config) SourceConfigKind() string {
|
||||
|
||||
func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
|
||||
// Initializes a BigQuery Google SQL source
|
||||
client, err := initBigQueryConnection(ctx, tracer, r.Name, r.Project, r.Location)
|
||||
client, restService, err := initBigQueryConnection(ctx, tracer, r.Name, r.Project, r.Location)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
s := &Source{
|
||||
Name: r.Name,
|
||||
Kind: SourceKind,
|
||||
Client: client,
|
||||
Location: r.Location,
|
||||
Name: r.Name,
|
||||
Kind: SourceKind,
|
||||
Client: client,
|
||||
RestService: restService,
|
||||
Location: r.Location,
|
||||
}
|
||||
return s, nil
|
||||
|
||||
@@ -79,10 +82,11 @@ var _ sources.Source = &Source{}
|
||||
|
||||
type Source struct {
|
||||
// BigQuery Google SQL struct with client
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
Client *bigqueryapi.Client
|
||||
Location string `yaml:"location"`
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
Client *bigqueryapi.Client
|
||||
RestService *bigqueryrestapi.Service
|
||||
Location string `yaml:"location"`
|
||||
}
|
||||
|
||||
func (s *Source) SourceKind() string {
|
||||
@@ -94,30 +98,42 @@ func (s *Source) BigQueryClient() *bigqueryapi.Client {
|
||||
return s.Client
|
||||
}
|
||||
|
||||
func (s *Source) BigQueryRestService() *bigqueryrestapi.Service {
|
||||
return s.RestService
|
||||
}
|
||||
|
||||
func initBigQueryConnection(
|
||||
ctx context.Context,
|
||||
tracer trace.Tracer,
|
||||
name string,
|
||||
project string,
|
||||
location string,
|
||||
) (*bigqueryapi.Client, error) {
|
||||
) (*bigqueryapi.Client, *bigqueryrestapi.Service, error) {
|
||||
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
|
||||
defer span.End()
|
||||
|
||||
cred, err := google.FindDefaultCredentials(ctx, bigqueryapi.Scope)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to find default Google Cloud credentials with scope %q: %w", bigqueryapi.Scope, err)
|
||||
return nil, nil, fmt.Errorf("failed to find default Google Cloud credentials with scope %q: %w", bigqueryapi.Scope, err)
|
||||
}
|
||||
|
||||
userAgent, err := util.UserAgentFromContext(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Initialize the high-level BigQuery client
|
||||
client, err := bigqueryapi.NewClient(ctx, project, option.WithUserAgent(userAgent), option.WithCredentials(cred))
|
||||
client.Location = location
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create BigQuery client for project %q: %w", project, err)
|
||||
return nil, nil, fmt.Errorf("failed to create BigQuery client for project %q: %w", project, err)
|
||||
}
|
||||
return client, nil
|
||||
client.Location = location
|
||||
|
||||
// Initialize the low-level BigQuery REST service using the same credentials
|
||||
restService, err := bigqueryrestapi.NewService(ctx, option.WithUserAgent(userAgent), option.WithCredentials(cred))
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("failed to create BigQuery v2 service: %w", err)
|
||||
}
|
||||
|
||||
return client, restService, nil
|
||||
}
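A minimal sketch, assuming a caller already holds the two handles returned by initBigQueryConnection above, of how the high-level client and the v2 REST service complement each other: the client drives queries and dataset iterators, while the REST service reaches job-level APIs the client does not surface. The package name, function name, and printed output are assumptions, not part of this change.

```go
// Sketch only; names here are illustrative assumptions.
package sketch

import (
	"context"
	"fmt"

	bigqueryapi "cloud.google.com/go/bigquery"
	bigqueryrestapi "google.golang.org/api/bigquery/v2"
)

func describe(ctx context.Context, client *bigqueryapi.Client, rest *bigqueryrestapi.Service) error {
	// High-level client: iterate datasets in the client's project.
	ds, err := client.Datasets(ctx).Next()
	if err != nil {
		return err // iterator.Done is returned when the project has no datasets
	}
	fmt.Println("first dataset:", ds.DatasetID)

	// Low-level REST service: job-level APIs (e.g. listing or dry-running jobs).
	_, err = rest.Jobs.List(client.Project()).Context(ctx).Do()
	return err
}
```

Passing the same credentials and user agent to both, as the change above does, keeps request attribution consistent across the two surfaces.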
|
||||
|
||||
internal/sources/firestore/firestore.go (new file, 153 lines)
@@ -0,0 +1,153 @@
|
||||
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package firestore
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"cloud.google.com/go/firestore"
|
||||
"github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
"google.golang.org/api/firebaserules/v1"
|
||||
"google.golang.org/api/option"
|
||||
)
|
||||
|
||||
const SourceKind string = "firestore"
|
||||
|
||||
// validate interface
|
||||
var _ sources.SourceConfig = Config{}
|
||||
|
||||
func init() {
|
||||
if !sources.Register(SourceKind, newConfig) {
|
||||
panic(fmt.Sprintf("source kind %q already registered", SourceKind))
|
||||
}
|
||||
}
|
||||
|
||||
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources.SourceConfig, error) {
|
||||
actual := Config{Name: name}
|
||||
if err := decoder.DecodeContext(ctx, &actual); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return actual, nil
|
||||
}
|
||||
|
||||
type Config struct {
|
||||
// Firestore configs
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Project string `yaml:"project" validate:"required"`
|
||||
Database string `yaml:"database"` // Optional, defaults to "(default)"
|
||||
}
|
||||
|
||||
func (r Config) SourceConfigKind() string {
|
||||
// Returns Firestore source kind
|
||||
return SourceKind
|
||||
}
|
||||
|
||||
func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
|
||||
// Initializes a Firestore source
|
||||
client, err := initFirestoreConnection(ctx, tracer, r.Name, r.Project, r.Database)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Initialize Firebase Rules client
|
||||
rulesClient, err := initFirebaseRulesConnection(ctx, r.Project)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to initialize Firebase Rules client: %w", err)
|
||||
}
|
||||
|
||||
s := &Source{
|
||||
Name: r.Name,
|
||||
Kind: SourceKind,
|
||||
Client: client,
|
||||
RulesClient: rulesClient,
|
||||
ProjectId: r.Project,
|
||||
}
|
||||
return s, nil
|
||||
}
|
||||
|
||||
var _ sources.Source = &Source{}
|
||||
|
||||
type Source struct {
|
||||
// Firestore struct with client
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
Client *firestore.Client
|
||||
RulesClient *firebaserules.Service
|
||||
ProjectId string `yaml:"projectId"`
|
||||
}
|
||||
|
||||
func (s *Source) SourceKind() string {
|
||||
// Returns Firestore source kind
|
||||
return SourceKind
|
||||
}
|
||||
|
||||
func (s *Source) FirestoreClient() *firestore.Client {
|
||||
return s.Client
|
||||
}
|
||||
|
||||
func (s *Source) FirebaseRulesClient() *firebaserules.Service {
|
||||
return s.RulesClient
|
||||
}
|
||||
|
||||
func (s *Source) GetProjectId() string {
|
||||
return s.ProjectId
|
||||
}
|
||||
|
||||
func initFirestoreConnection(
|
||||
ctx context.Context,
|
||||
tracer trace.Tracer,
|
||||
name string,
|
||||
project string,
|
||||
database string,
|
||||
) (*firestore.Client, error) {
|
||||
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
|
||||
defer span.End()
|
||||
|
||||
userAgent, err := util.UserAgentFromContext(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// If database is not specified, use the default database
|
||||
if database == "" {
|
||||
database = "(default)"
|
||||
}
|
||||
|
||||
// Create the Firestore client
|
||||
client, err := firestore.NewClientWithDatabase(ctx, project, database, option.WithUserAgent(userAgent))
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create Firestore client for project %q and database %q: %w", project, database, err)
|
||||
}
|
||||
|
||||
return client, nil
|
||||
}
|
||||
|
||||
func initFirebaseRulesConnection(
|
||||
ctx context.Context,
|
||||
project string,
|
||||
) (*firebaserules.Service, error) {
|
||||
// Create the Firebase Rules client
|
||||
rulesClient, err := firebaserules.NewService(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create Firebase Rules client for project %q: %w", project, err)
|
||||
}
|
||||
|
||||
return rulesClient, nil
|
||||
}
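A minimal sketch, assuming placeholder values for the document path and project ID, of consuming the two clients the new source exposes through FirestoreClient() and FirebaseRulesClient(): one for document data, one for security-rules metadata.

```go
// Sketch only; "users/alice" and projectID are placeholder assumptions.
package sketch

import (
	"context"
	"fmt"

	"cloud.google.com/go/firestore"
	"google.golang.org/api/firebaserules/v1"
)

func inspect(ctx context.Context, fs *firestore.Client, rules *firebaserules.Service, projectID string) error {
	// Read a single document with the data client.
	snap, err := fs.Doc("users/alice").Get(ctx)
	if err != nil {
		return fmt.Errorf("get document: %w", err)
	}
	fmt.Println("document data:", snap.Data())

	// List the project's rulesets with the Rules service.
	resp, err := rules.Projects.Rulesets.List("projects/" + projectID).Context(ctx).Do()
	if err != nil {
		return fmt.Errorf("list rulesets: %w", err)
	}
	fmt.Println("rulesets:", len(resp.Rulesets))
	return nil
}
```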
|
||||
internal/sources/firestore/firestore_test.go (new file, 130 lines)
@@ -0,0 +1,130 @@
|
||||
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package firestore_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources/firestore"
|
||||
"github.com/googleapis/genai-toolbox/internal/testutils"
|
||||
)
|
||||
|
||||
func TestParseFromYamlFirestore(t *testing.T) {
|
||||
tcs := []struct {
|
||||
desc string
|
||||
in string
|
||||
want server.SourceConfigs
|
||||
}{
|
||||
{
|
||||
desc: "basic example with default database",
|
||||
in: `
|
||||
sources:
|
||||
my-firestore:
|
||||
kind: firestore
|
||||
project: my-project
|
||||
`,
|
||||
want: server.SourceConfigs{
|
||||
"my-firestore": firestore.Config{
|
||||
Name: "my-firestore",
|
||||
Kind: firestore.SourceKind,
|
||||
Project: "my-project",
|
||||
Database: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "with custom database",
|
||||
in: `
|
||||
sources:
|
||||
my-firestore:
|
||||
kind: firestore
|
||||
project: my-project
|
||||
database: my-database
|
||||
`,
|
||||
want: server.SourceConfigs{
|
||||
"my-firestore": firestore.Config{
|
||||
Name: "my-firestore",
|
||||
Kind: firestore.SourceKind,
|
||||
Project: "my-project",
|
||||
Database: "my-database",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
got := struct {
|
||||
Sources server.SourceConfigs `yaml:"sources"`
|
||||
}{}
|
||||
// Parse contents
|
||||
err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to unmarshal: %s", err)
|
||||
}
|
||||
if !cmp.Equal(tc.want, got.Sources) {
|
||||
t.Fatalf("incorrect parse: want %v, got %v", tc.want, got.Sources)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestFailParseFromYamlFirestore(t *testing.T) {
|
||||
tcs := []struct {
|
||||
desc string
|
||||
in string
|
||||
err string
|
||||
}{
|
||||
{
|
||||
desc: "extra field",
|
||||
in: `
|
||||
sources:
|
||||
my-firestore:
|
||||
kind: firestore
|
||||
project: my-project
|
||||
foo: bar
|
||||
`,
|
||||
err: "unable to parse source \"my-firestore\" as \"firestore\": [1:1] unknown field \"foo\"\n> 1 | foo: bar\n ^\n 2 | kind: firestore\n 3 | project: my-project",
|
||||
},
|
||||
{
|
||||
desc: "missing required field",
|
||||
in: `
|
||||
sources:
|
||||
my-firestore:
|
||||
kind: firestore
|
||||
database: my-database
|
||||
`,
|
||||
err: "unable to parse source \"my-firestore\" as \"firestore\": Key: 'Config.Project' Error:Field validation for 'Project' failed on the 'required' tag",
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
got := struct {
|
||||
Sources server.SourceConfigs `yaml:"sources"`
|
||||
}{}
|
||||
// Parse contents
|
||||
err := yaml.Unmarshal(testutils.FormatYaml(tc.in), &got)
|
||||
if err == nil {
|
||||
t.Fatalf("expect parsing to fail")
|
||||
}
|
||||
errStr := err.Error()
|
||||
if errStr != tc.err {
|
||||
t.Fatalf("unexpected error: got %q, want %q", errStr, tc.err)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -18,6 +18,7 @@ import (
|
||||
"context"
|
||||
"database/sql"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
_ "github.com/go-sql-driver/mysql"
|
||||
"github.com/goccy/go-yaml"
|
||||
@@ -45,13 +46,14 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources
|
||||
}
|
||||
|
||||
type Config struct {
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Host string `yaml:"host" validate:"required"`
|
||||
Port string `yaml:"port" validate:"required"`
|
||||
User string `yaml:"user" validate:"required"`
|
||||
Password string `yaml:"password" validate:"required"`
|
||||
Database string `yaml:"database" validate:"required"`
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Host string `yaml:"host" validate:"required"`
|
||||
Port string `yaml:"port" validate:"required"`
|
||||
User string `yaml:"user" validate:"required"`
|
||||
Password string `yaml:"password" validate:"required"`
|
||||
Database string `yaml:"database" validate:"required"`
|
||||
QueryTimeout string `yaml:"queryTimeout"`
|
||||
}
|
||||
|
||||
func (r Config) SourceConfigKind() string {
|
||||
@@ -59,7 +61,7 @@ func (r Config) SourceConfigKind() string {
|
||||
}
|
||||
|
||||
func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
|
||||
pool, err := initMySQLConnectionPool(ctx, tracer, r.Name, r.Host, r.Port, r.User, r.Password, r.Database)
|
||||
pool, err := initMySQLConnectionPool(ctx, tracer, r.Name, r.Host, r.Port, r.User, r.Password, r.Database, r.QueryTimeout)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to create pool: %w", err)
|
||||
}
|
||||
@@ -93,7 +95,7 @@ func (s *Source) MySQLPool() *sql.DB {
|
||||
return s.Pool
|
||||
}
|
||||
|
||||
func initMySQLConnectionPool(ctx context.Context, tracer trace.Tracer, name, host, port, user, pass, dbname string) (*sql.DB, error) {
|
||||
func initMySQLConnectionPool(ctx context.Context, tracer trace.Tracer, name, host, port, user, pass, dbname, queryTimeout string) (*sql.DB, error) {
|
||||
//nolint:all // Reassigned ctx
|
||||
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
|
||||
defer span.End()
|
||||
@@ -101,6 +103,15 @@ func initMySQLConnectionPool(ctx context.Context, tracer trace.Tracer, name, hos
|
||||
// Configure the driver to connect to the database
|
||||
dsn := fmt.Sprintf("%s:%s@tcp(%s:%s)/%s?parseTime=true", user, pass, host, port, dbname)
|
||||
|
||||
// Add query timeout to DSN if specified
|
||||
if queryTimeout != "" {
|
||||
timeout, err := time.ParseDuration(queryTimeout)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid queryTimeout %q: %w", queryTimeout, err)
|
||||
}
|
||||
dsn += "&readTimeout=" + timeout.String()
|
||||
}
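// Illustration (values assumed, not from this change): with user "app", password
// "secret", host "10.0.0.5", port "3306", database "orders", and queryTimeout "45s",
// the DSN built above becomes:
//
//   app:secret@tcp(10.0.0.5:3306)/orders?parseTime=true&readTimeout=45s
//
// time.ParseDuration also accepts forms like "1m30s"; a bare number such as "45"
// has no unit and is rejected before the pool is opened.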
|
||||
|
||||
// Interact with the driver directly as you normally would
|
||||
pool, err := sql.Open("mysql", dsn)
|
||||
if err != nil {
|
||||
|
||||
@@ -54,6 +54,32 @@ func TestParseFromYamlCloudSQLMySQL(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "with query timeout",
|
||||
in: `
|
||||
sources:
|
||||
my-mysql-instance:
|
||||
kind: mysql
|
||||
host: 0.0.0.0
|
||||
port: my-port
|
||||
database: my_db
|
||||
user: my_user
|
||||
password: my_pass
|
||||
queryTimeout: 45s
|
||||
`,
|
||||
want: server.SourceConfigs{
|
||||
"my-mysql-instance": mysql.Config{
|
||||
Name: "my-mysql-instance",
|
||||
Kind: mysql.SourceKind,
|
||||
Host: "0.0.0.0",
|
||||
Port: "my-port",
|
||||
Database: "my_db",
|
||||
User: "my_user",
|
||||
Password: "my_pass",
|
||||
QueryTimeout: "45s",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
|
||||
@@ -12,7 +12,7 @@
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package server
|
||||
package telemetry
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
@@ -28,7 +28,7 @@ import (
|
||||
"go.opentelemetry.io/otel/sdk/metric"
|
||||
"go.opentelemetry.io/otel/sdk/resource"
|
||||
tracesdk "go.opentelemetry.io/otel/sdk/trace"
|
||||
semconv "go.opentelemetry.io/otel/semconv/v1.26.0"
|
||||
semconv "go.opentelemetry.io/otel/semconv/v1.34.0"
|
||||
)
|
||||
|
||||
// setupOTelSDK bootstraps the OpenTelemetry pipeline.
|
||||
|
||||
@@ -15,9 +15,12 @@
|
||||
package testutils
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/googleapis/genai-toolbox/internal/log"
|
||||
@@ -42,3 +45,63 @@ func ContextWithNewLogger() (context.Context, error) {
|
||||
}
|
||||
return util.WithLogger(ctx, logger), nil
|
||||
}
|
||||
|
||||
// WaitForString waits until the server logs a single line that matches the provided regex.
|
||||
// It returns whatever output the server has produced so far.
|
||||
func WaitForString(ctx context.Context, re *regexp.Regexp, pr io.ReadCloser) (string, error) {
|
||||
in := bufio.NewReader(pr)
|
||||
ctx, cancel := context.WithCancel(ctx)
|
||||
defer cancel()
|
||||
|
||||
// read lines in background, sending result of each read over a channel
|
||||
// this allows us to use in.ReadString without blocking
|
||||
type result struct {
|
||||
s string
|
||||
err error
|
||||
}
|
||||
output := make(chan result)
|
||||
go func() {
|
||||
defer close(output)
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
// if the context is canceled, the caller's select will return the ctx error
|
||||
// so we can just exit the goroutine here
|
||||
return
|
||||
default:
|
||||
// otherwise read a line from the output
|
||||
s, err := in.ReadString('\n')
|
||||
if err != nil {
|
||||
output <- result{err: err}
|
||||
return
|
||||
}
|
||||
output <- result{s: s}
|
||||
// if that last string matched, exit the goroutine
|
||||
if re.MatchString(s) {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
// collect the output until the ctx is canceled, an error was hit,
|
||||
// or a match was found (indicated by the channel being closed)
|
||||
var sb strings.Builder
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
// if ctx is done, return that error
|
||||
return sb.String(), ctx.Err()
|
||||
case o, ok := <-output:
|
||||
if !ok {
|
||||
// match was found!
|
||||
return sb.String(), nil
|
||||
}
|
||||
if o.err != nil {
|
||||
// error was found!
|
||||
return sb.String(), o.err
|
||||
}
|
||||
sb.WriteString(o.s)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -156,7 +156,7 @@ type Tool struct {
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, error) {
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
|
||||
sliceParams := params.AsSlice()
|
||||
allParamValues := make([]any, len(sliceParams)+1)
|
||||
allParamValues[0] = fmt.Sprintf("%s", sliceParams[0]) // nl_question
|
||||
|
||||
@@ -23,6 +23,7 @@ import (
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
bigqueryds "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
bigqueryrestapi "google.golang.org/api/bigquery/v2"
|
||||
"google.golang.org/api/iterator"
|
||||
)
|
||||
|
||||
@@ -44,6 +45,7 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.T
|
||||
|
||||
type compatibleSource interface {
|
||||
BigQueryClient() *bigqueryapi.Client
|
||||
BigQueryRestService() *bigqueryrestapi.Service
|
||||
}
|
||||
|
||||
// validate compatible sources are still compatible
|
||||
@@ -95,6 +97,7 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
Client: s.BigQueryClient(),
|
||||
RestService: s.BigQueryRestService(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
}
|
||||
@@ -110,29 +113,58 @@ type Tool struct {
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
Client *bigqueryapi.Client
|
||||
RestService *bigqueryrestapi.Service
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, error) {
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
|
||||
sliceParams := params.AsSlice()
|
||||
sql, ok := sliceParams[0].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unable to get cast %s", sliceParams[0])
|
||||
}
|
||||
|
||||
dryRunJob, err := dryRunQuery(ctx, t.RestService, t.Client.Project(), t.Client.Location, sql)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("query validation failed during dry run: %w", err)
|
||||
}
|
||||
|
||||
statementType := dryRunJob.Statistics.Query.StatementType
|
||||
// JobStatistics.QueryStatistics.StatementType
|
||||
query := t.Client.Query(sql)
|
||||
query.Location = t.Client.Location
|
||||
|
||||
// This block handles Data Manipulation Language (DML) and Data Definition Language (DDL) statements.
|
||||
// These statements (e.g., INSERT, UPDATE, CREATE TABLE) do not return a row set.
|
||||
// Instead, we execute them as a job, wait for completion, and return a success
|
||||
// message, including the number of affected rows for DML operations.
|
||||
if statementType != "SELECT" {
|
||||
job, err := query.Run(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to start DML/DDL job: %w", err)
|
||||
}
|
||||
status, err := job.Wait(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to wait for DML/DDL job to complete: %w", err)
|
||||
}
|
||||
if err := status.Err(); err != nil {
|
||||
return nil, fmt.Errorf("DML/DDL job failed with error: %w", err)
|
||||
}
|
||||
return "Operation completed successfully.", nil
|
||||
}
|
||||
|
||||
// This block handles SELECT statements, which return a row set.
|
||||
// We iterate through the results, convert each row into a map of
|
||||
// column names to values, and return the collection of rows.
|
||||
var out []any
|
||||
it, err := query.Read(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to execute query: %w", err)
|
||||
}
|
||||
|
||||
var out []any
|
||||
for {
|
||||
var row map[string]bigqueryapi.Value
|
||||
err := it.Next(&row)
|
||||
err = it.Next(&row)
|
||||
if err == iterator.Done {
|
||||
break
|
||||
}
|
||||
@@ -145,7 +177,9 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, erro
|
||||
}
|
||||
out = append(out, vMap)
|
||||
}
|
||||
|
||||
if out == nil {
|
||||
return "The query returned 0 rows.", nil
|
||||
}
|
||||
return out, nil
|
||||
}
|
||||
|
||||
@@ -164,3 +198,27 @@ func (t Tool) McpManifest() tools.McpManifest {
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
|
||||
}
|
||||
|
||||
// dryRunQuery performs a dry run of the SQL query to validate it and get metadata.
|
||||
func dryRunQuery(ctx context.Context, restService *bigqueryrestapi.Service, projectID string, location string, sql string) (*bigqueryrestapi.Job, error) {
|
||||
useLegacySql := false
|
||||
jobToInsert := &bigqueryrestapi.Job{
|
||||
JobReference: &bigqueryrestapi.JobReference{
|
||||
ProjectId: projectID,
|
||||
Location: location,
|
||||
},
|
||||
Configuration: &bigqueryrestapi.JobConfiguration{
|
||||
DryRun: true,
|
||||
Query: &bigqueryrestapi.JobConfigurationQuery{
|
||||
Query: sql,
|
||||
UseLegacySql: &useLegacySql,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
insertResponse, err := restService.Jobs.Insert(projectID, jobToInsert).Context(ctx).Do()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to insert dry run job: %w", err)
|
||||
}
|
||||
return insertResponse, nil
|
||||
}
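A condensed sketch, with the function name runChecked and print statements standing in for the real handling, of the flow Invoke follows above: dry-run the SQL through the REST service, then branch on the statement type reported by the dry-run job.

```go
// Sketch only; it condenses dryRunQuery plus the SELECT vs. DML/DDL branch shown above.
package sketch

import (
	"context"
	"fmt"

	bigqueryapi "cloud.google.com/go/bigquery"
	bigqueryrestapi "google.golang.org/api/bigquery/v2"
)

func runChecked(ctx context.Context, client *bigqueryapi.Client, rest *bigqueryrestapi.Service, sql string) error {
	useLegacy := false
	job := &bigqueryrestapi.Job{
		JobReference: &bigqueryrestapi.JobReference{ProjectId: client.Project(), Location: client.Location},
		Configuration: &bigqueryrestapi.JobConfiguration{
			DryRun: true,
			Query:  &bigqueryrestapi.JobConfigurationQuery{Query: sql, UseLegacySql: &useLegacy},
		},
	}
	dry, err := rest.Jobs.Insert(client.Project(), job).Context(ctx).Do()
	if err != nil {
		return fmt.Errorf("dry run failed: %w", err)
	}
	if dry.Statistics == nil || dry.Statistics.Query == nil || dry.Statistics.Query.StatementType != "SELECT" {
		// DML/DDL: run as a job, wait for completion, and report success only.
		fmt.Println("non-SELECT statement")
		return nil
	}
	// SELECT: read rows with client.Query(sql).Read(ctx) and collect them.
	fmt.Println("SELECT statement")
	return nil
}
```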
|
||||
|
||||
@@ -26,6 +26,8 @@ import (
|
||||
)
|
||||
|
||||
const kind string = "bigquery-get-dataset-info"
|
||||
const projectKey string = "project"
|
||||
const datasetKey string = "dataset"
|
||||
|
||||
func init() {
|
||||
if !tools.Register(kind, newConfig) {
|
||||
@@ -78,8 +80,9 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
|
||||
}
|
||||
|
||||
datasetParameter := tools.NewStringParameter("dataset", "The dataset to get metadata information.")
|
||||
parameters := tools.Parameters{datasetParameter}
|
||||
projectParameter := tools.NewStringParameterWithDefault(projectKey, s.BigQueryClient().Project(), "The Google Cloud project ID containing the dataset.")
|
||||
datasetParameter := tools.NewStringParameter(datasetKey, "The dataset to get metadata information.")
|
||||
parameters := tools.Parameters{projectParameter, datasetParameter}
|
||||
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: cfg.Name,
|
||||
@@ -115,22 +118,26 @@ type Tool struct {
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, error) {
|
||||
|
||||
sliceParams := params.AsSlice()
|
||||
datasetId, ok := sliceParams[0].(string)
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
|
||||
mapParams := params.AsMap()
|
||||
projectId, ok := mapParams[projectKey].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unable to get cast %s", sliceParams[0])
|
||||
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", projectKey)
|
||||
}
|
||||
|
||||
dsHandle := t.Client.Dataset(datasetId)
|
||||
datasetId, ok := mapParams[datasetKey].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", datasetKey)
|
||||
}
|
||||
|
||||
dsHandle := t.Client.DatasetInProject(projectId, datasetId)
|
||||
|
||||
metadata, err := dsHandle.Metadata(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get metadata for dataset %s (in project %s): %w", datasetId, t.Client.Project(), err)
|
||||
}
|
||||
|
||||
return []any{metadata}, nil
|
||||
return metadata, nil
|
||||
}
|
||||
|
||||
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
|
||||
|
||||
@@ -26,6 +26,9 @@ import (
|
||||
)
|
||||
|
||||
const kind string = "bigquery-get-table-info"
|
||||
const projectKey string = "project"
|
||||
const datasetKey string = "dataset"
|
||||
const tableKey string = "table"
|
||||
|
||||
func init() {
|
||||
if !tools.Register(kind, newConfig) {
|
||||
@@ -78,9 +81,10 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
|
||||
}
|
||||
|
||||
datasetParameter := tools.NewStringParameter("dataset", "The table's parent dataset.")
|
||||
tableParameter := tools.NewStringParameter("table", "The table to get metadata information.")
|
||||
parameters := tools.Parameters{datasetParameter, tableParameter}
|
||||
projectParameter := tools.NewStringParameterWithDefault(projectKey, s.BigQueryClient().Project(), "The Google Cloud project ID containing the dataset and table.")
|
||||
datasetParameter := tools.NewStringParameter(datasetKey, "The table's parent dataset.")
|
||||
tableParameter := tools.NewStringParameter(tableKey, "The table to get metadata information.")
|
||||
parameters := tools.Parameters{projectParameter, datasetParameter, tableParameter}
|
||||
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: cfg.Name,
|
||||
@@ -116,26 +120,32 @@ type Tool struct {
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, error) {
|
||||
sliceParams := params.AsSlice()
|
||||
datasetId, ok := sliceParams[0].(string)
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
|
||||
mapParams := params.AsMap()
|
||||
projectId, ok := mapParams[projectKey].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unable to get cast %s", sliceParams[0])
|
||||
}
|
||||
tableId, ok := sliceParams[1].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unable to get cast %s", sliceParams[1])
|
||||
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", projectKey)
|
||||
}
|
||||
|
||||
dsHandle := t.Client.Dataset(datasetId)
|
||||
datasetId, ok := mapParams[datasetKey].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", datasetKey)
|
||||
}
|
||||
|
||||
tableId, ok := mapParams[tableKey].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", tableKey)
|
||||
}
|
||||
|
||||
dsHandle := t.Client.DatasetInProject(projectId, datasetId)
|
||||
tableHandle := dsHandle.Table(tableId)
|
||||
|
||||
metadata, err := tableHandle.Metadata(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get metadata for table %s.%s.%s: %w", t.Client.Project(), datasetId, tableId, err)
|
||||
return nil, fmt.Errorf("failed to get metadata for table %s.%s.%s: %w", projectId, datasetId, tableId, err)
|
||||
}
|
||||
|
||||
return []any{metadata}, nil
|
||||
return metadata, nil
|
||||
}
|
||||
|
||||
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
|
||||
|
||||
@@ -27,6 +27,7 @@ import (
|
||||
)
|
||||
|
||||
const kind string = "bigquery-list-dataset-ids"
|
||||
const projectKey string = "project"
|
||||
|
||||
func init() {
|
||||
if !tools.Register(kind, newConfig) {
|
||||
@@ -79,7 +80,9 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
|
||||
}
|
||||
|
||||
parameters := tools.Parameters{}
|
||||
projectParameter := tools.NewStringParameterWithDefault(projectKey, s.BigQueryClient().Project(), "The Google Cloud project to list dataset ids.")
|
||||
|
||||
parameters := tools.Parameters{projectParameter}
|
||||
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: cfg.Name,
|
||||
@@ -115,8 +118,14 @@ type Tool struct {
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, error) {
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
|
||||
mapParams := params.AsMap()
|
||||
projectId, ok := mapParams[projectKey].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", projectKey)
|
||||
}
|
||||
datasetIterator := t.Client.Datasets(ctx)
|
||||
datasetIterator.ProjectID = projectId
|
||||
|
||||
var datasetIds []any
|
||||
for {
|
||||
|
||||
@@ -27,6 +27,8 @@ import (
|
||||
)
|
||||
|
||||
const kind string = "bigquery-list-table-ids"
|
||||
const projectKey string = "project"
|
||||
const datasetKey string = "dataset"
|
||||
|
||||
func init() {
|
||||
if !tools.Register(kind, newConfig) {
|
||||
@@ -79,8 +81,9 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
|
||||
}
|
||||
|
||||
datasetParameter := tools.NewStringParameter("dataset", "The dataset to list table ids.")
|
||||
parameters := tools.Parameters{datasetParameter}
|
||||
projectParameter := tools.NewStringParameterWithDefault(projectKey, s.BigQueryClient().Project(), "The Google Cloud project ID containing the dataset.")
|
||||
datasetParameter := tools.NewStringParameter(datasetKey, "The dataset to list table ids.")
|
||||
parameters := tools.Parameters{projectParameter, datasetParameter}
|
||||
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: cfg.Name,
|
||||
@@ -116,15 +119,19 @@ type Tool struct {
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, error) {
|
||||
|
||||
sliceParams := params.AsSlice()
|
||||
datasetId, ok := sliceParams[0].(string)
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
|
||||
mapParams := params.AsMap()
|
||||
projectId, ok := mapParams[projectKey].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unable to get cast %s", sliceParams[0])
|
||||
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", projectKey)
|
||||
}
|
||||
|
||||
dsHandle := t.Client.Dataset(datasetId)
|
||||
datasetId, ok := mapParams[datasetKey].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", datasetKey)
|
||||
}
|
||||
|
||||
dsHandle := t.Client.DatasetInProject(projectId, datasetId)
|
||||
|
||||
var tableIds []any
|
||||
tableIterator := dsHandle.Tables(ctx)
|
||||
|
||||
@@ -124,7 +124,7 @@ type Tool struct {
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, error) {
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
|
||||
namedArgs := make([]bigqueryapi.QueryParameter, 0, len(params))
|
||||
paramsMap := params.AsMap()
|
||||
newStatement, err := tools.ResolveTemplateParams(t.TemplateParameters, t.Statement, paramsMap)
|
||||
@@ -139,13 +139,17 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, erro
|
||||
// BigQuery's QueryParameter only accepts typed slices as input
|
||||
// This checks if the param is an array.
|
||||
// If yes, convert []any to a typed slice (e.g. []string, []int64)
|
||||
switch arrayParam := value.(type) {
|
||||
case []any:
|
||||
switch arrayParam := p.(type) {
|
||||
case *tools.ArrayParameter:
|
||||
arrayParamValue, ok := value.([]any)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unable to convert parameter `%s` to []any %w", name, err)
|
||||
}
|
||||
itemType := arrayParam.GetItems().GetType()
|
||||
var err error
|
||||
itemType := p.McpManifest().Items.Type
|
||||
value, err = convertAnySliceToTyped(arrayParam, itemType, name)
|
||||
value, err = tools.ConvertAnySliceToTyped(arrayParamValue, itemType)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to convert []any to typed slice: %w", err)
|
||||
return nil, fmt.Errorf("unable to convert parameter `%s` from []any to typed slice: %w", name, err)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -205,47 +209,3 @@ func (t Tool) McpManifest() tools.McpManifest {
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
|
||||
}
|
||||
|
||||
func convertAnySliceToTyped(s []any, itemType, paramName string) (any, error) {
|
||||
var typedSlice any
|
||||
switch itemType {
|
||||
case "string":
|
||||
typedSlice := make([]string, len(s))
|
||||
for j, item := range s {
|
||||
if s, ok := item.(string); ok {
|
||||
typedSlice[j] = s
|
||||
} else {
|
||||
return nil, fmt.Errorf("parameter '%s': expected item at index %d to be string, got %T", paramName, j, item)
|
||||
}
|
||||
}
|
||||
case "integer":
|
||||
typedSlice := make([]int64, len(s))
|
||||
for j, item := range s {
|
||||
i, ok := item.(int)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("parameter '%s': expected item at index %d to be integer, got %T", paramName, j, item)
|
||||
}
|
||||
typedSlice[j] = int64(i)
|
||||
}
|
||||
case "float":
|
||||
typedSlice := make([]float64, len(s))
|
||||
for j, item := range s {
|
||||
if f, ok := item.(float64); ok {
|
||||
typedSlice[j] = f
|
||||
} else {
|
||||
return nil, fmt.Errorf("parameter '%s': expected item at index %d to be float, got %T", paramName, j, item)
|
||||
}
|
||||
}
|
||||
case "boolean":
|
||||
typedSlice := make([]bool, len(s))
|
||||
for j, item := range s {
|
||||
if b, ok := item.(bool); ok {
|
||||
typedSlice[j] = b
|
||||
} else {
|
||||
return nil, fmt.Errorf("parameter '%s': expected item at index %d to be boolean, got %T", paramName, j, item)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
return typedSlice, nil
|
||||
}
|
||||
|
||||
@@ -122,32 +122,46 @@ type Tool struct {
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func getMapParamsType(tparams tools.Parameters, params tools.ParamValues) (map[string]bigtable.SQLType, error) {
|
||||
paramTypeMap := make(map[string]string)
|
||||
for _, p := range tparams {
|
||||
paramTypeMap[p.GetName()] = p.GetType()
|
||||
func getBigtableType(paramType string) (bigtable.SQLType, error) {
|
||||
switch paramType {
|
||||
case "boolean":
|
||||
return bigtable.BoolSQLType{}, nil
|
||||
case "string":
|
||||
return bigtable.StringSQLType{}, nil
|
||||
case "integer":
|
||||
return bigtable.Int64SQLType{}, nil
|
||||
case "float":
|
||||
return bigtable.Float64SQLType{}, nil
|
||||
case "array":
|
||||
return bigtable.ArraySQLType{}, nil
|
||||
default:
|
||||
return nil, fmt.Errorf("unknow param type %s", paramType)
|
||||
}
|
||||
|
||||
btParams := make(map[string]bigtable.SQLType)
|
||||
for _, p := range params {
|
||||
switch paramTypeMap[p.Name] {
|
||||
case "boolean":
|
||||
btParams[p.Name] = bigtable.BoolSQLType{}
|
||||
case "string":
|
||||
btParams[p.Name] = bigtable.StringSQLType{}
|
||||
case "integer":
|
||||
btParams[p.Name] = bigtable.Int64SQLType{}
|
||||
case "float":
|
||||
btParams[p.Name] = bigtable.Float64SQLType{}
|
||||
case "array":
|
||||
btParams[p.Name] = bigtable.ArraySQLType{}
|
||||
}
|
||||
}
|
||||
|
||||
return btParams, nil
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, error) {
|
||||
func getMapParamsType(tparams tools.Parameters, params tools.ParamValues) (map[string]bigtable.SQLType, error) {
|
||||
btParamTypes := make(map[string]bigtable.SQLType)
|
||||
for _, p := range tparams {
|
||||
if p.GetType() == "array" {
|
||||
itemType, err := getBigtableType(p.Manifest().Items.Type)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
btParamTypes[p.GetName()] = bigtable.ArraySQLType{
|
||||
ElemType: itemType,
|
||||
}
|
||||
continue
|
||||
}
|
||||
paramType, err := getBigtableType(p.GetType())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
btParamTypes[p.GetName()] = paramType
|
||||
}
|
||||
return btParamTypes, nil
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
|
||||
paramsMap := params.AsMap()
|
||||
newStatement, err := tools.ResolveTemplateParams(t.TemplateParameters, t.Statement, paramsMap)
|
||||
if err != nil {
|
||||
|
||||
@@ -15,6 +15,7 @@
|
||||
package tools
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
)
|
||||
|
||||
@@ -23,3 +24,50 @@ var validName = regexp.MustCompile(`^[a-zA-Z0-9_-]*$`)
|
||||
func IsValidName(s string) bool {
|
||||
return validName.MatchString(s)
|
||||
}
|
||||
|
||||
func ConvertAnySliceToTyped(s []any, itemType string) (any, error) {
|
||||
var typedSlice any
|
||||
switch itemType {
|
||||
case "string":
|
||||
tempSlice := make([]string, len(s))
|
||||
for j, item := range s {
|
||||
s, ok := item.(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("expected item at index %d to be string, got %T", j, item)
|
||||
}
|
||||
tempSlice[j] = s
|
||||
}
|
||||
typedSlice = tempSlice
|
||||
case "integer":
|
||||
tempSlice := make([]int64, len(s))
|
||||
for j, item := range s {
|
||||
i, ok := item.(int)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("expected item at index %d to be integer, got %T", j, item)
|
||||
}
|
||||
tempSlice[j] = int64(i)
|
||||
}
|
||||
typedSlice = tempSlice
|
||||
case "float":
|
||||
tempSlice := make([]float64, len(s))
|
||||
for j, item := range s {
|
||||
f, ok := item.(float64)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("expected item at index %d to be float, got %T", j, item)
|
||||
}
|
||||
tempSlice[j] = f
|
||||
}
|
||||
typedSlice = tempSlice
|
||||
case "boolean":
|
||||
tempSlice := make([]bool, len(s))
|
||||
for j, item := range s {
|
||||
b, ok := item.(bool)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("expected item at index %d to be boolean, got %T", j, item)
|
||||
}
|
||||
tempSlice[j] = b
|
||||
}
|
||||
typedSlice = tempSlice
|
||||
}
|
||||
return typedSlice, nil
|
||||
}
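A minimal usage sketch of the shared helper defined above; the input slice stands in for values decoded from a request body, and the "integer", "float", and "boolean" branches behave analogously. Since the tools package is internal, the sketch is framed as a test inside the repository.

```go
// Sketch of a usage example; the test name is an assumption.
package tools_test

import (
	"fmt"
	"testing"

	"github.com/googleapis/genai-toolbox/internal/tools"
)

func TestConvertAnySliceToTypedSketch(t *testing.T) {
	raw := []any{"users/alice", "users/bob"} // values as they arrive after decoding
	typed, err := tools.ConvertAnySliceToTyped(raw, "string")
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	paths, ok := typed.([]string)
	if !ok || len(paths) != 2 {
		t.Fatalf("unexpected result: %#v", typed)
	}
	fmt.Println(paths[0]) // users/alice
}
```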
|
||||
|
||||
@@ -125,7 +125,7 @@ type Tool struct {
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, error) {
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
|
||||
namedParamsMap := params.AsMap()
|
||||
newStatement, err := tools.ResolveTemplateParams(t.TemplateParameters, t.Statement, namedParamsMap)
|
||||
if err != nil {
|
||||
|
||||
@@ -120,7 +120,7 @@ type Tool struct {
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, error) {
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
|
||||
paramsMap := params.AsMapWithDollarPrefix()
|
||||
|
||||
resp, err := t.DgraphClient.ExecuteQuery(t.Statement, paramsMap, t.IsQuery, t.Timeout)
|
||||
@@ -132,7 +132,6 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, erro
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var out []any
|
||||
var result struct {
|
||||
Data map[string]interface{} `json:"data"`
|
||||
}
|
||||
@@ -140,9 +139,8 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, erro
|
||||
if err := json.Unmarshal(resp, &result); err != nil {
|
||||
return nil, fmt.Errorf("error parsing JSON: %v", err)
|
||||
}
|
||||
out = append(out, result.Data)
|
||||
|
||||
return out, nil
|
||||
return result.Data, nil
|
||||
}
|
||||
|
||||
func (t Tool) ParseParams(data map[string]any, claimsMap map[string]map[string]any) (tools.ParamValues, error) {
|
||||
|
||||
@@ -0,0 +1,194 @@
|
||||
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package firestoredeletedocuments
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
firestoreapi "cloud.google.com/go/firestore"
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
firestoreds "github.com/googleapis/genai-toolbox/internal/sources/firestore"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
)
|
||||
|
||||
const kind string = "firestore-delete-documents"
|
||||
const documentPathsKey string = "documentPaths"
|
||||
|
||||
func init() {
|
||||
if !tools.Register(kind, newConfig) {
|
||||
panic(fmt.Sprintf("tool kind %q already registered", kind))
|
||||
}
|
||||
}
|
||||
|
||||
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
|
||||
actual := Config{Name: name}
|
||||
if err := decoder.DecodeContext(ctx, &actual); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return actual, nil
|
||||
}
|
||||
|
||||
type compatibleSource interface {
|
||||
FirestoreClient() *firestoreapi.Client
|
||||
}
|
||||
|
||||
// validate compatible sources are still compatible
|
||||
var _ compatibleSource = &firestoreds.Source{}
|
||||
|
||||
var compatibleSources = [...]string{firestoreds.SourceKind}
|
||||
|
||||
type Config struct {
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Source string `yaml:"source" validate:"required"`
|
||||
Description string `yaml:"description" validate:"required"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
}
|
||||
|
||||
// validate interface
|
||||
var _ tools.ToolConfig = Config{}
|
||||
|
||||
func (cfg Config) ToolConfigKind() string {
|
||||
return kind
|
||||
}
|
||||
|
||||
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
|
||||
// verify source exists
|
||||
rawS, ok := srcs[cfg.Source]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("no source named %q configured", cfg.Source)
|
||||
}
|
||||
|
||||
// verify the source is compatible
|
||||
s, ok := rawS.(compatibleSource)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
|
||||
}
|
||||
|
||||
documentPathsParameter := tools.NewArrayParameter(documentPathsKey, "Array of document paths to delete from Firestore.", tools.NewStringParameter("item", "Document path"))
|
||||
parameters := tools.Parameters{documentPathsParameter}
|
||||
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: cfg.Name,
|
||||
Description: cfg.Description,
|
||||
InputSchema: parameters.McpManifest(),
|
||||
}
|
||||
|
||||
// finish tool setup
|
||||
t := Tool{
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
Client: s.FirestoreClient(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
|
||||
// validate interface
|
||||
var _ tools.Tool = Tool{}
|
||||
|
||||
type Tool struct {
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
|
||||
Client *firestoreapi.Client
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
|
||||
mapParams := params.AsMap()
|
||||
documentPathsRaw, ok := mapParams[documentPathsKey].([]any)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected an array", documentPathsKey)
|
||||
}
|
||||
|
||||
if len(documentPathsRaw) == 0 {
|
||||
return nil, fmt.Errorf("'%s' parameter cannot be empty", documentPathsKey)
|
||||
}
|
||||
|
||||
// Use ConvertAnySliceToTyped to convert the slice
|
||||
typedSlice, err := tools.ConvertAnySliceToTyped(documentPathsRaw, "string")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to convert document paths: %w", err)
|
||||
}
|
||||
|
||||
documentPaths, ok := typedSlice.([]string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unexpected type conversion error for document paths")
|
||||
}
|
||||
|
||||
// Create a BulkWriter to handle multiple deletions efficiently
|
||||
bulkWriter := t.Client.BulkWriter(ctx)
|
||||
|
||||
// Keep track of jobs for each document
|
||||
jobs := make([]*firestoreapi.BulkWriterJob, len(documentPaths))
|
||||
|
||||
// Add all delete operations to the BulkWriter
|
||||
for i, path := range documentPaths {
|
||||
docRef := t.Client.Doc(path)
|
||||
job, err := bulkWriter.Delete(docRef)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to add delete operation for document %q: %w", path, err)
|
||||
}
|
||||
jobs[i] = job
|
||||
}
|
||||
|
||||
// End the BulkWriter to execute all operations
|
||||
bulkWriter.End()
|
||||
|
||||
// Collect results
|
||||
results := make([]any, len(documentPaths))
|
||||
for i, job := range jobs {
|
||||
docData := make(map[string]any)
|
||||
docData["path"] = documentPaths[i]
|
||||
|
||||
// Wait for the job to complete and get the result
|
||||
_, err := job.Results()
|
||||
if err != nil {
|
||||
docData["success"] = false
|
||||
docData["error"] = err.Error()
|
||||
} else {
|
||||
docData["success"] = true
|
||||
}
|
||||
|
||||
results[i] = docData
|
||||
}
|
||||
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
|
||||
return tools.ParseParams(t.Parameters, data, claims)
|
||||
}
|
||||
|
||||
func (t Tool) Manifest() tools.Manifest {
|
||||
return t.manifest
|
||||
}
|
||||
|
||||
func (t Tool) McpManifest() tools.McpManifest {
|
||||
return t.mcpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
|
||||
}
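A stripped-down sketch, with assumed document paths, of the BulkWriter pattern the tool's Invoke uses: queue the deletes, call End() to flush them, then read each job's result.

```go
// Sketch only; document paths are placeholders.
package sketch

import (
	"context"
	"fmt"

	firestoreapi "cloud.google.com/go/firestore"
)

func deleteAll(ctx context.Context, client *firestoreapi.Client, paths []string) {
	bw := client.BulkWriter(ctx)
	jobs := make([]*firestoreapi.BulkWriterJob, len(paths))
	for i, p := range paths {
		job, err := bw.Delete(client.Doc(p))
		if err != nil {
			fmt.Println("queue failed:", p, err)
			continue
		}
		jobs[i] = job
	}
	bw.End() // flush all queued writes
	for i, job := range jobs {
		if job == nil {
			continue
		}
		if _, err := job.Results(); err != nil {
			fmt.Println("delete failed:", paths[i], err)
		}
	}
}
```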
|
||||
@@ -0,0 +1,156 @@
|
||||
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package firestoredeletedocuments_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/testutils"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoredeletedocuments"
|
||||
)
|
||||
|
||||
func TestParseFromYamlFirestoreDeleteDocuments(t *testing.T) {
|
||||
ctx, err := testutils.ContextWithNewLogger()
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
tcs := []struct {
|
||||
desc string
|
||||
in string
|
||||
want server.ToolConfigs
|
||||
}{
|
||||
{
|
||||
desc: "basic example",
|
||||
in: `
|
||||
tools:
|
||||
delete_docs_tool:
|
||||
kind: firestore-delete-documents
|
||||
source: my-firestore-instance
|
||||
description: Delete documents from Firestore by paths
|
||||
`,
|
||||
want: server.ToolConfigs{
|
||||
"delete_docs_tool": firestoredeletedocuments.Config{
|
||||
Name: "delete_docs_tool",
|
||||
Kind: "firestore-delete-documents",
|
||||
Source: "my-firestore-instance",
|
||||
Description: "Delete documents from Firestore by paths",
|
||||
AuthRequired: []string{},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "with auth requirements",
|
||||
in: `
|
||||
tools:
|
||||
secure_delete_docs:
|
||||
kind: firestore-delete-documents
|
||||
source: prod-firestore
|
||||
description: Delete documents with authentication
|
||||
authRequired:
|
||||
- google-auth-service
|
||||
- api-key-service
|
||||
`,
|
||||
want: server.ToolConfigs{
|
||||
"secure_delete_docs": firestoredeletedocuments.Config{
|
||||
Name: "secure_delete_docs",
|
||||
Kind: "firestore-delete-documents",
|
||||
Source: "prod-firestore",
|
||||
Description: "Delete documents with authentication",
|
||||
AuthRequired: []string{"google-auth-service", "api-key-service"},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
got := struct {
|
||||
Tools server.ToolConfigs `yaml:"tools"`
|
||||
}{}
|
||||
// Parse contents
|
||||
err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to unmarshal: %s", err)
|
||||
}
|
||||
if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
|
||||
t.Fatalf("incorrect parse: diff %v", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseFromYamlMultipleTools(t *testing.T) {
|
||||
ctx, err := testutils.ContextWithNewLogger()
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
in := `
|
||||
tools:
|
||||
delete_user_docs:
|
||||
kind: firestore-delete-documents
|
||||
source: users-firestore
|
||||
description: Delete user documents
|
||||
authRequired:
|
||||
- user-auth
|
||||
delete_product_docs:
|
||||
kind: firestore-delete-documents
|
||||
source: products-firestore
|
||||
description: Delete product documents
|
||||
delete_order_docs:
|
||||
kind: firestore-delete-documents
|
||||
source: orders-firestore
|
||||
description: Delete order documents
|
||||
authRequired:
|
||||
- user-auth
|
||||
- admin-auth
|
||||
`
|
||||
want := server.ToolConfigs{
|
||||
"delete_user_docs": firestoredeletedocuments.Config{
|
||||
Name: "delete_user_docs",
|
||||
Kind: "firestore-delete-documents",
|
||||
Source: "users-firestore",
|
||||
Description: "Delete user documents",
|
||||
AuthRequired: []string{"user-auth"},
|
||||
},
|
||||
"delete_product_docs": firestoredeletedocuments.Config{
|
||||
Name: "delete_product_docs",
|
||||
Kind: "firestore-delete-documents",
|
||||
Source: "products-firestore",
|
||||
Description: "Delete product documents",
|
||||
AuthRequired: []string{},
|
||||
},
|
||||
"delete_order_docs": firestoredeletedocuments.Config{
|
||||
Name: "delete_order_docs",
|
||||
Kind: "firestore-delete-documents",
|
||||
Source: "orders-firestore",
|
||||
Description: "Delete order documents",
|
||||
AuthRequired: []string{"user-auth", "admin-auth"},
|
||||
},
|
||||
}
|
||||
|
||||
got := struct {
|
||||
Tools server.ToolConfigs `yaml:"tools"`
|
||||
}{}
|
||||
// Parse contents
|
||||
err = yaml.UnmarshalContext(ctx, testutils.FormatYaml(in), &got)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to unmarshal: %s", err)
|
||||
}
|
||||
if diff := cmp.Diff(want, got.Tools); diff != "" {
|
||||
t.Fatalf("incorrect parse: diff %v", diff)
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,186 @@
|
||||
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package firestoregetdocuments
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
firestoreapi "cloud.google.com/go/firestore"
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
firestoreds "github.com/googleapis/genai-toolbox/internal/sources/firestore"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
)
|
||||
|
||||
const kind string = "firestore-get-documents"
|
||||
const documentPathsKey string = "documentPaths"
|
||||
|
||||
func init() {
|
||||
if !tools.Register(kind, newConfig) {
|
||||
panic(fmt.Sprintf("tool kind %q already registered", kind))
|
||||
}
|
||||
}
|
||||
|
||||
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
|
||||
actual := Config{Name: name}
|
||||
if err := decoder.DecodeContext(ctx, &actual); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return actual, nil
|
||||
}
|
||||
|
||||
type compatibleSource interface {
|
||||
FirestoreClient() *firestoreapi.Client
|
||||
}
|
||||
|
||||
// validate compatible sources are still compatible
|
||||
var _ compatibleSource = &firestoreds.Source{}
|
||||
|
||||
var compatibleSources = [...]string{firestoreds.SourceKind}
|
||||
|
||||
type Config struct {
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Source string `yaml:"source" validate:"required"`
|
||||
Description string `yaml:"description" validate:"required"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
}
|
||||
|
||||
// validate interface
|
||||
var _ tools.ToolConfig = Config{}
|
||||
|
||||
func (cfg Config) ToolConfigKind() string {
|
||||
return kind
|
||||
}
|
||||
|
||||
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
|
||||
// verify source exists
|
||||
rawS, ok := srcs[cfg.Source]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("no source named %q configured", cfg.Source)
|
||||
}
|
||||
|
||||
// verify the source is compatible
|
||||
s, ok := rawS.(compatibleSource)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
|
||||
}
|
||||
|
||||
documentPathsParameter := tools.NewArrayParameter(documentPathsKey, "Array of document paths to retrieve from Firestore.", tools.NewStringParameter("item", "Document path"))
|
||||
parameters := tools.Parameters{documentPathsParameter}
|
||||
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: cfg.Name,
|
||||
Description: cfg.Description,
|
||||
InputSchema: parameters.McpManifest(),
|
||||
}
|
||||
|
||||
// finish tool setup
|
||||
t := Tool{
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
Client: s.FirestoreClient(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
|
||||
// validate interface
|
||||
var _ tools.Tool = Tool{}
|
||||
|
||||
type Tool struct {
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
|
||||
Client *firestoreapi.Client
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
|
||||
mapParams := params.AsMap()
|
||||
documentPathsRaw, ok := mapParams[documentPathsKey].([]any)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected an array", documentPathsKey)
|
||||
}
|
||||
|
||||
if len(documentPathsRaw) == 0 {
|
||||
return nil, fmt.Errorf("'%s' parameter cannot be empty", documentPathsKey)
|
||||
}
|
||||
|
||||
// Use ConvertAnySliceToTyped to convert the slice
|
||||
typedSlice, err := tools.ConvertAnySliceToTyped(documentPathsRaw, "string")
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to convert document paths: %w", err)
|
||||
}
|
||||
|
||||
documentPaths, ok := typedSlice.([]string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unexpected type conversion error for document paths")
|
||||
}
|
||||
|
||||
// Create document references from paths
|
||||
docRefs := make([]*firestoreapi.DocumentRef, len(documentPaths))
|
||||
for i, path := range documentPaths {
|
||||
docRefs[i] = t.Client.Doc(path)
|
||||
}
|
||||
|
||||
// Get all documents
|
||||
snapshots, err := t.Client.GetAll(ctx, docRefs)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get documents: %w", err)
|
||||
}
|
||||
|
||||
// Convert snapshots to response data
|
||||
results := make([]any, len(snapshots))
|
||||
for i, snapshot := range snapshots {
|
||||
docData := make(map[string]any)
|
||||
docData["path"] = documentPaths[i]
|
||||
docData["exists"] = snapshot.Exists()
|
||||
|
||||
if snapshot.Exists() {
|
||||
docData["data"] = snapshot.Data()
|
||||
docData["createTime"] = snapshot.CreateTime
|
||||
docData["updateTime"] = snapshot.UpdateTime
|
||||
docData["readTime"] = snapshot.ReadTime
|
||||
}
|
||||
|
||||
results[i] = docData
|
||||
}
|
||||
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
|
||||
return tools.ParseParams(t.Parameters, data, claims)
|
||||
}
|
||||
|
||||
func (t Tool) Manifest() tools.Manifest {
|
||||
return t.manifest
|
||||
}
|
||||
|
||||
func (t Tool) McpManifest() tools.McpManifest {
|
||||
return t.mcpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
|
||||
}
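For context, a minimal sketch of an invocation payload for this tool follows. It assumes the array parameter is registered under the key documentPaths (the constant documentPathsKey is defined earlier in the file and its exact value is not repeated here); the document paths are illustrative only.

# Hypothetical invocation payload for a firestore-get-documents tool.
# Assumes the parameter key is "documentPaths"; the paths below are examples.
documentPaths:
  - "users/alice"
  - "users/bob/orders/order-123"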
@@ -0,0 +1,156 @@
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package firestoregetdocuments_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/testutils"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetdocuments"
|
||||
)
|
||||
|
||||
func TestParseFromYamlFirestoreGetDocuments(t *testing.T) {
|
||||
ctx, err := testutils.ContextWithNewLogger()
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
tcs := []struct {
|
||||
desc string
|
||||
in string
|
||||
want server.ToolConfigs
|
||||
}{
|
||||
{
|
||||
desc: "basic example",
|
||||
in: `
|
||||
tools:
|
||||
get_docs_tool:
|
||||
kind: firestore-get-documents
|
||||
source: my-firestore-instance
|
||||
description: Retrieve documents from Firestore by paths
|
||||
`,
|
||||
want: server.ToolConfigs{
|
||||
"get_docs_tool": firestoregetdocuments.Config{
|
||||
Name: "get_docs_tool",
|
||||
Kind: "firestore-get-documents",
|
||||
Source: "my-firestore-instance",
|
||||
Description: "Retrieve documents from Firestore by paths",
|
||||
AuthRequired: []string{},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "with auth requirements",
|
||||
in: `
|
||||
tools:
|
||||
secure_get_docs:
|
||||
kind: firestore-get-documents
|
||||
source: prod-firestore
|
||||
description: Get documents with authentication
|
||||
authRequired:
|
||||
- google-auth-service
|
||||
- api-key-service
|
||||
`,
|
||||
want: server.ToolConfigs{
|
||||
"secure_get_docs": firestoregetdocuments.Config{
|
||||
Name: "secure_get_docs",
|
||||
Kind: "firestore-get-documents",
|
||||
Source: "prod-firestore",
|
||||
Description: "Get documents with authentication",
|
||||
AuthRequired: []string{"google-auth-service", "api-key-service"},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
got := struct {
|
||||
Tools server.ToolConfigs `yaml:"tools"`
|
||||
}{}
|
||||
// Parse contents
|
||||
err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to unmarshal: %s", err)
|
||||
}
|
||||
if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
|
||||
t.Fatalf("incorrect parse: diff %v", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseFromYamlMultipleTools(t *testing.T) {
|
||||
ctx, err := testutils.ContextWithNewLogger()
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
in := `
|
||||
tools:
|
||||
get_user_docs:
|
||||
kind: firestore-get-documents
|
||||
source: users-firestore
|
||||
description: Get user documents
|
||||
authRequired:
|
||||
- user-auth
|
||||
get_product_docs:
|
||||
kind: firestore-get-documents
|
||||
source: products-firestore
|
||||
description: Get product documents
|
||||
get_order_docs:
|
||||
kind: firestore-get-documents
|
||||
source: orders-firestore
|
||||
description: Get order documents
|
||||
authRequired:
|
||||
- user-auth
|
||||
- admin-auth
|
||||
`
|
||||
want := server.ToolConfigs{
|
||||
"get_user_docs": firestoregetdocuments.Config{
|
||||
Name: "get_user_docs",
|
||||
Kind: "firestore-get-documents",
|
||||
Source: "users-firestore",
|
||||
Description: "Get user documents",
|
||||
AuthRequired: []string{"user-auth"},
|
||||
},
|
||||
"get_product_docs": firestoregetdocuments.Config{
|
||||
Name: "get_product_docs",
|
||||
Kind: "firestore-get-documents",
|
||||
Source: "products-firestore",
|
||||
Description: "Get product documents",
|
||||
AuthRequired: []string{},
|
||||
},
|
||||
"get_order_docs": firestoregetdocuments.Config{
|
||||
Name: "get_order_docs",
|
||||
Kind: "firestore-get-documents",
|
||||
Source: "orders-firestore",
|
||||
Description: "Get order documents",
|
||||
AuthRequired: []string{"user-auth", "admin-auth"},
|
||||
},
|
||||
}
|
||||
|
||||
got := struct {
|
||||
Tools server.ToolConfigs `yaml:"tools"`
|
||||
}{}
|
||||
// Parse contents
|
||||
err = yaml.UnmarshalContext(ctx, testutils.FormatYaml(in), &got)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to unmarshal: %s", err)
|
||||
}
|
||||
if diff := cmp.Diff(want, got.Tools); diff != "" {
|
||||
t.Fatalf("incorrect parse: diff %v", diff)
|
||||
}
|
||||
}
159
internal/tools/firestore/firestoregetrules/firestoregetrules.go
Normal file
@@ -0,0 +1,159 @@
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package firestoregetrules
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
firestoreds "github.com/googleapis/genai-toolbox/internal/sources/firestore"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"google.golang.org/api/firebaserules/v1"
|
||||
)
|
||||
|
||||
const kind string = "firestore-get-rules"
|
||||
|
||||
func init() {
|
||||
if !tools.Register(kind, newConfig) {
|
||||
panic(fmt.Sprintf("tool kind %q already registered", kind))
|
||||
}
|
||||
}
|
||||
|
||||
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
|
||||
actual := Config{Name: name}
|
||||
if err := decoder.DecodeContext(ctx, &actual); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return actual, nil
|
||||
}
|
||||
|
||||
type compatibleSource interface {
|
||||
FirebaseRulesClient() *firebaserules.Service
|
||||
GetProjectId() string
|
||||
}
|
||||
|
||||
// validate compatible sources are still compatible
|
||||
var _ compatibleSource = &firestoreds.Source{}
|
||||
|
||||
var compatibleSources = [...]string{firestoreds.SourceKind}
|
||||
|
||||
type Config struct {
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Source string `yaml:"source" validate:"required"`
|
||||
Description string `yaml:"description" validate:"required"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
}
|
||||
|
||||
// validate interface
|
||||
var _ tools.ToolConfig = Config{}
|
||||
|
||||
func (cfg Config) ToolConfigKind() string {
|
||||
return kind
|
||||
}
|
||||
|
||||
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
|
||||
// verify source exists
|
||||
rawS, ok := srcs[cfg.Source]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("no source named %q configured", cfg.Source)
|
||||
}
|
||||
|
||||
// verify the source is compatible
|
||||
s, ok := rawS.(compatibleSource)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
|
||||
}
|
||||
|
||||
// No parameters needed for this tool
|
||||
parameters := tools.Parameters{}
|
||||
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: cfg.Name,
|
||||
Description: cfg.Description,
|
||||
InputSchema: parameters.McpManifest(),
|
||||
}
|
||||
|
||||
// finish tool setup
|
||||
t := Tool{
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
RulesClient: s.FirebaseRulesClient(),
|
||||
ProjectId: s.GetProjectId(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
|
||||
// validate interface
|
||||
var _ tools.Tool = Tool{}
|
||||
|
||||
type Tool struct {
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
|
||||
RulesClient *firebaserules.Service
|
||||
ProjectId string
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
|
||||
// Get the latest release for Firestore
|
||||
releaseName := fmt.Sprintf("projects/%s/releases/cloud.firestore", t.ProjectId)
|
||||
release, err := t.RulesClient.Projects.Releases.Get(releaseName).Context(ctx).Do()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get latest Firestore release: %w", err)
|
||||
}
|
||||
|
||||
if release.RulesetName == "" {
|
||||
return nil, fmt.Errorf("no active Firestore rules were found in project '%s'", t.ProjectId)
|
||||
}
|
||||
|
||||
// Get the ruleset content
|
||||
ruleset, err := t.RulesClient.Projects.Rulesets.Get(release.RulesetName).Context(ctx).Do()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to get ruleset content: %w", err)
|
||||
}
|
||||
|
||||
if ruleset.Source == nil || len(ruleset.Source.Files) == 0 {
|
||||
return nil, fmt.Errorf("no rules files found in ruleset")
|
||||
}
|
||||
|
||||
return ruleset, nil
|
||||
}
|
||||
|
||||
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
|
||||
return tools.ParseParams(t.Parameters, data, claims)
|
||||
}
|
||||
|
||||
func (t Tool) Manifest() tools.Manifest {
|
||||
return t.manifest
|
||||
}
|
||||
|
||||
func (t Tool) McpManifest() tools.McpManifest {
|
||||
return t.mcpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
|
||||
}
@@ -0,0 +1,156 @@
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package firestoregetrules_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/testutils"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetrules"
|
||||
)
|
||||
|
||||
func TestParseFromYamlFirestoreGetRules(t *testing.T) {
|
||||
ctx, err := testutils.ContextWithNewLogger()
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
tcs := []struct {
|
||||
desc string
|
||||
in string
|
||||
want server.ToolConfigs
|
||||
}{
|
||||
{
|
||||
desc: "basic example",
|
||||
in: `
|
||||
tools:
|
||||
get_rules_tool:
|
||||
kind: firestore-get-rules
|
||||
source: my-firestore-instance
|
||||
description: Retrieves the active Firestore security rules for the current project
|
||||
`,
|
||||
want: server.ToolConfigs{
|
||||
"get_rules_tool": firestoregetrules.Config{
|
||||
Name: "get_rules_tool",
|
||||
Kind: "firestore-get-rules",
|
||||
Source: "my-firestore-instance",
|
||||
Description: "Retrieves the active Firestore security rules for the current project",
|
||||
AuthRequired: []string{},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "with auth requirements",
|
||||
in: `
|
||||
tools:
|
||||
secure_get_rules:
|
||||
kind: firestore-get-rules
|
||||
source: prod-firestore
|
||||
description: Get Firestore security rules with authentication
|
||||
authRequired:
|
||||
- google-auth-service
|
||||
- admin-service
|
||||
`,
|
||||
want: server.ToolConfigs{
|
||||
"secure_get_rules": firestoregetrules.Config{
|
||||
Name: "secure_get_rules",
|
||||
Kind: "firestore-get-rules",
|
||||
Source: "prod-firestore",
|
||||
Description: "Get Firestore security rules with authentication",
|
||||
AuthRequired: []string{"google-auth-service", "admin-service"},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
got := struct {
|
||||
Tools server.ToolConfigs `yaml:"tools"`
|
||||
}{}
|
||||
// Parse contents
|
||||
err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to unmarshal: %s", err)
|
||||
}
|
||||
if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
|
||||
t.Fatalf("incorrect parse: diff %v", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseFromYamlMultipleTools(t *testing.T) {
|
||||
ctx, err := testutils.ContextWithNewLogger()
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
in := `
|
||||
tools:
|
||||
get_dev_rules:
|
||||
kind: firestore-get-rules
|
||||
source: dev-firestore
|
||||
description: Get development Firestore rules
|
||||
authRequired:
|
||||
- dev-auth
|
||||
get_staging_rules:
|
||||
kind: firestore-get-rules
|
||||
source: staging-firestore
|
||||
description: Get staging Firestore rules
|
||||
get_prod_rules:
|
||||
kind: firestore-get-rules
|
||||
source: prod-firestore
|
||||
description: Get production Firestore rules
|
||||
authRequired:
|
||||
- prod-auth
|
||||
- admin-auth
|
||||
`
|
||||
want := server.ToolConfigs{
|
||||
"get_dev_rules": firestoregetrules.Config{
|
||||
Name: "get_dev_rules",
|
||||
Kind: "firestore-get-rules",
|
||||
Source: "dev-firestore",
|
||||
Description: "Get development Firestore rules",
|
||||
AuthRequired: []string{"dev-auth"},
|
||||
},
|
||||
"get_staging_rules": firestoregetrules.Config{
|
||||
Name: "get_staging_rules",
|
||||
Kind: "firestore-get-rules",
|
||||
Source: "staging-firestore",
|
||||
Description: "Get staging Firestore rules",
|
||||
AuthRequired: []string{},
|
||||
},
|
||||
"get_prod_rules": firestoregetrules.Config{
|
||||
Name: "get_prod_rules",
|
||||
Kind: "firestore-get-rules",
|
||||
Source: "prod-firestore",
|
||||
Description: "Get production Firestore rules",
|
||||
AuthRequired: []string{"prod-auth", "admin-auth"},
|
||||
},
|
||||
}
|
||||
|
||||
got := struct {
|
||||
Tools server.ToolConfigs `yaml:"tools"`
|
||||
}{}
|
||||
// Parse contents
|
||||
err = yaml.UnmarshalContext(ctx, testutils.FormatYaml(in), &got)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to unmarshal: %s", err)
|
||||
}
|
||||
if diff := cmp.Diff(want, got.Tools); diff != "" {
|
||||
t.Fatalf("incorrect parse: diff %v", diff)
|
||||
}
|
||||
}
@@ -0,0 +1,175 @@
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package firestorelistcollections
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
firestoreapi "cloud.google.com/go/firestore"
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
firestoreds "github.com/googleapis/genai-toolbox/internal/sources/firestore"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
)
|
||||
|
||||
const kind string = "firestore-list-collections"
|
||||
const parentPathKey string = "parentPath"
|
||||
|
||||
func init() {
|
||||
if !tools.Register(kind, newConfig) {
|
||||
panic(fmt.Sprintf("tool kind %q already registered", kind))
|
||||
}
|
||||
}
|
||||
|
||||
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
|
||||
actual := Config{Name: name}
|
||||
if err := decoder.DecodeContext(ctx, &actual); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return actual, nil
|
||||
}
|
||||
|
||||
type compatibleSource interface {
|
||||
FirestoreClient() *firestoreapi.Client
|
||||
}
|
||||
|
||||
// validate compatible sources are still compatible
|
||||
var _ compatibleSource = &firestoreds.Source{}
|
||||
|
||||
var compatibleSources = [...]string{firestoreds.SourceKind}
|
||||
|
||||
type Config struct {
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Source string `yaml:"source" validate:"required"`
|
||||
Description string `yaml:"description" validate:"required"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
}
|
||||
|
||||
// validate interface
|
||||
var _ tools.ToolConfig = Config{}
|
||||
|
||||
func (cfg Config) ToolConfigKind() string {
|
||||
return kind
|
||||
}
|
||||
|
||||
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
|
||||
// verify source exists
|
||||
rawS, ok := srcs[cfg.Source]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("no source named %q configured", cfg.Source)
|
||||
}
|
||||
|
||||
// verify the source is compatible
|
||||
s, ok := rawS.(compatibleSource)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
|
||||
}
|
||||
|
||||
emptyString := ""
|
||||
parentPathParameter := tools.NewStringParameterWithDefault(parentPathKey, emptyString, "Parent document path to list subcollections from. If not provided, lists root collections.")
|
||||
parameters := tools.Parameters{parentPathParameter}
|
||||
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: cfg.Name,
|
||||
Description: cfg.Description,
|
||||
InputSchema: parameters.McpManifest(),
|
||||
}
|
||||
|
||||
// finish tool setup
|
||||
t := Tool{
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
Client: s.FirestoreClient(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
|
||||
// validate interface
|
||||
var _ tools.Tool = Tool{}
|
||||
|
||||
type Tool struct {
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
|
||||
Client *firestoreapi.Client
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
|
||||
mapParams := params.AsMap()
|
||||
|
||||
var collectionRefs []*firestoreapi.CollectionRef
|
||||
var err error
|
||||
|
||||
// Check if parentPath is provided
|
||||
parentPath, hasParent := mapParams[parentPathKey].(string)
|
||||
|
||||
if hasParent && parentPath != "" {
|
||||
// List subcollections of the specified document
|
||||
docRef := t.Client.Doc(parentPath)
|
||||
collectionRefs, err = docRef.Collections(ctx).GetAll()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to list subcollections of document %q: %w", parentPath, err)
|
||||
}
|
||||
} else {
|
||||
// List root collections
|
||||
collectionRefs, err = t.Client.Collections(ctx).GetAll()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to list root collections: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
// Convert collection references to response data
|
||||
results := make([]any, len(collectionRefs))
|
||||
for i, collRef := range collectionRefs {
|
||||
collData := make(map[string]any)
|
||||
collData["id"] = collRef.ID
|
||||
collData["path"] = collRef.Path
|
||||
|
||||
// If this is a subcollection, include parent information
|
||||
if collRef.Parent != nil {
|
||||
collData["parent"] = collRef.Parent.Path
|
||||
}
|
||||
|
||||
results[i] = collData
|
||||
}
|
||||
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
|
||||
return tools.ParseParams(t.Parameters, data, claims)
|
||||
}
|
||||
|
||||
func (t Tool) Manifest() tools.Manifest {
|
||||
return t.manifest
|
||||
}
|
||||
|
||||
func (t Tool) McpManifest() tools.McpManifest {
|
||||
return t.mcpManifest
|
||||
}
|
||||
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
|
||||
}
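A minimal sketch of invocation payloads for this tool, using the parentPath key defined above; the document path shown is illustrative only.

# Hypothetical invocation payloads for a firestore-list-collections tool.
# Omitting parentPath (or passing an empty string) lists root collections.
parentPath: ""
# To list the subcollections of a specific document instead (example path):
# parentPath: "users/alice"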
@@ -0,0 +1,156 @@
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package firestorelistcollections_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/testutils"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorelistcollections"
|
||||
)
|
||||
|
||||
func TestParseFromYamlFirestoreListCollections(t *testing.T) {
|
||||
ctx, err := testutils.ContextWithNewLogger()
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
tcs := []struct {
|
||||
desc string
|
||||
in string
|
||||
want server.ToolConfigs
|
||||
}{
|
||||
{
|
||||
desc: "basic example",
|
||||
in: `
|
||||
tools:
|
||||
list_collections_tool:
|
||||
kind: firestore-list-collections
|
||||
source: my-firestore-instance
|
||||
description: List collections in Firestore
|
||||
`,
|
||||
want: server.ToolConfigs{
|
||||
"list_collections_tool": firestorelistcollections.Config{
|
||||
Name: "list_collections_tool",
|
||||
Kind: "firestore-list-collections",
|
||||
Source: "my-firestore-instance",
|
||||
Description: "List collections in Firestore",
|
||||
AuthRequired: []string{},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "with auth requirements",
|
||||
in: `
|
||||
tools:
|
||||
secure_list_collections:
|
||||
kind: firestore-list-collections
|
||||
source: prod-firestore
|
||||
description: List collections with authentication
|
||||
authRequired:
|
||||
- google-auth-service
|
||||
- api-key-service
|
||||
`,
|
||||
want: server.ToolConfigs{
|
||||
"secure_list_collections": firestorelistcollections.Config{
|
||||
Name: "secure_list_collections",
|
||||
Kind: "firestore-list-collections",
|
||||
Source: "prod-firestore",
|
||||
Description: "List collections with authentication",
|
||||
AuthRequired: []string{"google-auth-service", "api-key-service"},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
got := struct {
|
||||
Tools server.ToolConfigs `yaml:"tools"`
|
||||
}{}
|
||||
// Parse contents
|
||||
err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to unmarshal: %s", err)
|
||||
}
|
||||
if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
|
||||
t.Fatalf("incorrect parse: diff %v", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseFromYamlMultipleTools(t *testing.T) {
|
||||
ctx, err := testutils.ContextWithNewLogger()
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
in := `
|
||||
tools:
|
||||
list_user_collections:
|
||||
kind: firestore-list-collections
|
||||
source: users-firestore
|
||||
description: List user-related collections
|
||||
authRequired:
|
||||
- user-auth
|
||||
list_product_collections:
|
||||
kind: firestore-list-collections
|
||||
source: products-firestore
|
||||
description: List product-related collections
|
||||
list_admin_collections:
|
||||
kind: firestore-list-collections
|
||||
source: admin-firestore
|
||||
description: List administrative collections
|
||||
authRequired:
|
||||
- user-auth
|
||||
- admin-auth
|
||||
`
|
||||
want := server.ToolConfigs{
|
||||
"list_user_collections": firestorelistcollections.Config{
|
||||
Name: "list_user_collections",
|
||||
Kind: "firestore-list-collections",
|
||||
Source: "users-firestore",
|
||||
Description: "List user-related collections",
|
||||
AuthRequired: []string{"user-auth"},
|
||||
},
|
||||
"list_product_collections": firestorelistcollections.Config{
|
||||
Name: "list_product_collections",
|
||||
Kind: "firestore-list-collections",
|
||||
Source: "products-firestore",
|
||||
Description: "List product-related collections",
|
||||
AuthRequired: []string{},
|
||||
},
|
||||
"list_admin_collections": firestorelistcollections.Config{
|
||||
Name: "list_admin_collections",
|
||||
Kind: "firestore-list-collections",
|
||||
Source: "admin-firestore",
|
||||
Description: "List administrative collections",
|
||||
AuthRequired: []string{"user-auth", "admin-auth"},
|
||||
},
|
||||
}
|
||||
|
||||
got := struct {
|
||||
Tools server.ToolConfigs `yaml:"tools"`
|
||||
}{}
|
||||
// Parse contents
|
||||
err = yaml.UnmarshalContext(ctx, testutils.FormatYaml(in), &got)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to unmarshal: %s", err)
|
||||
}
|
||||
if diff := cmp.Diff(want, got.Tools); diff != "" {
|
||||
t.Fatalf("incorrect parse: diff %v", diff)
|
||||
}
|
||||
}
@@ -0,0 +1,529 @@
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package firestorequerycollection
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
firestoreapi "cloud.google.com/go/firestore"
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
firestoreds "github.com/googleapis/genai-toolbox/internal/sources/firestore"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
)
|
||||
|
||||
// Constants for tool configuration
|
||||
const (
|
||||
kind = "firestore-query-collection"
|
||||
defaultLimit = 100
|
||||
defaultAnalyze = false
|
||||
maxFilterLength = 100 // Maximum filters to prevent abuse
|
||||
)
|
||||
|
||||
// Parameter keys
|
||||
const (
|
||||
collectionPathKey = "collectionPath"
|
||||
filtersKey = "filters"
|
||||
orderByKey = "orderBy"
|
||||
limitKey = "limit"
|
||||
analyzeQueryKey = "analyzeQuery"
|
||||
)
|
||||
|
||||
// Firestore operators
|
||||
var validOperators = map[string]bool{
|
||||
"<": true,
|
||||
"<=": true,
|
||||
">": true,
|
||||
">=": true,
|
||||
"==": true,
|
||||
"!=": true,
|
||||
"array-contains": true,
|
||||
"array-contains-any": true,
|
||||
"in": true,
|
||||
"not-in": true,
|
||||
}
|
||||
|
||||
// Error messages
|
||||
const (
|
||||
errMissingCollectionPath = "invalid or missing '%s' parameter"
|
||||
errInvalidFilters = "invalid '%s' parameter; expected an array"
|
||||
errFilterNotString = "filter at index %d is not a string"
|
||||
errFilterParseFailed = "failed to parse filter at index %d: %w"
|
||||
errInvalidOperator = "unsupported operator: %s. Valid operators are: %v"
|
||||
errMissingFilterValue = "no value specified for filter on field '%s'"
|
||||
errOrderByParseFailed = "failed to parse orderBy: %w"
|
||||
errQueryExecutionFailed = "failed to execute query: %w"
|
||||
errTooManyFilters = "too many filters provided: %d (maximum: %d)"
|
||||
)
|
||||
|
||||
func init() {
|
||||
if !tools.Register(kind, newConfig) {
|
||||
panic(fmt.Sprintf("tool kind %q already registered", kind))
|
||||
}
|
||||
}
|
||||
|
||||
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
|
||||
actual := Config{Name: name}
|
||||
if err := decoder.DecodeContext(ctx, &actual); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return actual, nil
|
||||
}
|
||||
|
||||
// compatibleSource defines the interface for sources that can provide a Firestore client
|
||||
type compatibleSource interface {
|
||||
FirestoreClient() *firestoreapi.Client
|
||||
}
|
||||
|
||||
// validate compatible sources are still compatible
|
||||
var _ compatibleSource = &firestoreds.Source{}
|
||||
|
||||
var compatibleSources = [...]string{firestoreds.SourceKind}
|
||||
|
||||
// Config represents the configuration for the Firestore query collection tool
|
||||
type Config struct {
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Source string `yaml:"source" validate:"required"`
|
||||
Description string `yaml:"description" validate:"required"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
}
|
||||
|
||||
// validate interface
|
||||
var _ tools.ToolConfig = Config{}
|
||||
|
||||
// ToolConfigKind returns the kind of tool configuration
|
||||
func (cfg Config) ToolConfigKind() string {
|
||||
return kind
|
||||
}
|
||||
|
||||
// Initialize creates a new Tool instance from the configuration
|
||||
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
|
||||
// verify source exists
|
||||
rawS, ok := srcs[cfg.Source]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("no source named %q configured", cfg.Source)
|
||||
}
|
||||
|
||||
// verify the source is compatible
|
||||
s, ok := rawS.(compatibleSource)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
|
||||
}
|
||||
|
||||
// Create parameters
|
||||
parameters := createParameters()
|
||||
|
||||
mcpManifest := tools.McpManifest{
|
||||
Name: cfg.Name,
|
||||
Description: cfg.Description,
|
||||
InputSchema: parameters.McpManifest(),
|
||||
}
|
||||
|
||||
// finish tool setup
|
||||
t := Tool{
|
||||
Name: cfg.Name,
|
||||
Kind: kind,
|
||||
Parameters: parameters,
|
||||
AuthRequired: cfg.AuthRequired,
|
||||
Client: s.FirestoreClient(),
|
||||
manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
}
|
||||
return t, nil
|
||||
}
|
||||
|
||||
// createParameters creates the parameter definitions for the tool
|
||||
func createParameters() tools.Parameters {
|
||||
collectionPathParameter := tools.NewStringParameter(
|
||||
collectionPathKey,
|
||||
"The path to the Firestore collection to query",
|
||||
)
|
||||
|
||||
filtersDescription := `Array of filter objects to apply to the query. Each filter is a JSON string with:
|
||||
- field: The field name to filter on
|
||||
- op: The operator to use ("<", "<=", ">", ">=", "==", "!=", "array-contains", "array-contains-any", "in", "not-in")
|
||||
- value: The value to compare against (can be string, number, boolean, or array)
|
||||
Example: {"field": "age", "op": ">", "value": 18}`
|
||||
|
||||
filtersParameter := tools.NewArrayParameter(
|
||||
filtersKey,
|
||||
filtersDescription,
|
||||
tools.NewStringParameter("item", "JSON string representation of a filter object"),
|
||||
)
|
||||
|
||||
orderByParameter := tools.NewStringParameter(
|
||||
orderByKey,
|
||||
"JSON string specifying the field and direction to order by (e.g., {\"field\": \"name\", \"direction\": \"ASCENDING\"}). Leave empty if not specified",
|
||||
)
|
||||
|
||||
limitParameter := tools.NewIntParameterWithDefault(
|
||||
limitKey,
|
||||
defaultLimit,
|
||||
"The maximum number of documents to return",
|
||||
)
|
||||
|
||||
analyzeQueryParameter := tools.NewBooleanParameterWithDefault(
|
||||
analyzeQueryKey,
|
||||
defaultAnalyze,
|
||||
"If true, returns query explain metrics including execution statistics",
|
||||
)
|
||||
|
||||
return tools.Parameters{
|
||||
collectionPathParameter,
|
||||
filtersParameter,
|
||||
orderByParameter,
|
||||
limitParameter,
|
||||
analyzeQueryParameter,
|
||||
}
|
||||
}
|
||||
|
||||
// validate interface
|
||||
var _ tools.Tool = Tool{}
|
||||
|
||||
// Tool represents the Firestore query collection tool
|
||||
type Tool struct {
|
||||
Name string `yaml:"name"`
|
||||
Kind string `yaml:"kind"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
Parameters tools.Parameters `yaml:"parameters"`
|
||||
|
||||
Client *firestoreapi.Client
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
// FilterConfig represents a filter for the query
|
||||
type FilterConfig struct {
|
||||
Field string `json:"field"`
|
||||
Op string `json:"op"`
|
||||
Value interface{} `json:"value"`
|
||||
}
|
||||
|
||||
// Validate checks if the filter configuration is valid
|
||||
func (f *FilterConfig) Validate() error {
|
||||
if f.Field == "" {
|
||||
return fmt.Errorf("filter field cannot be empty")
|
||||
}
|
||||
|
||||
if !validOperators[f.Op] {
|
||||
ops := make([]string, 0, len(validOperators))
|
||||
for op := range validOperators {
|
||||
ops = append(ops, op)
|
||||
}
|
||||
return fmt.Errorf(errInvalidOperator, f.Op, ops)
|
||||
}
|
||||
|
||||
if f.Value == nil {
|
||||
return fmt.Errorf(errMissingFilterValue, f.Field)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// OrderByConfig represents ordering configuration
|
||||
type OrderByConfig struct {
|
||||
Field string `json:"field"`
|
||||
Direction string `json:"direction"`
|
||||
}
|
||||
|
||||
// GetDirection returns the Firestore direction constant
|
||||
func (o *OrderByConfig) GetDirection() firestoreapi.Direction {
|
||||
if strings.EqualFold(o.Direction, "DESCENDING") {
|
||||
return firestoreapi.Desc
|
||||
}
|
||||
return firestoreapi.Asc
|
||||
}
|
||||
|
||||
// QueryResult represents a document result from the query
|
||||
type QueryResult struct {
|
||||
ID string `json:"id"`
|
||||
Path string `json:"path"`
|
||||
Data map[string]any `json:"data"`
|
||||
CreateTime interface{} `json:"createTime,omitempty"`
|
||||
UpdateTime interface{} `json:"updateTime,omitempty"`
|
||||
ReadTime interface{} `json:"readTime,omitempty"`
|
||||
}
|
||||
|
||||
// QueryResponse represents the full response including optional metrics
|
||||
type QueryResponse struct {
|
||||
Documents []QueryResult `json:"documents"`
|
||||
ExplainMetrics map[string]any `json:"explainMetrics,omitempty"`
|
||||
}
|
||||
|
||||
// Invoke executes the Firestore query based on the provided parameters
|
||||
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
|
||||
// Parse parameters
|
||||
queryParams, err := t.parseQueryParameters(params)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Build the query
|
||||
query, err := t.buildQuery(queryParams)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Execute the query and return results
|
||||
return t.executeQuery(ctx, query, queryParams.AnalyzeQuery)
|
||||
}
|
||||
|
||||
// queryParameters holds all parsed query parameters
|
||||
type queryParameters struct {
|
||||
CollectionPath string
|
||||
Filters []FilterConfig
|
||||
OrderBy *OrderByConfig
|
||||
Limit int
|
||||
AnalyzeQuery bool
|
||||
}
|
||||
|
||||
// parseQueryParameters extracts and validates parameters from the input
|
||||
func (t Tool) parseQueryParameters(params tools.ParamValues) (*queryParameters, error) {
|
||||
mapParams := params.AsMap()
|
||||
|
||||
// Get collection path
|
||||
collectionPath, ok := mapParams[collectionPathKey].(string)
|
||||
if !ok || collectionPath == "" {
|
||||
return nil, fmt.Errorf(errMissingCollectionPath, collectionPathKey)
|
||||
}
|
||||
|
||||
result := &queryParameters{
|
||||
CollectionPath: collectionPath,
|
||||
Limit: defaultLimit,
|
||||
AnalyzeQuery: defaultAnalyze,
|
||||
}
|
||||
|
||||
// Parse filters
|
||||
if filtersRaw, ok := mapParams[filtersKey]; ok && filtersRaw != nil {
|
||||
filters, err := t.parseFilters(filtersRaw)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result.Filters = filters
|
||||
}
|
||||
|
||||
// Parse orderBy
|
||||
if orderByRaw, ok := mapParams[orderByKey]; ok && orderByRaw != nil {
|
||||
orderBy, err := t.parseOrderBy(orderByRaw)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
result.OrderBy = orderBy
|
||||
}
|
||||
|
||||
// Parse limit
|
||||
if limit, ok := mapParams[limitKey].(int); ok {
|
||||
result.Limit = limit
|
||||
}
|
||||
|
||||
// Parse analyze
|
||||
if analyze, ok := mapParams[analyzeQueryKey].(bool); ok {
|
||||
result.AnalyzeQuery = analyze
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// parseFilters parses and validates filter configurations
|
||||
func (t Tool) parseFilters(filtersRaw interface{}) ([]FilterConfig, error) {
|
||||
filters, ok := filtersRaw.([]any)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf(errInvalidFilters, filtersKey)
|
||||
}
|
||||
|
||||
if len(filters) > maxFilterLength {
|
||||
return nil, fmt.Errorf(errTooManyFilters, len(filters), maxFilterLength)
|
||||
}
|
||||
|
||||
result := make([]FilterConfig, 0, len(filters))
|
||||
for i, filterRaw := range filters {
|
||||
filterJSON, ok := filterRaw.(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf(errFilterNotString, i)
|
||||
}
|
||||
|
||||
var filter FilterConfig
|
||||
if err := json.Unmarshal([]byte(filterJSON), &filter); err != nil {
|
||||
return nil, fmt.Errorf(errFilterParseFailed, i, err)
|
||||
}
|
||||
|
||||
if err := filter.Validate(); err != nil {
|
||||
return nil, fmt.Errorf("filter at index %d is invalid: %w", i, err)
|
||||
}
|
||||
|
||||
result = append(result, filter)
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// parseOrderBy parses the orderBy configuration
|
||||
func (t Tool) parseOrderBy(orderByRaw interface{}) (*OrderByConfig, error) {
|
||||
orderByJSON, ok := orderByRaw.(string)
|
||||
if !ok || orderByJSON == "" {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
var orderBy OrderByConfig
|
||||
if err := json.Unmarshal([]byte(orderByJSON), &orderBy); err != nil {
|
||||
return nil, fmt.Errorf(errOrderByParseFailed, err)
|
||||
}
|
||||
|
||||
if orderBy.Field == "" {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
return &orderBy, nil
|
||||
}
|
||||
|
||||
// buildQuery constructs the Firestore query from parameters
|
||||
func (t Tool) buildQuery(params *queryParameters) (*firestoreapi.Query, error) {
|
||||
collection := t.Client.Collection(params.CollectionPath)
|
||||
query := collection.Query
|
||||
|
||||
// Apply filters
|
||||
if len(params.Filters) > 0 {
|
||||
filterConditions := make([]firestoreapi.EntityFilter, 0, len(params.Filters))
|
||||
for _, filter := range params.Filters {
|
||||
filterConditions = append(filterConditions, firestoreapi.PropertyFilter{
|
||||
Path: filter.Field,
|
||||
Operator: filter.Op,
|
||||
Value: filter.Value,
|
||||
})
|
||||
}
|
||||
|
||||
query = query.WhereEntity(firestoreapi.AndFilter{
|
||||
Filters: filterConditions,
|
||||
})
|
||||
}
|
||||
|
||||
// Apply ordering
|
||||
if params.OrderBy != nil {
|
||||
query = query.OrderBy(params.OrderBy.Field, params.OrderBy.GetDirection())
|
||||
}
|
||||
|
||||
// Apply limit
|
||||
query = query.Limit(params.Limit)
|
||||
|
||||
// Apply analyze options
|
||||
if params.AnalyzeQuery {
|
||||
query = query.WithRunOptions(firestoreapi.ExplainOptions{
|
||||
Analyze: true,
|
||||
})
|
||||
}
|
||||
|
||||
return &query, nil
|
||||
}
|
||||
|
||||
// executeQuery runs the query and formats the results
|
||||
func (t Tool) executeQuery(ctx context.Context, query *firestoreapi.Query, analyzeQuery bool) (any, error) {
|
||||
docIterator := query.Documents(ctx)
|
||||
docs, err := docIterator.GetAll()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf(errQueryExecutionFailed, err)
|
||||
}
|
||||
|
||||
// Convert results to structured format
|
||||
results := make([]QueryResult, len(docs))
|
||||
for i, doc := range docs {
|
||||
results[i] = QueryResult{
|
||||
ID: doc.Ref.ID,
|
||||
Path: doc.Ref.Path,
|
||||
Data: doc.Data(),
|
||||
CreateTime: doc.CreateTime,
|
||||
UpdateTime: doc.UpdateTime,
|
||||
ReadTime: doc.ReadTime,
|
||||
}
|
||||
}
|
||||
|
||||
// Return with explain metrics if requested
|
||||
if analyzeQuery {
|
||||
explainMetrics, err := t.getExplainMetrics(docIterator)
|
||||
if err == nil && explainMetrics != nil {
|
||||
response := QueryResponse{
|
||||
Documents: results,
|
||||
ExplainMetrics: explainMetrics,
|
||||
}
|
||||
return response, nil
|
||||
}
|
||||
}
|
||||
|
||||
// Return just the documents
|
||||
resultsAny := make([]any, len(results))
|
||||
for i, r := range results {
|
||||
resultsAny[i] = r
|
||||
}
|
||||
return resultsAny, nil
|
||||
}
|
||||
|
||||
// getExplainMetrics extracts explain metrics from the query iterator
|
||||
func (t Tool) getExplainMetrics(docIterator *firestoreapi.DocumentIterator) (map[string]any, error) {
|
||||
explainMetrics, err := docIterator.ExplainMetrics()
|
||||
if err != nil || explainMetrics == nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
metricsData := make(map[string]any)
|
||||
|
||||
// Add plan summary if available
|
||||
if explainMetrics.PlanSummary != nil {
|
||||
planSummary := make(map[string]any)
|
||||
planSummary["indexesUsed"] = explainMetrics.PlanSummary.IndexesUsed
|
||||
metricsData["planSummary"] = planSummary
|
||||
}
|
||||
|
||||
// Add execution stats if available
|
||||
if explainMetrics.ExecutionStats != nil {
|
||||
executionStats := make(map[string]any)
|
||||
executionStats["resultsReturned"] = explainMetrics.ExecutionStats.ResultsReturned
|
||||
executionStats["readOperations"] = explainMetrics.ExecutionStats.ReadOperations
|
||||
|
||||
if explainMetrics.ExecutionStats.ExecutionDuration != nil {
|
||||
executionStats["executionDuration"] = explainMetrics.ExecutionStats.ExecutionDuration.String()
|
||||
}
|
||||
|
||||
if explainMetrics.ExecutionStats.DebugStats != nil {
|
||||
executionStats["debugStats"] = *explainMetrics.ExecutionStats.DebugStats
|
||||
}
|
||||
|
||||
metricsData["executionStats"] = executionStats
|
||||
}
|
||||
|
||||
return metricsData, nil
|
||||
}
|
||||
|
||||
// ParseParams parses and validates input parameters
|
||||
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
|
||||
return tools.ParseParams(t.Parameters, data, claims)
|
||||
}
|
||||
|
||||
// Manifest returns the tool manifest
|
||||
func (t Tool) Manifest() tools.Manifest {
|
||||
return t.manifest
|
||||
}
|
||||
|
||||
// McpManifest returns the MCP manifest
|
||||
func (t Tool) McpManifest() tools.McpManifest {
|
||||
return t.mcpManifest
|
||||
}
|
||||
|
||||
// Authorized checks if the tool is authorized based on verified auth services
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
|
||||
}
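A minimal sketch of an invocation payload for this tool, using the parameter keys defined above; the collection name, fields, and values are illustrative only. Note that each entry in filters is a JSON string, as described by the filters parameter.

# Hypothetical invocation payload for a firestore-query-collection tool.
# Field names and values are examples; operators and keys follow the
# definitions above (collectionPath, filters, orderBy, limit, analyzeQuery).
collectionPath: "users"
filters:
  - '{"field": "age", "op": ">", "value": 18}'
  - '{"field": "status", "op": "==", "value": "active"}'
orderBy: '{"field": "age", "direction": "DESCENDING"}'
limit: 50
analyzeQuery: true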
@@ -0,0 +1,156 @@
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package firestorequerycollection_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/testutils"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequerycollection"
|
||||
)
|
||||
|
||||
func TestParseFromYamlFirestoreQueryCollection(t *testing.T) {
|
||||
ctx, err := testutils.ContextWithNewLogger()
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
tcs := []struct {
|
||||
desc string
|
||||
in string
|
||||
want server.ToolConfigs
|
||||
}{
|
||||
{
|
||||
desc: "basic example",
|
||||
in: `
|
||||
tools:
|
||||
query_users_tool:
|
||||
kind: firestore-query-collection
|
||||
source: my-firestore-instance
|
||||
description: Query users collection with filters and ordering
|
||||
`,
|
||||
want: server.ToolConfigs{
|
||||
"query_users_tool": firestorequerycollection.Config{
|
||||
Name: "query_users_tool",
|
||||
Kind: "firestore-query-collection",
|
||||
Source: "my-firestore-instance",
|
||||
Description: "Query users collection with filters and ordering",
|
||||
AuthRequired: []string{},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
desc: "with auth requirements",
|
||||
in: `
|
||||
tools:
|
||||
secure_query_tool:
|
||||
kind: firestore-query-collection
|
||||
source: prod-firestore
|
||||
description: Query collections with authentication
|
||||
authRequired:
|
||||
- google-auth-service
|
||||
- api-key-service
|
||||
`,
|
||||
want: server.ToolConfigs{
|
||||
"secure_query_tool": firestorequerycollection.Config{
|
||||
Name: "secure_query_tool",
|
||||
Kind: "firestore-query-collection",
|
||||
Source: "prod-firestore",
|
||||
Description: "Query collections with authentication",
|
||||
AuthRequired: []string{"google-auth-service", "api-key-service"},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
got := struct {
|
||||
Tools server.ToolConfigs `yaml:"tools"`
|
||||
}{}
|
||||
// Parse contents
|
||||
err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to unmarshal: %s", err)
|
||||
}
|
||||
if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
|
||||
t.Fatalf("incorrect parse: diff %v", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestParseFromYamlMultipleTools(t *testing.T) {
|
||||
ctx, err := testutils.ContextWithNewLogger()
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
in := `
|
||||
tools:
|
||||
query_users:
|
||||
kind: firestore-query-collection
|
||||
source: users-firestore
|
||||
description: Query user documents with filtering
|
||||
authRequired:
|
||||
- user-auth
|
||||
query_products:
|
||||
kind: firestore-query-collection
|
||||
source: products-firestore
|
||||
description: Query product catalog
|
||||
query_orders:
|
||||
kind: firestore-query-collection
|
||||
source: orders-firestore
|
||||
description: Query customer orders with complex filters
|
||||
authRequired:
|
||||
- user-auth
|
||||
- admin-auth
|
||||
`
|
||||
want := server.ToolConfigs{
|
||||
"query_users": firestorequerycollection.Config{
|
||||
Name: "query_users",
|
||||
Kind: "firestore-query-collection",
|
||||
Source: "users-firestore",
|
||||
Description: "Query user documents with filtering",
|
||||
AuthRequired: []string{"user-auth"},
|
||||
},
|
||||
"query_products": firestorequerycollection.Config{
|
||||
Name: "query_products",
|
||||
Kind: "firestore-query-collection",
|
||||
Source: "products-firestore",
|
||||
Description: "Query product catalog",
|
||||
AuthRequired: []string{},
|
||||
},
|
||||
"query_orders": firestorequerycollection.Config{
|
||||
Name: "query_orders",
|
||||
Kind: "firestore-query-collection",
|
||||
Source: "orders-firestore",
|
||||
Description: "Query customer orders with complex filters",
|
||||
AuthRequired: []string{"user-auth", "admin-auth"},
|
||||
},
|
||||
}
|
||||
|
||||
got := struct {
|
||||
Tools server.ToolConfigs `yaml:"tools"`
|
||||
}{}
|
||||
// Parse contents
|
||||
err = yaml.UnmarshalContext(ctx, testutils.FormatYaml(in), &got)
|
||||
if err != nil {
|
||||
t.Fatalf("unable to unmarshal: %s", err)
|
||||
}
|
||||
if diff := cmp.Diff(want, got.Tools); diff != "" {
|
||||
t.Fatalf("incorrect parse: diff %v", diff)
|
||||
}
|
||||
}
@@ -0,0 +1,292 @@
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package firestorevalidaterules
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
firestoreds "github.com/googleapis/genai-toolbox/internal/sources/firestore"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"google.golang.org/api/firebaserules/v1"
|
||||
)
|
||||
|
||||
const kind string = "firestore-validate-rules"
|
||||
|
||||
// Parameter keys
|
||||
const (
|
||||
sourceKey = "source"
|
||||
)
|
||||
|
||||
func init() {
|
||||
if !tools.Register(kind, newConfig) {
|
||||
        panic(fmt.Sprintf("tool kind %q already registered", kind))
    }
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
    actual := Config{Name: name}
    if err := decoder.DecodeContext(ctx, &actual); err != nil {
        return nil, err
    }
    return actual, nil
}

type compatibleSource interface {
    FirebaseRulesClient() *firebaserules.Service
    GetProjectId() string
}

// validate compatible sources are still compatible
var _ compatibleSource = &firestoreds.Source{}

var compatibleSources = [...]string{firestoreds.SourceKind}

type Config struct {
    Name         string   `yaml:"name" validate:"required"`
    Kind         string   `yaml:"kind" validate:"required"`
    Source       string   `yaml:"source" validate:"required"`
    Description  string   `yaml:"description" validate:"required"`
    AuthRequired []string `yaml:"authRequired"`
}

// validate interface
var _ tools.ToolConfig = Config{}

func (cfg Config) ToolConfigKind() string {
    return kind
}

func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
    // verify source exists
    rawS, ok := srcs[cfg.Source]
    if !ok {
        return nil, fmt.Errorf("no source named %q configured", cfg.Source)
    }

    // verify the source is compatible
    s, ok := rawS.(compatibleSource)
    if !ok {
        return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
    }

    // Create parameters
    parameters := createParameters()

    mcpManifest := tools.McpManifest{
        Name:        cfg.Name,
        Description: cfg.Description,
        InputSchema: parameters.McpManifest(),
    }

    // finish tool setup
    t := Tool{
        Name:         cfg.Name,
        Kind:         kind,
        Parameters:   parameters,
        AuthRequired: cfg.AuthRequired,
        RulesClient:  s.FirebaseRulesClient(),
        ProjectId:    s.GetProjectId(),
        manifest:     tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
        mcpManifest:  mcpManifest,
    }
    return t, nil
}

// createParameters creates the parameter definitions for the tool
func createParameters() tools.Parameters {
    sourceParameter := tools.NewStringParameter(
        sourceKey,
        "The Firestore Rules source code to validate",
    )

    return tools.Parameters{sourceParameter}
}

// validate interface
var _ tools.Tool = Tool{}

type Tool struct {
    Name         string           `yaml:"name"`
    Kind         string           `yaml:"kind"`
    AuthRequired []string         `yaml:"authRequired"`
    Parameters   tools.Parameters `yaml:"parameters"`

    RulesClient *firebaserules.Service
    ProjectId   string
    manifest    tools.Manifest
    mcpManifest tools.McpManifest
}

// Issue represents a validation issue in the rules
type Issue struct {
    SourcePosition SourcePosition `json:"sourcePosition"`
    Description    string         `json:"description"`
    Severity       string         `json:"severity"`
}

// SourcePosition represents the location of an issue in the source
type SourcePosition struct {
    FileName      string `json:"fileName,omitempty"`
    Line          int64  `json:"line"`          // 1-based
    Column        int64  `json:"column"`        // 1-based
    CurrentOffset int64  `json:"currentOffset"` // 0-based, inclusive start
    EndOffset     int64  `json:"endOffset"`     // 0-based, exclusive end
}

// ValidationResult represents the result of rules validation
type ValidationResult struct {
    Valid           bool    `json:"valid"`
    IssueCount      int     `json:"issueCount"`
    FormattedIssues string  `json:"formattedIssues,omitempty"`
    RawIssues       []Issue `json:"rawIssues,omitempty"`
}

func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
    mapParams := params.AsMap()

    // Get source parameter
    source, ok := mapParams[sourceKey].(string)
    if !ok || source == "" {
        return nil, fmt.Errorf("invalid or missing '%s' parameter", sourceKey)
    }

    // Create test request
    testRequest := &firebaserules.TestRulesetRequest{
        Source: &firebaserules.Source{
            Files: []*firebaserules.File{
                {
                    Name:    "firestore.rules",
                    Content: source,
                },
            },
        },
        // We don't need test cases for validation only
        TestSuite: &firebaserules.TestSuite{
            TestCases: []*firebaserules.TestCase{},
        },
    }

    // Call the test API
    projectName := fmt.Sprintf("projects/%s", t.ProjectId)
    response, err := t.RulesClient.Projects.Test(projectName, testRequest).Context(ctx).Do()
    if err != nil {
        return nil, fmt.Errorf("failed to validate rules: %w", err)
    }

    // Process the response
    result := t.processValidationResponse(response, source)

    return result, nil
}

func (t Tool) processValidationResponse(response *firebaserules.TestRulesetResponse, source string) ValidationResult {
    if len(response.Issues) == 0 {
        return ValidationResult{
            Valid:           true,
            IssueCount:      0,
            FormattedIssues: "✓ No errors detected. Rules are valid.",
        }
    }

    // Convert issues to our format
    issues := make([]Issue, len(response.Issues))
    for i, issue := range response.Issues {
        issues[i] = Issue{
            Description: issue.Description,
            Severity:    issue.Severity,
            SourcePosition: SourcePosition{
                FileName:      issue.SourcePosition.FileName,
                Line:          issue.SourcePosition.Line,
                Column:        issue.SourcePosition.Column,
                CurrentOffset: issue.SourcePosition.CurrentOffset,
                EndOffset:     issue.SourcePosition.EndOffset,
            },
        }
    }

    // Format issues
    formattedIssues := t.formatRulesetIssues(issues, source)

    return ValidationResult{
        Valid:           false,
        IssueCount:      len(issues),
        FormattedIssues: formattedIssues,
        RawIssues:       issues,
    }
}

// formatRulesetIssues formats validation issues into a human-readable string with code snippets
func (t Tool) formatRulesetIssues(issues []Issue, rulesSource string) string {
    sourceLines := strings.Split(rulesSource, "\n")
    var formattedOutput []string

    formattedOutput = append(formattedOutput, fmt.Sprintf("Found %d issue(s) in rules source:\n", len(issues)))

    for _, issue := range issues {
        issueString := fmt.Sprintf("%s: %s [Ln %d, Col %d]",
            issue.Severity,
            issue.Description,
            issue.SourcePosition.Line,
            issue.SourcePosition.Column)

        if issue.SourcePosition.Line > 0 {
            lineIndex := int(issue.SourcePosition.Line - 1) // 0-based index
            if lineIndex >= 0 && lineIndex < len(sourceLines) {
                errorLine := sourceLines[lineIndex]
                issueString += fmt.Sprintf("\n```\n%s", errorLine)

                // Add carets if we have column and offset information
                if issue.SourcePosition.Column > 0 &&
                    issue.SourcePosition.CurrentOffset >= 0 &&
                    issue.SourcePosition.EndOffset > issue.SourcePosition.CurrentOffset {

                    startColumn := int(issue.SourcePosition.Column - 1) // 0-based
                    errorTokenLength := int(issue.SourcePosition.EndOffset - issue.SourcePosition.CurrentOffset)

                    if startColumn >= 0 && errorTokenLength > 0 && startColumn <= len(errorLine) {
                        padding := strings.Repeat(" ", startColumn)
                        carets := strings.Repeat("^", errorTokenLength)
                        issueString += fmt.Sprintf("\n%s%s", padding, carets)
                    }
                }
                issueString += "\n```"
            }
        }

        formattedOutput = append(formattedOutput, issueString)
    }

    return strings.Join(formattedOutput, "\n\n")
}

func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
    return tools.ParseParams(t.Parameters, data, claims)
}

func (t Tool) Manifest() tools.Manifest {
    return t.manifest
}

func (t Tool) McpManifest() tools.McpManifest {
    return t.mcpManifest
}

func (t Tool) Authorized(verifiedAuthServices []string) bool {
    return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}
@@ -0,0 +1,156 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package firestorevalidaterules_test

import (
    "testing"

    yaml "github.com/goccy/go-yaml"
    "github.com/google/go-cmp/cmp"
    "github.com/googleapis/genai-toolbox/internal/server"
    "github.com/googleapis/genai-toolbox/internal/testutils"
    "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules"
)

func TestParseFromYamlFirestoreValidateRules(t *testing.T) {
    ctx, err := testutils.ContextWithNewLogger()
    if err != nil {
        t.Fatalf("unexpected error: %s", err)
    }
    tcs := []struct {
        desc string
        in   string
        want server.ToolConfigs
    }{
        {
            desc: "basic example",
            in: `
            tools:
                validate_rules_tool:
                    kind: firestore-validate-rules
                    source: my-firestore-instance
                    description: Validate Firestore security rules
            `,
            want: server.ToolConfigs{
                "validate_rules_tool": firestorevalidaterules.Config{
                    Name:         "validate_rules_tool",
                    Kind:         "firestore-validate-rules",
                    Source:       "my-firestore-instance",
                    Description:  "Validate Firestore security rules",
                    AuthRequired: []string{},
                },
            },
        },
        {
            desc: "with auth requirements",
            in: `
            tools:
                secure_validate_rules:
                    kind: firestore-validate-rules
                    source: prod-firestore
                    description: Validate rules with authentication
                    authRequired:
                        - google-auth-service
                        - api-key-service
            `,
            want: server.ToolConfigs{
                "secure_validate_rules": firestorevalidaterules.Config{
                    Name:         "secure_validate_rules",
                    Kind:         "firestore-validate-rules",
                    Source:       "prod-firestore",
                    Description:  "Validate rules with authentication",
                    AuthRequired: []string{"google-auth-service", "api-key-service"},
                },
            },
        },
    }
    for _, tc := range tcs {
        t.Run(tc.desc, func(t *testing.T) {
            got := struct {
                Tools server.ToolConfigs `yaml:"tools"`
            }{}
            // Parse contents
            err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
            if err != nil {
                t.Fatalf("unable to unmarshal: %s", err)
            }
            if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
                t.Fatalf("incorrect parse: diff %v", diff)
            }
        })
    }
}

func TestParseFromYamlMultipleTools(t *testing.T) {
    ctx, err := testutils.ContextWithNewLogger()
    if err != nil {
        t.Fatalf("unexpected error: %s", err)
    }
    in := `
    tools:
        validate_dev_rules:
            kind: firestore-validate-rules
            source: dev-firestore
            description: Validate development environment rules
            authRequired:
                - dev-auth
        validate_staging_rules:
            kind: firestore-validate-rules
            source: staging-firestore
            description: Validate staging environment rules
        validate_prod_rules:
            kind: firestore-validate-rules
            source: prod-firestore
            description: Validate production environment rules
            authRequired:
                - prod-auth
                - admin-auth
    `
    want := server.ToolConfigs{
        "validate_dev_rules": firestorevalidaterules.Config{
            Name:         "validate_dev_rules",
            Kind:         "firestore-validate-rules",
            Source:       "dev-firestore",
            Description:  "Validate development environment rules",
            AuthRequired: []string{"dev-auth"},
        },
        "validate_staging_rules": firestorevalidaterules.Config{
            Name:         "validate_staging_rules",
            Kind:         "firestore-validate-rules",
            Source:       "staging-firestore",
            Description:  "Validate staging environment rules",
            AuthRequired: []string{},
        },
        "validate_prod_rules": firestorevalidaterules.Config{
            Name:         "validate_prod_rules",
            Kind:         "firestore-validate-rules",
            Source:       "prod-firestore",
            Description:  "Validate production environment rules",
            AuthRequired: []string{"prod-auth", "admin-auth"},
        },
    }

    got := struct {
        Tools server.ToolConfigs `yaml:"tools"`
    }{}
    // Parse contents
    err = yaml.UnmarshalContext(ctx, testutils.FormatYaml(in), &got)
    if err != nil {
        t.Fatalf("unable to unmarshal: %s", err)
    }
    if diff := cmp.Diff(want, got.Tools); diff != "" {
        t.Fatalf("incorrect parse: diff %v", diff)
    }
}
@@ -275,7 +275,11 @@ func getURL(baseURL, path string, pathParams, queryParams tools.Parameters, defa
     // Set dynamic query parameters
     query := parsedURL.Query()
     for _, p := range queryParams {
-        query.Add(p.GetName(), fmt.Sprintf("%v", paramsMap[p.GetName()]))
+        v := paramsMap[p.GetName()]
+        if v == nil {
+            v = ""
+        }
+        query.Add(p.GetName(), fmt.Sprintf("%v", v))
     }
     parsedURL.RawQuery = query.Encode()
     return parsedURL.String(), nil
@@ -299,7 +303,7 @@ func getHeaders(headerParams tools.Parameters, defaultHeaders map[string]string,
     return allHeaders, nil
 }

-func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, error) {
+func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
     paramsMap := params.AsMap()

     // Calculate request body
@@ -345,15 +349,9 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, erro
     var data any
     if err = json.Unmarshal(body, &data); err != nil {
         // if unable to unmarshal data, return result as string.
-        return []any{string(body)}, nil
+        return string(body), nil
     }
-    // if data is a list, return as is.
-    dataList, ok := data.([]any)
-    if ok {
-        return dataList, nil
-    }
-    // if data is not a list (e.g. single map), return data in list.
-    return []any{data}, nil
+    return data, nil
 }

 func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
@@ -116,7 +116,7 @@ type Tool struct {
     mcpManifest tools.McpManifest
 }

-func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, error) {
+func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
     sliceParams := params.AsSlice()
     sql, ok := sliceParams[0].(string)
     if !ok {
@@ -125,7 +125,7 @@ type Tool struct {
     mcpManifest tools.McpManifest
 }

-func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, error) {
+func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) (any, error) {
     paramsMap := params.AsMap()
     newStatement, err := tools.ResolveTemplateParams(t.TemplateParameters, t.Statement, paramsMap)
     if err != nil {
Some files were not shown because too many files have changed in this diff