mirror of https://github.com/googleapis/genai-toolbox.git (synced 2026-01-15 02:18:10 -05:00)

Compare commits: registry-n ... multi_preb (3 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 2ffa026c9b |  |
|  | 344a093a60 |  |
|  | d833d1992d |  |
@@ -59,13 +59,6 @@ You can manually trigger the bot by commenting on your Pull Request:
* `/gemini summary`: Posts a summary of the changes in the pull request.
* `/gemini help`: Overview of the available commands

## Guidelines for Pull Requests

1. Please keep your PR small for more thorough review and easier updates. In case of regression, it also allows us to roll back a single feature instead of multiple ones.
1. For non-trivial changes, consider opening an issue and discussing it with the code owners first.
1. Provide a good PR description as a record of what change is being made and why it was made. Link to a GitHub issue if it exists.
1. Make sure your code is thoroughly tested with unit tests and integration tests. Remember to clean up the test instances properly in your code to avoid memory leaks.

## Adding a New Database Source or Tool

Please create an
@@ -117,8 +110,6 @@ implementation](https://github.com/googleapis/genai-toolbox/blob/main/internal/s
We recommend looking at an [example tool
implementation](https://github.com/googleapis/genai-toolbox/tree/main/internal/tools/postgres/postgressql).

Remember to keep your PRs small. For example, if you are contributing a new Source, only include one or two core Tools within the same PR; the rest of the Tools can come in subsequent PRs.

* **Create a new directory** under `internal/tools` for your tool type (e.g., `internal/tools/newdb/newdbtool`).
* **Define a configuration struct** for your tool in a file named `newdbtool.go` (a minimal sketch follows this checklist).
  Create a `Config` struct and a `Tool` struct to store necessary parameters for
@@ -172,8 +163,6 @@ tools.
  parameters][temp-param-doc]. Only run this test if template
  parameters apply to your tool.

* **Add additional tests** for the tools that are not covered by the predefined tests. Every tool must be tested!

* **Add the new database to the integration test workflow** in
  [integration.cloudbuild.yaml](.ci/integration.cloudbuild.yaml).
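For orientation, here is a minimal sketch of the `Config`/`Tool` pair this checklist asks for, modeled on the `cloud-sql-create-backup` tool that appears later in this diff. The package name `newdbtool` and the exact field set are illustrative placeholders, not a prescribed layout:

```go
package newdbtool

// Config mirrors the YAML fields that existing tools (for example,
// cloud-sql-create-backup) declare so a tool entry can be parsed from tools.yaml.
type Config struct {
    Name         string   `yaml:"name" validate:"required"`
    Kind         string   `yaml:"kind" validate:"required"`
    Description  string   `yaml:"description"`
    Source       string   `yaml:"source" validate:"required"`
    AuthRequired []string `yaml:"authRequired"`
}

// Tool is the runtime value built from Config during Initialize; real
// implementations also carry parameter definitions and MCP manifests.
type Tool struct {
    Config
}
```

The Initialize, Invoke, and manifest plumbing then follows the postgressql example referenced above.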
@@ -255,4 +244,4 @@ resources.
* **PR Description:** PR description should **always** be included. It should
  include a concise description of the changes, its impact, along with a
  summary of the solution. If the PR is related to a specific issue, the issue
  number should be mentioned in the PR description (e.g. `Fixes #1`).
  number should be mentioned in the PR description (e.g. `Fixes #1`).
cmd/root.go
@@ -92,7 +92,6 @@ import (
    _ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomstudies"
    _ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring"
    _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcloneinstance"
    _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatebackup"
    _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatedatabase"
    _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreateusers"
    _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances"
@@ -314,15 +313,15 @@ func Execute() {
type Command struct {
    *cobra.Command

    cfg server.ServerConfig
    logger log.Logger
    tools_file string
    tools_files []string
    tools_folder string
    prebuiltConfig string
    inStream io.Reader
    outStream io.Writer
    errStream io.Writer
    cfg server.ServerConfig
    logger log.Logger
    tools_file string
    tools_files []string
    tools_folder string
    prebuiltConfigs []string
    inStream io.Reader
    outStream io.Writer
    errStream io.Writer
}

// NewCommand returns a Command object representing an invocation of the CLI.
@@ -375,10 +374,10 @@ func NewCommand(opts ...Option) *Command {
    flags.StringVar(&cmd.cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
    // Fetch prebuilt tools sources to customize the help description
    prebuiltHelp := fmt.Sprintf(
        "Use a prebuilt tool configuration by source type. Allowed: '%s'.",
        "Use a prebuilt tool configuration by source type. Allowed: '%s'. Can be specified multiple times.",
        strings.Join(prebuiltconfigs.GetPrebuiltSources(), "', '"),
    )
    flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", prebuiltHelp)
    flags.StringSliceVar(&cmd.prebuiltConfigs, "prebuilt", []string{}, prebuiltHelp)
    flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
    flags.BoolVar(&cmd.cfg.DisableReload, "disable-reload", false, "Disables dynamic reloading of tools file.")
    flags.BoolVar(&cmd.cfg.UI, "ui", false, "Launches the Toolbox UI web server.")
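The switch from `StringVar` to `StringSliceVar` above is what lets `--prebuilt` be passed more than once or as a comma-separated list. A minimal standalone sketch of that pflag behavior, consistent with the test cases further down (the flag-set name and values are illustrative):

```go
package main

import (
    "fmt"

    "github.com/spf13/pflag"
)

func main() {
    fs := pflag.NewFlagSet("toolbox", pflag.ContinueOnError)

    var prebuilt []string
    fs.StringSliceVar(&prebuilt, "prebuilt", []string{}, "prebuilt tool configurations")

    // Repeated flags accumulate into the slice; "--prebuilt alloydb,bigquery"
    // parses to the same result.
    if err := fs.Parse([]string{"--prebuilt", "alloydb", "--prebuilt", "bigquery"}); err != nil {
        panic(err)
    }

    fmt.Println(prebuilt) // [alloydb bigquery]
}
```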
@@ -865,24 +864,32 @@ func run(cmd *Command) error {
    var allToolsFiles []ToolsFile

    // Load Prebuilt Configuration
    if cmd.prebuiltConfig != "" {
        buf, err := prebuiltconfigs.Get(cmd.prebuiltConfig)
        if err != nil {
            cmd.logger.ErrorContext(ctx, err.Error())
            return err
        }
        logMsg := fmt.Sprint("Using prebuilt tool configuration for ", cmd.prebuiltConfig)
        cmd.logger.InfoContext(ctx, logMsg)
        // Append prebuilt.source to Version string for the User Agent
        cmd.cfg.Version += "+prebuilt." + cmd.prebuiltConfig

        parsed, err := parseToolsFile(ctx, buf)
        if err != nil {
            errMsg := fmt.Errorf("unable to parse prebuilt tool configuration: %w", err)
            cmd.logger.ErrorContext(ctx, errMsg.Error())
            return errMsg
    if len(cmd.prebuiltConfigs) > 0 {
        slices.Sort(cmd.prebuiltConfigs)
        sourcesList := strings.Join(cmd.prebuiltConfigs, ", ")
        logMsg := fmt.Sprintf("Using prebuilt tool configurations for: %s", sourcesList)
        cmd.logger.InfoContext(ctx, logMsg)

        for _, configName := range cmd.prebuiltConfigs {
            buf, err := prebuiltconfigs.Get(configName)
            if err != nil {
                cmd.logger.ErrorContext(ctx, err.Error())
                return err
            }

            // Update version string
            cmd.cfg.Version += "+prebuilt." + configName

            // Parse into ToolsFile struct
            parsed, err := parseToolsFile(ctx, buf)
            if err != nil {
                errMsg := fmt.Errorf("unable to parse prebuilt tool configuration for '%s': %w", configName, err)
                cmd.logger.ErrorContext(ctx, errMsg.Error())
                return errMsg
            }
            allToolsFiles = append(allToolsFiles, parsed)
        }
        allToolsFiles = append(allToolsFiles, parsed)
    }

    // Determine if Custom Files should be loaded
@@ -890,7 +897,7 @@ func run(cmd *Command) error {
    isCustomConfigured := cmd.tools_file != "" || len(cmd.tools_files) > 0 || cmd.tools_folder != ""

    // Determine if default 'tools.yaml' should be used (No prebuilt AND No custom flags)
    useDefaultToolsFile := cmd.prebuiltConfig == "" && !isCustomConfigured
    useDefaultToolsFile := len(cmd.prebuiltConfigs) == 0 && !isCustomConfigured

    if useDefaultToolsFile {
        cmd.tools_file = "tools.yaml"
@@ -420,17 +420,27 @@ func TestPrebuiltFlag(t *testing.T) {
    tcs := []struct {
        desc string
        args []string
        want string
        want []string
    }{
        {
            desc: "default value",
            args: []string{},
            want: "",
            want: []string{},
        },
        {
            desc: "custom pre built flag",
            args: []string{"--tools-file", "alloydb"},
            want: "alloydb",
            desc: "single prebuilt flag",
            args: []string{"--prebuilt", "alloydb"},
            want: []string{"alloydb"},
        },
        {
            desc: "multiple prebuilt flags",
            args: []string{"--prebuilt", "alloydb", "--prebuilt", "bigquery"},
            want: []string{"alloydb", "bigquery"},
        },
        {
            desc: "comma separated prebuilt flags",
            args: []string{"--prebuilt", "alloydb,bigquery"},
            want: []string{"alloydb", "bigquery"},
        },
    }
    for _, tc := range tcs {
@@ -439,8 +449,8 @@ func TestPrebuiltFlag(t *testing.T) {
            if err != nil {
                t.Fatalf("unexpected error invoking command: %s", err)
            }
            if c.tools_file != tc.want {
                t.Fatalf("got %v, want %v", c.cfg, tc.want)
            if diff := cmp.Diff(c.prebuiltConfigs, tc.want); diff != "" {
                t.Fatalf("got %v, want %v, diff %s", c.prebuiltConfigs, tc.want, diff)
            }
        })
    }
@@ -1493,7 +1503,7 @@ func TestPrebuiltTools(t *testing.T) {
            wantToolset: server.ToolsetConfigs{
                "cloud_sql_postgres_admin_tools": tools.ToolsetConfig{
                    Name: "cloud_sql_postgres_admin_tools",
                    ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck", "clone_instance", "create_backup"},
                    ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck", "clone_instance"},
                },
            },
        },
@@ -1503,7 +1513,7 @@ func TestPrebuiltTools(t *testing.T) {
            wantToolset: server.ToolsetConfigs{
                "cloud_sql_mysql_admin_tools": tools.ToolsetConfig{
                    Name: "cloud_sql_mysql_admin_tools",
                    ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup"},
                    ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance"},
                },
            },
        },
@@ -1513,7 +1523,7 @@ func TestPrebuiltTools(t *testing.T) {
            wantToolset: server.ToolsetConfigs{
                "cloud_sql_mssql_admin_tools": tools.ToolsetConfig{
                    Name: "cloud_sql_mssql_admin_tools",
                    ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup"},
                    ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance"},
                },
            },
        },
@@ -2063,6 +2073,12 @@ authSources:
            return nil
        },
    },
    {
        desc: "sqlite called twice error",
        args: []string{"--prebuilt", "sqlite", "--prebuilt", "sqlite"},
        wantErr: true,
        errString: "resource conflicts detected",
    },
    {
        desc: "tool conflict error",
        args: []string{"--prebuilt", "sqlite", "--tools-file", toolConflictFile},
@@ -48,7 +48,6 @@ instance, database and users:
* `roles/cloudsql.editor`: Provides permissions to manage existing resources.
  * All `viewer` tools
  * `create_database`
  * `create_backup`
* `roles/cloudsql.admin`: Provides full control over all resources.
  * All `editor` and `viewer` tools
  * `create_instance`
@@ -300,7 +299,6 @@ instances and interacting with your database:
* **create_user**: Creates a new user in a Cloud SQL instance.
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
* **clone_instance**: Creates a clone of an existing Cloud SQL for SQL Server instance.
* **create_backup**: Creates a backup on a Cloud SQL instance.

{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs

@@ -48,7 +48,6 @@ database and users:
* `roles/cloudsql.editor`: Provides permissions to manage existing resources.
  * All `viewer` tools
  * `create_database`
  * `create_backup`
* `roles/cloudsql.admin`: Provides full control over all resources.
  * All `editor` and `viewer` tools
  * `create_instance`
@@ -300,7 +299,6 @@ instances and interacting with your database:
* **create_user**: Creates a new user in a Cloud SQL instance.
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
* **clone_instance**: Creates a clone of an existing Cloud SQL for MySQL instance.
* **create_backup**: Creates a backup on a Cloud SQL instance.

{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs

@@ -48,7 +48,6 @@ instance, database and users:
* `roles/cloudsql.editor`: Provides permissions to manage existing resources.
  * All `viewer` tools
  * `create_database`
  * `create_backup`
* `roles/cloudsql.admin`: Provides full control over all resources.
  * All `editor` and `viewer` tools
  * `create_instance`
@@ -300,7 +299,6 @@ instances and interacting with your database:
* **create_user**: Creates a new user in a Cloud SQL instance.
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
* **clone_instance**: Creates a clone of an existing Cloud SQL for PostgreSQL instance.
* **create_backup**: Creates a backup on a Cloud SQL instance.

{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs
@@ -16,7 +16,7 @@ description: >
| | `--log-level` | Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'. | `info` |
| | `--logging-format` | Specify logging format to use. Allowed: 'standard' or 'JSON'. | `standard` |
| `-p` | `--port` | Port the server will listen on. | `5000` |
| | `--prebuilt` | Use a prebuilt tool configuration by source type. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
| | `--prebuilt` | Use one or more prebuilt tool configurations by source type. See [Prebuilt Tools Reference](prebuilt-tools.md) for allowed values. | |
| | `--stdio` | Listens via MCP STDIO instead of acting as a remote HTTP server. | |
| | `--telemetry-gcp` | Enable exporting directly to Google Cloud Monitoring. | |
| | `--telemetry-otlp` | Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318') | |
@@ -50,6 +50,11 @@ description: >

# Server with prebuilt + custom tools configurations
./toolbox --tools-file tools.yaml --prebuilt alloydb-postgres

# Server with multiple prebuilt tools configurations
./toolbox --prebuilt alloydb-postgres,alloydb-postgres-admin
# OR
./toolbox --prebuilt alloydb-postgres --prebuilt alloydb-postgres-admin
```

### Tool Configuration Sources
@@ -70,7 +75,7 @@ The CLI supports multiple mutually exclusive ways to specify tool configurations

**Prebuilt Configurations:**

- `--prebuilt`: Use predefined configurations for specific database types (e.g.,
- `--prebuilt`: Use one or more predefined configurations for specific database types (e.g.,
  'bigquery', 'postgres', 'spanner'). See [Prebuilt Tools
  Reference](prebuilt-tools.md) for allowed values.
@@ -16,6 +16,9 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
{{< notice tip >}}
You can now use `--prebuilt` along with `--tools-file`, `--tools-files`, or
`--tools-folder` to combine prebuilt configs with custom tools.

You can also combine multiple prebuilt configs.

See [Usage Examples](../reference/cli.md#examples).
{{< /notice >}}

@@ -187,7 +190,6 @@ See [Usage Examples](../reference/cli.md#examples).
  manage existing resources.
  * All `viewer` tools
  * `create_database`
  * `create_backup`
* **Cloud SQL Admin** (`roles/cloudsql.admin`): Provides full control over
  all resources.
  * All `editor` and `viewer` tools
@@ -204,7 +206,6 @@ See [Usage Examples](../reference/cli.md#examples).
* `create_user`: Creates a new user in a Cloud SQL instance.
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
* `clone_instance`: Creates a clone of an existing Cloud SQL for MySQL instance.
* `create_backup`: Creates a backup on a Cloud SQL instance.

## Cloud SQL for PostgreSQL

@@ -277,7 +278,6 @@ See [Usage Examples](../reference/cli.md#examples).
  manage existing resources.
  * All `viewer` tools
  * `create_database`
  * `create_backup`
* **Cloud SQL Admin** (`roles/cloudsql.admin`): Provides full control over
  all resources.
  * All `editor` and `viewer` tools
@@ -293,7 +293,6 @@ See [Usage Examples](../reference/cli.md#examples).
* `create_user`: Creates a new user in a Cloud SQL instance.
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
* `clone_instance`: Creates a clone of an existing Cloud SQL for PostgreSQL instance.
* `create_backup`: Creates a backup on a Cloud SQL instance.

## Cloud SQL for SQL Server

@@ -340,7 +339,6 @@ See [Usage Examples](../reference/cli.md#examples).
  manage existing resources.
  * All `viewer` tools
  * `create_database`
  * `create_backup`
* **Cloud SQL Admin** (`roles/cloudsql.admin`): Provides full control over
  all resources.
  * All `editor` and `viewer` tools
@@ -356,7 +354,6 @@ See [Usage Examples](../reference/cli.md#examples).
* `create_user`: Creates a new user in a Cloud SQL instance.
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
* `clone_instance`: Creates a clone of an existing Cloud SQL for SQL Server instance.
* `create_backup`: Creates a backup on a Cloud SQL instance.

## Dataplex
@@ -1,45 +0,0 @@
---
title: cloud-sql-create-backup
type: docs
weight: 10
description: "Creates a backup on a Cloud SQL instance."
---

The `cloud-sql-create-backup` tool creates an on-demand backup on a Cloud SQL instance using the Cloud SQL Admin API.

{{< notice info >}}
This tool uses a `source` of kind `cloud-sql-admin`.
{{< /notice >}}

## Examples

Basic backup creation (current state)

```yaml
tools:
  backup-creation-basic:
    kind: cloud-sql-create-backup
    source: cloud-sql-admin-source
    description: "Creates a backup on the given Cloud SQL instance."
```

## Reference

### Tool Configuration

| **field** | **type** | **required** | **description** |
| -------------- | :------: | :----------: | ------------------------------------------------------------- |
| kind | string | true | Must be "cloud-sql-create-backup". |
| source | string | true | The name of the `cloud-sql-admin` source to use. |
| description | string | false | A description of the tool. |

### Tool Inputs

| **parameter** | **type** | **required** | **description** |
| -------------------------- | :------: | :----------: | ------------------------------------------------------------------------------- |
| project | string | true | The project ID. |
| instance | string | true | The name of the instance to take a backup on. Does not include the project ID. |
| location | string | false | (Optional) Location of the backup run. |
| backup_description | string | false | (Optional) The description of this backup run. |

## See Also

- [Cloud SQL Admin API documentation](https://cloud.google.com/sql/docs/mysql/admin-api)
- [Toolbox Cloud SQL tools documentation](../cloudsql)
- [Cloud SQL Backup API documentation](https://cloud.google.com/sql/docs/mysql/backup-recovery/backups)
@@ -1,7 +0,0 @@
---
title: "Neo4j"
type: docs
weight: 1
description: >
  How to get started with Toolbox using Neo4j.
---
@@ -1,141 +0,0 @@
---
title: "Quickstart (MCP with Neo4j)"
type: docs
weight: 1
description: >
  How to get started running Toolbox with MCP Inspector and Neo4j as the source.
---

## Overview

[Model Context Protocol](https://modelcontextprotocol.io) is an open protocol that standardizes how applications provide context to LLMs. Check out this page on how to [connect to Toolbox via MCP](../../how-to/connect_via_mcp.md).

## Step 1: Set up your Neo4j Database and Data

In this section, you'll set up a database and populate it with sample data for a movies-related agent. This guide assumes you have a running Neo4j instance, either locally or in the cloud.

1. **Populate the database with data.**
   To make this quickstart straightforward, we'll use the built-in Movies dataset available in Neo4j.

1. In your Neo4j Browser, run the following command to create and populate the database:

   ```cypher
   :play movies
   ```

1. Follow the instructions to load the data. This will create a graph with `Movie`, `Person`, and `Actor` nodes and their relationships.

## Step 2: Install and configure Toolbox

In this section, we will install the MCP Toolbox, configure our tools in a `tools.yaml` file, and then run the Toolbox server.

1. **Install the Toolbox binary.**
   The simplest way to get started is to download the latest binary for your operating system.

1. Download the latest version of Toolbox as a binary:

   ```bash
   export OS="linux/amd64" # one of linux/amd64, darwin/arm64, darwin/amd64, or windows/amd64
   curl -O https://storage.googleapis.com/genai-toolbox/v0.16.0/$OS/toolbox
   ```

1. Make the binary executable:

   ```bash
   chmod +x toolbox
   ```

1. **Create the `tools.yaml` file.**
   This file defines your Neo4j source and the specific tools that will be exposed to your AI agent.

   {{< notice tip >}}
   Authentication for the Neo4j source uses standard username and password fields. For production use, it is highly recommended to use environment variables for sensitive information like passwords.
   {{< /notice >}}

   Write the following into a `tools.yaml` file:

   ```yaml
   sources:
     my-neo4j-source:
       kind: neo4j
       uri: bolt://localhost:7687
       user: neo4j
       password: my-password # Replace with your actual password

   tools:
     search-movies-by-actor:
       kind: neo4j-cypher
       source: my-neo4j-source
       description: "Searches for movies an actor has appeared in based on their name. Useful for questions like 'What movies has Tom Hanks been in?'"
       parameters:
         - name: actor_name
           type: string
           description: The full name of the actor to search for.
       statement: |
         MATCH (p:Person {name: $actor_name}) -[:ACTED_IN]-> (m:Movie)
         RETURN m.title AS title, m.year AS year, m.genre AS genre

     get-actor-for-movie:
       kind: neo4j-cypher
       source: my-neo4j-source
       description: "Finds the actors who starred in a specific movie. Useful for questions like 'Who acted in Inception?'"
       parameters:
         - name: movie_title
           type: string
           description: The exact title of the movie.
       statement: |
         MATCH (p:Person) -[:ACTED_IN]-> (m:Movie {title: $movie_title})
         RETURN p.name AS actor
   ```

1. **Start the Toolbox server.**
   Run the Toolbox server, pointing to the `tools.yaml` file you created earlier.

   ```bash
   ./toolbox --tools-file "tools.yaml"
   ```

## Step 3: Connect to MCP Inspector

1. **Run the MCP Inspector:**

   ```bash
   npx @modelcontextprotocol/inspector
   ```

1. Type `y` when it asks to install the inspector package.
1. It should show the following when the MCP Inspector is up and running (please take note of `<YOUR_SESSION_TOKEN>`):

   ```bash
   Starting MCP inspector...
   ⚙️ Proxy server listening on localhost:6277
   🔑 Session token: <YOUR_SESSION_TOKEN>
   Use this token to authenticate requests or set DANGEROUSLY_OMIT_AUTH=true to disable auth

   🚀 MCP Inspector is up and running at:
   http://localhost:6274/?MCP_PROXY_AUTH_TOKEN=<YOUR_SESSION_TOKEN>
   ```

1. Open the above link in your browser.

1. For `Transport Type`, select `Streamable HTTP`.

1. For `URL`, type in `http://127.0.0.1:5000/mcp`.

1. For `Configuration` -> `Proxy Session Token`, make sure `<YOUR_SESSION_TOKEN>` is present.

1. Click `Connect`.

1. Select `List Tools`, you will see a list of tools configured in `tools.yaml`.

1. Test out your tools here!
@@ -43,9 +43,6 @@ tools:
  clone_instance:
    kind: cloud-sql-clone-instance
    source: cloud-sql-admin-source
  create_backup:
    kind: cloud-sql-create-backup
    source: cloud-sql-admin-source

toolsets:
  cloud_sql_mssql_admin_tools:
@@ -57,4 +54,3 @@ toolsets:
    - create_user
    - wait_for_operation
    - clone_instance
    - create_backup

@@ -43,9 +43,6 @@ tools:
  clone_instance:
    kind: cloud-sql-clone-instance
    source: cloud-sql-admin-source
  create_backup:
    kind: cloud-sql-create-backup
    source: cloud-sql-admin-source

toolsets:
  cloud_sql_mysql_admin_tools:
@@ -57,4 +54,3 @@ toolsets:
    - create_user
    - wait_for_operation
    - clone_instance
    - create_backup

@@ -46,9 +46,6 @@ tools:
  postgres_upgrade_precheck:
    kind: postgres-upgrade-precheck
    source: cloud-sql-admin-source
  create_backup:
    kind: cloud-sql-create-backup
    source: cloud-sql-admin-source

toolsets:
  cloud_sql_postgres_admin_tools:
@@ -61,4 +58,3 @@ toolsets:
    - wait_for_operation
    - postgres_upgrade_precheck
    - clone_instance
    - create_backup
@@ -352,28 +352,6 @@ func (s *Source) GetWaitForOperations(ctx context.Context, service *sqladmin.Ser
    return nil, nil
}

func (s *Source) InsertBackupRun(ctx context.Context, project, instance, location, backupDescription, accessToken string) (any, error) {
    backupRun := &sqladmin.BackupRun{}
    if location != "" {
        backupRun.Location = location
    }
    if backupDescription != "" {
        backupRun.Description = backupDescription
    }

    service, err := s.GetService(ctx, string(accessToken))
    if err != nil {
        return nil, err
    }

    resp, err := service.BackupRuns.Insert(project, instance, backupRun).Do()
    if err != nil {
        return nil, fmt.Errorf("error creating backup: %w", err)
    }

    return resp, nil
}

func generateCloudSQLConnectionMessage(ctx context.Context, source *Source, logger log.Logger, opResponse map[string]any, connectionMessageTemplate string) (string, bool) {
    operationType, ok := opResponse["operationType"].(string)
    if !ok || operationType != "CREATE_DATABASE" {
@@ -1,180 +0,0 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cloudsqlcreatebackup

import (
    "context"
    "fmt"

    "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/embeddingmodels"
    "github.com/googleapis/genai-toolbox/internal/sources"
    "github.com/googleapis/genai-toolbox/internal/tools"
    "github.com/googleapis/genai-toolbox/internal/util/parameters"
    "google.golang.org/api/sqladmin/v1"
)

const kind string = "cloud-sql-create-backup"

var _ tools.ToolConfig = Config{}

type compatibleSource interface {
    GetDefaultProject() string
    GetService(context.Context, string) (*sqladmin.Service, error)
    UseClientAuthorization() bool
    InsertBackupRun(ctx context.Context, project, instance, location, backupDescription, accessToken string) (any, error)
}

// Config defines the configuration for the create-backup tool.
type Config struct {
    Name string `yaml:"name" validate:"required"`
    Kind string `yaml:"kind" validate:"required"`
    Description string `yaml:"description"`
    Source string `yaml:"source" validate:"required"`
    AuthRequired []string `yaml:"authRequired"`
}

func init() {
    if !tools.Register(kind, newConfig) {
        panic(fmt.Sprintf("tool kind %q already registered", kind))
    }
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
    actual := Config{Name: name}
    if err := decoder.DecodeContext(ctx, &actual); err != nil {
        return nil, err
    }
    return actual, nil
}

// ToolConfigKind returns the kind of the tool.
func (cfg Config) ToolConfigKind() string {
    return kind
}

// Initialize initializes the tool from the configuration.
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
    rawS, ok := srcs[cfg.Source]
    if !ok {
        return nil, fmt.Errorf("no source named %q configured", cfg.Source)
    }
    s, ok := rawS.(compatibleSource)
    if !ok {
        return nil, fmt.Errorf("invalid source for %q tool: source %q not compatible", kind, cfg.Source)
    }

    project := s.GetDefaultProject()
    var projectParam parameters.Parameter
    if project != "" {
        projectParam = parameters.NewStringParameterWithDefault("project", project, "The GCP project ID. This is pre-configured; do not ask for it unless the user explicitly provides a different one.")
    } else {
        projectParam = parameters.NewStringParameter("project", "The project ID")
    }

    allParameters := parameters.Parameters{
        projectParam,
        parameters.NewStringParameter("instance", "Cloud SQL instance ID. This does not include the project ID."),
        // Location and backup_description are optional.
        parameters.NewStringParameterWithRequired("location", "Location of the backup run.", false),
        parameters.NewStringParameterWithRequired("backup_description", "The description of this backup run.", false),
    }
    paramManifest := allParameters.Manifest()

    description := cfg.Description
    if description == "" {
        description = "Creates a backup on a Cloud SQL instance."
    }

    mcpManifest := tools.GetMcpManifest(cfg.Name, description, cfg.AuthRequired, allParameters, nil)

    return Tool{
        Config: cfg,
        AllParams: allParameters,
        manifest: tools.Manifest{Description: description, Parameters: paramManifest, AuthRequired: cfg.AuthRequired},
        mcpManifest: mcpManifest,
    }, nil
}

// Tool represents the create-backup tool.
type Tool struct {
    Config
    AllParams parameters.Parameters `yaml:"allParams"`
    manifest tools.Manifest
    mcpManifest tools.McpManifest
}

func (t Tool) ToConfig() tools.ToolConfig {
    return t.Config
}

func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, params parameters.ParamValues, accessToken tools.AccessToken) (any, error) {
    source, err := tools.GetCompatibleSource[compatibleSource](resourceMgr, t.Source, t.Name, t.Kind)
    if err != nil {
        return nil, err
    }
    paramsMap := params.AsMap()

    project, ok := paramsMap["project"].(string)
    if !ok {
        return nil, fmt.Errorf("error casting 'project' parameter: %v", paramsMap["project"])
    }
    instance, ok := paramsMap["instance"].(string)
    if !ok {
        return nil, fmt.Errorf("error casting 'instance' parameter: %v", paramsMap["instance"])
    }

    location, _ := paramsMap["location"].(string)
    description, _ := paramsMap["backup_description"].(string)

    return source.InsertBackupRun(ctx, project, instance, location, description, string(accessToken))
}

// ParseParams parses the parameters for the tool.
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (parameters.ParamValues, error) {
    return parameters.ParseParams(t.AllParams, data, claims)
}

func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
    return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
}

// Manifest returns the tool's manifest.
func (t Tool) Manifest() tools.Manifest {
    return t.manifest
}

// McpManifest returns the tool's MCP manifest.
func (t Tool) McpManifest() tools.McpManifest {
    return t.mcpManifest
}

// Authorized checks if the tool is authorized.
func (t Tool) Authorized(verifiedAuthServices []string) bool {
    return true
}

func (t Tool) RequiresClientAuthorization(resourceMgr tools.SourceProvider) (bool, error) {
    source, err := tools.GetCompatibleSource[compatibleSource](resourceMgr, t.Source, t.Name, t.Kind)
    if err != nil {
        return false, err
    }

    return source.UseClientAuthorization(), nil
}

func (t Tool) GetAuthTokenHeaderName(resourceMgr tools.SourceProvider) (string, error) {
    return "Authorization", nil
}
@@ -1,72 +0,0 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cloudsqlcreatebackup_test

import (
    "testing"

    yaml "github.com/goccy/go-yaml"
    "github.com/google/go-cmp/cmp"
    "github.com/googleapis/genai-toolbox/internal/server"
    "github.com/googleapis/genai-toolbox/internal/testutils"
    "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatebackup"
)

func TestParseFromYaml(t *testing.T) {
    ctx, err := testutils.ContextWithNewLogger()
    if err != nil {
        t.Fatalf("unexpected error: %s", err)
    }
    tcs := []struct {
        desc string
        in string
        want server.ToolConfigs
    }{
        {
            desc: "basic example",
            in: `
            tools:
                create-backup-tool:
                    kind: cloud-sql-create-backup
                    description: a test description
                    source: a-source
            `,
            want: server.ToolConfigs{
                "create-backup-tool": cloudsqlcreatebackup.Config{
                    Name: "create-backup-tool",
                    Kind: "cloud-sql-create-backup",
                    Description: "a test description",
                    Source: "a-source",
                    AuthRequired: []string{},
                },
            },
        },
    }
    for _, tc := range tcs {
        t.Run(tc.desc, func(t *testing.T) {
            got := struct {
                Tools server.ToolConfigs `yaml:"tools"`
            }{}
            // Parse contents
            err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
            if err != nil {
                t.Fatalf("unable to unmarshal: %s", err)
            }
            if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
                t.Fatalf("incorrect parse: diff %v", diff)
            }
        })
    }
}
@@ -2,7 +2,7 @@
  "$schema": "https://static.modelcontextprotocol.io/schemas/2025-12-11/server.schema.json",
  "name": "io.github.googleapis/genai-toolbox",
  "description": "MCP Toolbox for Databases enables your agent to connect to your database.",
  "title": "MCP Toolbox for Databases",
  "title": "MCP Toolbox",
  "websiteUrl": "https://github.com/googleapis/genai-toolbox",
  "icons": [
    {
@@ -1,232 +0,0 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cloudsql

import (
    "bytes"
    "context"
    "encoding/json"
    "fmt"
    "io"
    "net/http"
    "net/http/httptest"
    "net/url"
    "reflect"
    "regexp"
    "strings"
    "testing"
    "time"

    "github.com/google/go-cmp/cmp"
    "github.com/googleapis/genai-toolbox/internal/testutils"
    "github.com/googleapis/genai-toolbox/tests"
    "google.golang.org/api/sqladmin/v1"
)

var (
    createBackupToolKind = "cloud-sql-create-backup"
)

type createBackupTransport struct {
    transport http.RoundTripper
    url *url.URL
}

func (t *createBackupTransport) RoundTrip(req *http.Request) (*http.Response, error) {
    if strings.HasPrefix(req.URL.String(), "https://sqladmin.googleapis.com") {
        req.URL.Scheme = t.url.Scheme
        req.URL.Host = t.url.Host
    }
    return t.transport.RoundTrip(req)
}

type mastercreateBackupHandler struct {
    t *testing.T
}

func (h *mastercreateBackupHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
    if !strings.Contains(r.UserAgent(), "genai-toolbox/") {
        h.t.Errorf("User-Agent header not found")
    }
    var backupRun sqladmin.BackupRun
    if err := json.NewDecoder(r.Body).Decode(&backupRun); err != nil {
        h.t.Fatalf("failed to decode request body: %v", err)
    } else {
        h.t.Logf("Received request body: %+v", backupRun)
    }

    var expectedBackupRun sqladmin.BackupRun
    var response any
    var statusCode int

    switch backupRun.Description {
    case "":
        expectedBackupRun = sqladmin.BackupRun{}
        response = map[string]any{"name": "op1", "status": "PENDING"}
        statusCode = http.StatusOK
    case "test desc":
        expectedBackupRun = sqladmin.BackupRun{Location: "us-central1", Description: "test desc"}
        response = map[string]any{"name": "op1", "status": "PENDING"}
        statusCode = http.StatusOK
    default:
        http.Error(w, fmt.Sprintf("unhandled instance name: %s", backupRun.Instance), http.StatusInternalServerError)
        return
    }

    if diff := cmp.Diff(expectedBackupRun, backupRun); diff != "" {
        h.t.Errorf("unexpected request body (-want +got):\n%s", diff)
    }

    w.Header().Set("Content-Type", "application/json")
    w.WriteHeader(statusCode)
    if err := json.NewEncoder(w).Encode(response); err != nil {
        http.Error(w, err.Error(), http.StatusInternalServerError)
    }
}

func TestCreateBackupToolEndpoints(t *testing.T) {
    ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
    defer cancel()

    handler := &mastercreateBackupHandler{t: t}
    server := httptest.NewServer(handler)
    defer server.Close()

    serverURL, err := url.Parse(server.URL)
    if err != nil {
        t.Fatalf("failed to parse server URL: %v", err)
    }

    originalTransport := http.DefaultClient.Transport
    if originalTransport == nil {
        originalTransport = http.DefaultTransport
    }
    http.DefaultClient.Transport = &createBackupTransport{
        transport: originalTransport,
        url: serverURL,
    }
    t.Cleanup(func() {
        http.DefaultClient.Transport = originalTransport
    })

    var args []string
    toolsFile := getCreateBackupToolsConfig()
    cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
    if err != nil {
        t.Fatalf("command initialization returned an error: %s", err)
    }
    defer cleanup()

    waitCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
    defer cancel()
    out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
    if err != nil {
        t.Logf("toolbox command logs: \n%s", out)
        t.Fatalf("toolbox didn't start successfully: %s", err)
    }

    tcs := []struct {
        name string
        toolName string
        body string
        want string
        expectError bool
        errorStatus int
    }{
        {
            name: "successful backup creation with no optional parameters",
            toolName: "create-backup",
            body: `{"project": "p1", "instance": "instance-no-optional"}`,
            want: `{"name":"op1","status":"PENDING"}`,
        },
        {
            name: "successful backup creation with optional parameters",
            toolName: "create-backup",
            body: `{"project": "p1", "instance": "instance-optional", "location": "us-central1", "backup_description": "test desc"}`,
            want: `{"name":"op1","status":"PENDING"}`,
        },
        {
            name: "missing instance name",
            toolName: "create-backup",
            body: `{"project": "p1", "escription": "invalid"}`,
            expectError: true,
            errorStatus: http.StatusBadRequest,
        },
    }

    for _, tc := range tcs {
        t.Run(tc.name, func(t *testing.T) {
            api := fmt.Sprintf("http://127.0.0.1:5000/api/tool/%s/invoke", tc.toolName)
            req, err := http.NewRequest(http.MethodPost, api, bytes.NewBufferString(tc.body))
            if err != nil {
                t.Fatalf("unable to create request: %s", err)
            }
            req.Header.Add("Content-type", "application/json")
            resp, err := http.DefaultClient.Do(req)
            if err != nil {
                t.Fatalf("unable to send request: %s", err)
            }
            defer resp.Body.Close()

            if tc.expectError {
                if resp.StatusCode != tc.errorStatus {
                    bodyBytes, _ := io.ReadAll(resp.Body)
                    t.Fatalf("expected status %d but got %d: %s", tc.errorStatus, resp.StatusCode, string(bodyBytes))
                }
                return
            }

            if resp.StatusCode != http.StatusOK {
                bodyBytes, _ := io.ReadAll(resp.Body)
                t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
            }

            var result struct {
                Result string `json:"result"`
            }
            if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
                t.Fatalf("failed to decode response: %v", err)
            }

            var got, want map[string]any
            if err := json.Unmarshal([]byte(result.Result), &got); err != nil {
                t.Fatalf("failed to unmarshal result: %v", err)
            }
            if err := json.Unmarshal([]byte(tc.want), &want); err != nil {
                t.Fatalf("failed to unmarshal want: %v", err)
            }

            if !reflect.DeepEqual(got, want) {
                t.Fatalf("unexpected result: got %+v, want %+v", got, want)
            }
        })
    }
}

func getCreateBackupToolsConfig() map[string]any {
    return map[string]any{
        "sources": map[string]any{
            "my-cloud-sql-source": map[string]any{
                "kind": "cloud-sql-admin",
            },
        },
        "tools": map[string]any{
            "create-backup": map[string]any{
                "kind": createBackupToolKind,
                "source": "my-cloud-sql-source",
            },
        },
    }
}
@@ -85,66 +85,13 @@ func initDataplexConnection(ctx context.Context) (*dataplex.CatalogClient, error
    return client, nil
}

// cleanupOldAspectTypes Deletes AspectTypes older than the specified duration.
func cleanupOldAspectTypes(t *testing.T, ctx context.Context, client *dataplex.CatalogClient, oldThreshold time.Duration) {
    parent := fmt.Sprintf("projects/%s/locations/us", DataplexProject)
    olderThanTime := time.Now().Add(-oldThreshold)

    listReq := &dataplexpb.ListAspectTypesRequest{
        Parent: parent,
        PageSize: 100, // Fetch up to 100 items
        OrderBy: "create_time asc", // Order by creation time
    }

    const maxDeletes = 8 // Explicitly limit the number of deletions
    it := client.ListAspectTypes(ctx, listReq)
    var aspectTypesToDelete []string
    for len(aspectTypesToDelete) < maxDeletes {
        aspectType, err := it.Next()
        if err == iterator.Done {
            break
        }
        if err != nil {
            t.Logf("Warning: Failed to list aspect types during cleanup: %v", err)
            return
        }
        // Perform time-based filtering in memory
        if aspectType.CreateTime != nil {
            createTime := aspectType.CreateTime.AsTime()
            if createTime.Before(olderThanTime) {
                aspectTypesToDelete = append(aspectTypesToDelete, aspectType.GetName())
            }
        } else {
            t.Logf("Warning: AspectType %s has no CreateTime", aspectType.GetName())
        }
    }
    if len(aspectTypesToDelete) == 0 {
        t.Logf("cleanupOldAspectTypes: No aspect types found older than %s to delete.", oldThreshold.String())
        return
    }

    for _, aspectTypeName := range aspectTypesToDelete {
        deleteReq := &dataplexpb.DeleteAspectTypeRequest{Name: aspectTypeName}
        op, err := client.DeleteAspectType(ctx, deleteReq)
        if err != nil {
            t.Logf("Warning: Failed to delete aspect type %s: %v", aspectTypeName, err)
            continue // Skip to the next item if initiation fails
        }

        if err := op.Wait(ctx); err != nil {
            t.Logf("Warning: Failed to delete aspect type %s, operation error: %v", aspectTypeName, err)
        } else {
            t.Logf("cleanupOldAspectTypes: Successfully deleted %s", aspectTypeName)
        }
    }
}

func TestDataplexToolEndpoints(t *testing.T) {
    sourceConfig := getDataplexVars(t)
    ctx, cancel := context.WithTimeout(context.Background(), 3*time.Minute)
    defer cancel()

    var args []string

    bigqueryClient, err := initBigQueryConnection(ctx, DataplexProject)
    if err != nil {
        t.Fatalf("unable to create Cloud SQL connection pool: %s", err)
@@ -155,9 +102,6 @@ func TestDataplexToolEndpoints(t *testing.T) {
        t.Fatalf("unable to create Dataplex connection: %s", err)
    }

    // Cleanup older aspecttypes
    cleanupOldAspectTypes(t, ctx, dataplexClient, 1*time.Hour)

    // create resources with UUID
    datasetName := fmt.Sprintf("temp_toolbox_test_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
    tableName := fmt.Sprintf("param_table_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))