Mirror of https://github.com/googleapis/genai-toolbox.git, synced 2026-01-22 13:58:05 -05:00

Compare commits: config-pre...alloydb-om (20 commits)

| SHA1 |
|---|
| 6623d9bb91 |
| 0bc3ce5468 |
| 5b8f83b113 |
| 3e041b61e8 |
| 2d2817c2a8 |
| 6f1a04ad3d |
| 540826844e |
| 12abd9ab96 |
| e4f60e5633 |
| d7af21bdde |
| adc9589766 |
| c25a2330fe |
| 6e09b08c6a |
| 1f15a111f1 |
| dfddeb528d |
| 00c3e6d8cb |
| d00b6fdf18 |
| 4d23a3bbf2 |
| 5e0999ebf5 |
| 6b02591703 |
@@ -87,7 +87,7 @@ steps:
- "CLOUD_SQL_POSTGRES_REGION=$_REGION"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv:
["CLOUD_SQL_POSTGRES_USER", "CLOUD_SQL_POSTGRES_PASS", "CLIENT_ID"]
["CLOUD_SQL_POSTGRES_USER", "CLOUD_SQL_POSTGRES_PASS", "CLIENT_ID", "API_KEY"]
volumes:
- name: "go"
path: "/gopath"

@@ -134,7 +134,7 @@ steps:
- "ALLOYDB_POSTGRES_DATABASE=$_DATABASE_NAME"
- "ALLOYDB_POSTGRES_REGION=$_REGION"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["ALLOYDB_POSTGRES_USER", "ALLOYDB_POSTGRES_PASS", "CLIENT_ID"]
secretEnv: ["ALLOYDB_POSTGRES_USER", "ALLOYDB_POSTGRES_PASS", "CLIENT_ID", "API_KEY"]
volumes:
- name: "go"
path: "/gopath"

@@ -171,6 +171,23 @@ steps:
alloydbainl \
alloydbainl

- id: "alloydb-omni"
name: golang:1
waitFor: ["compile-test-binary"]
entrypoint: /bin/bash
env:
- "GOPATH=/gopath"
volumes:
- name: "go"
path: "/gopath"
args:
- -c
- |
.ci/test_with_coverage.sh \
"AlloyDB Omni" \
alloydbomni \
postgres

- id: "bigtable"
name: golang:1
waitFor: ["compile-test-binary"]

@@ -293,7 +310,7 @@ steps:
.ci/test_with_coverage.sh \
"Cloud Healthcare API" \
cloudhealthcare \
cloudhealthcare || echo "Integration tests failed."
cloudhealthcare

- id: "postgres"
name: golang:1

@@ -305,7 +322,7 @@ steps:
- "POSTGRES_HOST=$_POSTGRES_HOST"
- "POSTGRES_PORT=$_POSTGRES_PORT"
- "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
secretEnv: ["POSTGRES_USER", "POSTGRES_PASS", "CLIENT_ID"]
secretEnv: ["POSTGRES_USER", "POSTGRES_PASS", "CLIENT_ID", "API_KEY"]
volumes:
- name: "go"
path: "/gopath"

@@ -657,7 +674,7 @@ steps:
"Looker" \
looker \
looker

- id: "mindsdb"
name: golang:1
waitFor: ["compile-test-binary"]

@@ -845,7 +862,7 @@ steps:
"Snowflake" \
snowflake \
snowflake

- id: "cassandra"
name: golang:1
waitFor: ["compile-test-binary"]

@@ -890,7 +907,7 @@ steps:
go test -v ./internal/sources/oracle/... \
-coverprofile=oracle_coverage.out \
-coverpkg=./internal/sources/oracle/...,./internal/tools/oracle/...

# Coverage check
total_coverage=$(go tool cover -func=oracle_coverage.out | grep "total:" | awk '{print $3}')
echo "Oracle total coverage: $total_coverage"

@@ -964,6 +981,13 @@ steps:

availableSecrets:
secretManager:
# Common secrets
- versionName: projects/$PROJECT_ID/secrets/client_id/versions/latest
env: CLIENT_ID
- versionName: projects/$PROJECT_ID/secrets/api_key/versions/latest
env: API_KEY

# Resource-specific secrets
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_user/versions/latest
env: CLOUD_SQL_POSTGRES_USER
- versionName: projects/$PROJECT_ID/secrets/cloud_sql_pg_pass/versions/latest

@@ -980,8 +1004,6 @@ availableSecrets:
env: POSTGRES_USER
- versionName: projects/$PROJECT_ID/secrets/postgres_pass/versions/latest
env: POSTGRES_PASS
- versionName: projects/$PROJECT_ID/secrets/client_id/versions/latest
env: CLIENT_ID
- versionName: projects/$PROJECT_ID/secrets/neo4j_user/versions/latest
env: NEO4J_USER
- versionName: projects/$PROJECT_ID/secrets/neo4j_pass/versions/latest
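Taken together, the hunks above add an `alloydb-omni` integration-test step and append a new `API_KEY` secret to the `secretEnv` list of several existing steps. A minimal sketch of how those pieces relate, assembled only from lines shown above (indentation and step layout are assumed; this is not the full build config):

```yaml
steps:
  - id: "postgres"
    name: golang:1
    waitFor: ["compile-test-binary"]
    env:
      - "POSTGRES_HOST=$_POSTGRES_HOST"
      - "POSTGRES_PORT=$_POSTGRES_PORT"
      - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
    # API_KEY is the newly appended secret environment variable
    secretEnv: ["POSTGRES_USER", "POSTGRES_PASS", "CLIENT_ID", "API_KEY"]
    volumes:
      - name: "go"
        path: "/gopath"

availableSecrets:
  secretManager:
    # Common secrets shared across steps
    - versionName: projects/$PROJECT_ID/secrets/client_id/versions/latest
      env: CLIENT_ID
    # Newly added: the API key consumed by the steps above
    - versionName: projects/$PROJECT_ID/secrets/api_key/versions/latest
      env: API_KEY
```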
@@ -98,6 +98,7 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistdatabases"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistinstances"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlrestorebackup"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlwaitforoperation"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmssql/cloudsqlmssqlcreateinstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmysql/cloudsqlmysqlcreateinstance"

@@ -385,6 +386,7 @@ func NewCommand(opts ...Option) *Command {
// TODO: Insecure by default. Might consider updating this for v1.0.0
flags.StringSliceVar(&cmd.cfg.AllowedOrigins, "allowed-origins", []string{"*"}, "Specifies a list of origins permitted to access this server. Defaults to '*'.")
flags.StringSliceVar(&cmd.cfg.AllowedHosts, "allowed-hosts", []string{"*"}, "Specifies a list of hosts permitted to access this server. Defaults to '*'.")
flags.StringSliceVar(&cmd.cfg.UserAgentMetadata, "user-agent-metadata", []string{}, "Appends additional metadata to the User-Agent.")

// wrap RunE command so that we have access to original Command object
cmd.RunE = func(*cobra.Command, []string) error { return run(cmd) }

@@ -70,6 +70,9 @@ func withDefaults(c server.ServerConfig) server.ServerConfig {
if c.AllowedHosts == nil {
c.AllowedHosts = []string{"*"}
}
if c.UserAgentMetadata == nil {
c.UserAgentMetadata = []string{}
}
return c
}
@@ -230,6 +233,13 @@ func TestServerConfigFlags(t *testing.T) {
AllowedHosts: []string{"http://foo.com", "http://bar.com"},
}),
},
{
desc: "user agent metadata",
args: []string{"--user-agent-metadata", "foo,bar"},
want: withDefaults(server.ServerConfig{
UserAgentMetadata: []string{"foo", "bar"},
}),
},
}
for _, tc := range tcs {
t.Run(tc.desc, func(t *testing.T) {

@@ -1493,7 +1503,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"cloud_sql_postgres_admin_tools": tools.ToolsetConfig{
Name: "cloud_sql_postgres_admin_tools",
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck", "clone_instance", "create_backup"},
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "postgres_upgrade_precheck", "clone_instance", "create_backup", "restore_backup"},
},
},
},

@@ -1503,7 +1513,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"cloud_sql_mysql_admin_tools": tools.ToolsetConfig{
Name: "cloud_sql_mysql_admin_tools",
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup"},
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup", "restore_backup"},
},
},
},

@@ -1513,7 +1523,7 @@ func TestPrebuiltTools(t *testing.T) {
wantToolset: server.ToolsetConfigs{
"cloud_sql_mssql_admin_tools": tools.ToolsetConfig{
Name: "cloud_sql_mssql_admin_tools",
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup"},
ToolNames: []string{"create_instance", "get_instance", "list_instances", "create_database", "list_databases", "create_user", "wait_for_operation", "clone_instance", "create_backup", "restore_backup"},
},
},
},
@@ -54,6 +54,7 @@ instance, database and users:
* `create_instance`
* `create_user`
* `clone_instance`
* `restore_backup`

## Install MCP Toolbox

@@ -301,6 +302,7 @@ instances and interacting with your database:
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
* **clone_instance**: Creates a clone of an existing Cloud SQL for SQL Server instance.
* **create_backup**: Creates a backup on a Cloud SQL instance.
* **restore_backup**: Restores a backup of a Cloud SQL instance.

{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs

@@ -54,6 +54,7 @@ database and users:
* `create_instance`
* `create_user`
* `clone_instance`
* `restore_backup`

## Install MCP Toolbox

@@ -301,6 +302,7 @@ instances and interacting with your database:
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
* **clone_instance**: Creates a clone of an existing Cloud SQL for MySQL instance.
* **create_backup**: Creates a backup on a Cloud SQL instance.
* **restore_backup**: Restores a backup of a Cloud SQL instance.

{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs

@@ -54,6 +54,7 @@ instance, database and users:
* `create_instance`
* `create_user`
* `clone_instance`
* `restore_backup`

## Install MCP Toolbox

@@ -301,6 +302,7 @@ instances and interacting with your database:
* **wait_for_operation**: Waits for a Cloud SQL operation to complete.
* **clone_instance**: Creates a clone of an existing Cloud SQL for PostgreSQL instance.
* **create_backup**: Creates a backup on a Cloud SQL instance.
* **restore_backup**: Restores a backup of a Cloud SQL instance.

{{< notice note >}}
Prebuilt tools are pre-1.0, so expect some tool changes between versions. LLMs

@@ -20,6 +20,7 @@ The native SDKs can be combined with MCP clients in many cases.

Toolbox currently supports the following versions of MCP specification:

* [2025-11-25](https://modelcontextprotocol.io/specification/2025-11-25)
* [2025-06-18](https://modelcontextprotocol.io/specification/2025-06-18)
* [2025-03-26](https://modelcontextprotocol.io/specification/2025-03-26)
* [2024-11-05](https://modelcontextprotocol.io/specification/2024-11-05)

@@ -207,6 +207,7 @@ You can connect to Toolbox Cloud Run instances directly through the SDK.
{{< tab header="Python" lang="python" >}}
import asyncio
from toolbox_core import ToolboxClient, auth_methods
from toolbox_core.protocol import Protocol

# Replace with the Cloud Run service URL generated in the previous step
URL = "https://cloud-run-url.app"

@@ -217,6 +218,7 @@ async def main():
async with ToolboxClient(
URL,
client_headers={"Authorization": auth_token_provider},
protocol=Protocol.TOOLBOX,
) as toolbox:
toolset = await toolbox.load_toolset()
# ...

@@ -281,3 +283,5 @@ contain the specific error message needed to diagnose the problem.
Manager, it means the Toolbox service account is missing permissions.
- Ensure the `toolbox-identity` service account has the **Secret Manager
Secret Accessor** (`roles/secretmanager.secretAccessor`) IAM role.

- **Cloud Run Connections via IAP:** Currently we do not support Cloud Run connections via [IAP](https://docs.cloud.google.com/iap/docs/concepts-overview). Please disable IAP if you are using it.
@@ -27,6 +27,7 @@ description: >
| | `--ui` | Launches the Toolbox UI web server. | |
| | `--allowed-origins` | Specifies a list of origins permitted to access this server for CORs access. | `*` |
| | `--allowed-hosts` | Specifies a list of hosts permitted to access this server to prevent DNS rebinding attacks. | `*` |
| | `--user-agent-extra` | Appends additional metadata to the User-Agent. | |
| `-v` | `--version` | version for toolbox | |

## Examples

@@ -194,6 +194,7 @@ See [Usage Examples](../reference/cli.md#examples).
* `create_instance`
* `create_user`
* `clone_instance`
* `restore_backup`

* **Tools:**
* `create_instance`: Creates a new Cloud SQL for MySQL instance.

@@ -205,6 +206,7 @@ See [Usage Examples](../reference/cli.md#examples).
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
* `clone_instance`: Creates a clone for an existing Cloud SQL for MySQL instance.
* `create_backup`: Creates a backup on a Cloud SQL instance.
* `restore_backup`: Restores a backup of a Cloud SQL instance.

## Cloud SQL for PostgreSQL

@@ -284,6 +286,7 @@ See [Usage Examples](../reference/cli.md#examples).
* `create_instance`
* `create_user`
* `clone_instance`
* `restore_backup`
* **Tools:**
* `create_instance`: Creates a new Cloud SQL for PostgreSQL instance.
* `get_instance`: Gets information about a Cloud SQL instance.

@@ -294,6 +297,7 @@ See [Usage Examples](../reference/cli.md#examples).
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
* `clone_instance`: Creates a clone for an existing Cloud SQL for PostgreSQL instance.
* `create_backup`: Creates a backup on a Cloud SQL instance.
* `restore_backup`: Restores a backup of a Cloud SQL instance.

## Cloud SQL for SQL Server

@@ -347,6 +351,7 @@ See [Usage Examples](../reference/cli.md#examples).
* `create_instance`
* `create_user`
* `clone_instance`
* `restore_backup`
* **Tools:**
* `create_instance`: Creates a new Cloud SQL for SQL Server instance.
* `get_instance`: Gets information about a Cloud SQL instance.

@@ -357,6 +362,7 @@ See [Usage Examples](../reference/cli.md#examples).
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
* `clone_instance`: Creates a clone for an existing Cloud SQL for SQL Server instance.
* `create_backup`: Creates a backup on a Cloud SQL instance.
* `restore_backup`: Restores a backup of a Cloud SQL instance.

## Dataplex
@@ -12,6 +12,9 @@ aliases:

The `cloud-gemini-data-analytics-query` tool allows you to send natural language questions to the Gemini Data Analytics API and receive structured responses containing SQL queries, natural language answers, and explanations. For details on defining data agent context for database data sources, see the official [documentation](https://docs.cloud.google.com/gemini/docs/conversational-analytics-api/data-agent-authored-context-databases).

> [!NOTE]
> Only `alloydb`, `spannerReference`, and `cloudSqlReference` are supported as [datasource references](https://clouddocs.devsite.corp.google.com/gemini/docs/conversational-analytics-api/reference/rest/v1beta/projects.locations.dataAgents#DatasourceReferences).

## Example

```yaml

@@ -41,13 +44,13 @@ tools:

### Usage Flow

When using this tool, a `prompt` parameter containing a natural language query is provided to the tool (typically by an agent). The tool then interacts with the Gemini Data Analytics API using the context defined in your configuration.
When using this tool, a `query` parameter containing a natural language query is provided to the tool (typically by an agent). The tool then interacts with the Gemini Data Analytics API using the context defined in your configuration.

The structure of the response depends on the `generationOptions` configured in your tool definition (e.g., enabling `generateQueryResult` will include the SQL query results).

See [Data Analytics API REST documentation](https://clouddocs.devsite.corp.google.com/gemini/docs/conversational-analytics-api/reference/rest/v1alpha/projects.locations/queryData?rep_location=global) for details.

**Example Input Prompt:**
**Example Input Query:**

```text
How many accounts who have region in Prague are eligible for loans? A3 contains the data of region.
docs/en/resources/tools/cloudsql/cloudsqlrestorebackup.md (new file, 53 lines)

@@ -0,0 +1,53 @@
---
title: cloud-sql-restore-backup
type: docs
weight: 10
description: "Restores a backup of a Cloud SQL instance."
---

The `cloud-sql-restore-backup` tool restores a backup on a Cloud SQL instance using the Cloud SQL Admin API.

{{< notice info >}}
This tool uses a `source` of kind `cloud-sql-admin`.
{{< /notice >}}

## Examples

Basic backup restore

```yaml
tools:
  backup-restore-basic:
    kind: cloud-sql-restore-backup
    source: cloud-sql-admin-source
    description: "Restores a backup onto the given Cloud SQL instance."
```

## Reference

### Tool Configuration

| **field** | **type** | **required** | **description** |
| -------------- | :------: | :----------: | ------------------------------------------------ |
| kind | string | true | Must be "cloud-sql-restore-backup". |
| source | string | true | The name of the `cloud-sql-admin` source to use. |
| description | string | false | A description of the tool. |

### Tool Inputs

| **parameter** | **type** | **required** | **description** |
| ------------------| :------: | :----------: | -----------------------------------------------------------------------------|
| target_project | string | true | The project ID of the instance to restore the backup onto. |
| target_instance | string | true | The instance to restore the backup onto. Does not include the project ID. |
| backup_id | string | true | The identifier of the backup being restored. |
| source_project | string | false | (Optional) The project ID of the instance that the backup belongs to. |
| source_instance | string | false | (Optional) Cloud SQL instance ID of the instance that the backup belongs to. |

## Usage Notes

- The `backup_id` field can be a BackupRun ID (which will be an int64), backup name, or BackupDR backup name.
- If the `backup_id` field contains a BackupRun ID (i.e. an int64), the optional fields `source_project` and `source_instance` must also be provided.

## See Also
- [Cloud SQL Admin API documentation](https://cloud.google.com/sql/docs/mysql/admin-api)
- [Toolbox Cloud SQL tools documentation](../cloudsql)
- [Cloud SQL Restore API documentation](https://cloud.google.com/sql/docs/mysql/backup-recovery/restoring)
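The example above references a `cloud-sql-admin-source` that is defined elsewhere in the same config. A minimal sketch of how the two blocks might sit together; the source entry is illustrative and only its `kind` is taken from the notice above, so check the `cloud-sql-admin` source reference for its full schema:

```yaml
sources:
  cloud-sql-admin-source:
    kind: cloud-sql-admin   # hypothetical minimal source definition

tools:
  backup-restore-basic:
    kind: cloud-sql-restore-backup
    source: cloud-sql-admin-source
    description: "Restores a backup onto the given Cloud SQL instance."
```

At call time the caller supplies `target_project`, `target_instance`, and `backup_id` (plus `source_project` and `source_instance` when `backup_id` is a numeric BackupRun ID), as listed in the inputs table above.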
@@ -30,6 +30,10 @@ following config for example:
  - name: userNames
    type: array
    description: The user names to be set.
    items:
      name: userName # the item name doesn't matter but it has to exist
      type: string
      description: username
```

If the input is an array of strings `["Alice", "Sid", "Bob"]`, The final command
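To show where such an array parameter sits in a complete definition, here is a hedged sketch of a full tool entry that reuses the `userNames` block above; the tool name, source, and SQL statement are made up for illustration and are not from the original docs:

```yaml
tools:
  set-user-names:
    kind: postgres-sql                      # kind and source are illustrative
    source: my-pg-source
    description: Sets the given user names.
    statement: SELECT set_user_names($1);   # hypothetical function that accepts the array
    parameters:
      - name: userNames
        type: array
        description: The user names to be set.
        items:
          name: userName # the item name doesn't matter but it has to exist
          type: string
          description: username
```

With the input `["Alice", "Sid", "Bob"]`, the whole array is bound to the single `userNames` parameter when the tool is invoked.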
go.mod (38)

@@ -46,6 +46,7 @@ require (
github.com/sijms/go-ora/v2 v2.9.0
github.com/snowflakedb/gosnowflake v1.18.1
github.com/spf13/cobra v1.10.1
github.com/testcontainers/testcontainers-go v0.40.0
github.com/thlib/go-timezone-local v0.0.7
github.com/trinodb/trino-go-client v0.330.0
github.com/valkey-io/valkey-go v1.0.68

@@ -89,16 +90,19 @@ require (
cloud.google.com/go/iam v1.5.3 // indirect
cloud.google.com/go/monitoring v1.24.3 // indirect
cloud.google.com/go/trace v1.11.7 // indirect
dario.cat/mergo v1.0.2 // indirect
filippo.io/edwards25519 v1.1.0 // indirect
github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 // indirect
github.com/99designs/keyring v1.2.2 // indirect
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 // indirect
github.com/Azure/azure-sdk-for-go/sdk/internal v1.11.1 // indirect
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 // indirect
github.com/Azure/go-ansiterm v0.0.0-20250102033503-faa5f7b0171c // indirect
github.com/BurntSushi/toml v1.4.0 // indirect
github.com/GoogleCloudPlatform/grpc-gcp-go/grpcgcp v1.5.3 // indirect
github.com/GoogleCloudPlatform/opentelemetry-operations-go/detectors/gcp v1.29.0 // indirect
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.54.0 // indirect
github.com/Microsoft/go-winio v0.6.2 // indirect
github.com/PuerkitoBio/goquery v1.10.3 // indirect
github.com/VictoriaMetrics/easyproto v0.1.4 // indirect
github.com/ajg/form v1.5.1 // indirect

@@ -124,17 +128,29 @@ require (
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.34.4 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.38.4 // indirect
github.com/aws/smithy-go v1.23.0 // indirect
github.com/cenkalti/backoff/v4 v4.3.0 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 // indirect
github.com/containerd/errdefs v1.0.0 // indirect
github.com/containerd/errdefs/pkg v0.3.0 // indirect
github.com/containerd/log v0.1.0 // indirect
github.com/containerd/platforms v0.2.1 // indirect
github.com/couchbase/gocbcore/v10 v10.8.1 // indirect
github.com/couchbase/gocbcoreps v0.1.4 // indirect
github.com/couchbase/goprotostellar v1.0.2 // indirect
github.com/couchbase/tools-common/errors v1.0.0 // indirect
github.com/couchbaselabs/gocbconnstr/v2 v2.0.0 // indirect
github.com/cpuguy83/dockercfg v0.3.2 // indirect
github.com/danieljoos/wincred v1.2.2 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/distribution/reference v0.6.0 // indirect
github.com/docker/docker v28.5.1+incompatible // indirect
github.com/docker/go-connections v0.6.0 // indirect
github.com/docker/go-units v0.5.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/dvsekhvalnov/jose2go v1.7.0 // indirect
github.com/ebitengine/purego v0.8.4 // indirect
github.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect
github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect
github.com/felixge/httpsnoop v1.0.4 // indirect

@@ -143,6 +159,7 @@ require (
github.com/go-logfmt/logfmt v0.6.0 // indirect
github.com/go-logr/logr v1.4.3 // indirect
github.com/go-logr/stdr v1.2.2 // indirect
github.com/go-ole/go-ole v1.2.6 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/goccy/go-json v0.10.5 // indirect

@@ -178,27 +195,47 @@ require (
github.com/klauspost/compress v1.18.0 // indirect
github.com/klauspost/cpuid/v2 v2.2.11 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 // indirect
github.com/magiconair/properties v1.8.10 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 // indirect
github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 // indirect
github.com/moby/docker-image-spec v1.3.1 // indirect
github.com/moby/go-archive v0.1.0 // indirect
github.com/moby/patternmatcher v0.6.0 // indirect
github.com/moby/sys/sequential v0.6.0 // indirect
github.com/moby/sys/user v0.4.0 // indirect
github.com/moby/sys/userns v0.1.0 // indirect
github.com/moby/term v0.5.2 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/montanaflynn/stats v0.7.1 // indirect
github.com/morikuni/aec v1.0.0 // indirect
github.com/mtibben/percent v0.2.1 // indirect
github.com/nakagami/chacha20 v0.1.0 // indirect
github.com/ncruces/go-strftime v0.1.9 // indirect
github.com/opencontainers/go-digest v1.0.0 // indirect
github.com/opencontainers/image-spec v1.1.1 // indirect
github.com/pierrec/lz4 v2.6.1+incompatible // indirect
github.com/pierrec/lz4/v4 v4.1.22 // indirect
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/shirou/gopsutil/v4 v4.25.6 // indirect
github.com/sirupsen/logrus v1.9.3 // indirect
github.com/spf13/pflag v1.0.9 // indirect
github.com/spiffe/go-spiffe/v2 v2.5.0 // indirect
github.com/stretchr/testify v1.11.1 // indirect
github.com/tklauser/go-sysconf v0.3.12 // indirect
github.com/tklauser/numcpus v0.6.1 // indirect
github.com/xdg-go/pbkdf2 v1.0.0 // indirect
github.com/xdg-go/scram v1.1.2 // indirect
github.com/xdg-go/stringprep v1.0.4 // indirect
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect
github.com/zeebo/errs v1.4.0 // indirect
github.com/zeebo/xxh3 v1.0.2 // indirect
gitlab.com/nyarla/go-crypt v0.0.0-20160106005555-d9a5dc2b789b // indirect

@@ -232,6 +269,7 @@ require (
google.golang.org/grpc v1.76.0 // indirect
gopkg.in/inf.v0 v0.9.1 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
modernc.org/libc v1.66.10 // indirect
modernc.org/mathutil v1.7.1 // indirect
modernc.org/memory v1.11.0 // indirect
go.sum (57)
@@ -647,6 +647,8 @@ github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4 h1:/vQbFIOMb
|
||||
github.com/99designs/go-keychain v0.0.0-20191008050251-8e49817e8af4/go.mod h1:hN7oaIRCjzsZ2dE+yG5k+rsdt3qcwykqK6HVGcKwsw4=
|
||||
github.com/99designs/keyring v1.2.2 h1:pZd3neh/EmUzWONb35LxQfvuY7kiSXAq3HQd97+XBn0=
|
||||
github.com/99designs/keyring v1.2.2/go.mod h1:wes/FrByc8j7lFOAGLGSNEg8f/PaI3cgTBqhFkHUrPk=
|
||||
github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6 h1:He8afgbRMd7mFxO99hRNu+6tazq8nFF9lIwo9JFroBk=
|
||||
github.com/AdaLogics/go-fuzz-headers v0.0.0-20240806141605-e8a1dd7889d6/go.mod h1:8o94RPi1/7XTJvwPpRSzSUedZrtlirdB3r9Z20bi2f8=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0 h1:Gt0j3wceWMwPmiazCa8MzMA0MfhmPIz0Qp0FJ6qcM0U=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.18.0/go.mod h1:Ot/6aikWnKWi4l9QB7qVSwa8iMphQNqkWALMoNT3rzM=
|
||||
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.10.1 h1:B+blDbyVIG3WaikNxPnhPiJ1MThR03b3vKGtER95TP4=
|
||||
@@ -800,6 +802,14 @@ github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443 h1:aQ3y1lwWyqYPiWZThqv
|
||||
github.com/cncf/xds/go v0.0.0-20250501225837-2ac532fd4443/go.mod h1:W+zGtBO5Y1IgJhy4+A9GOqVhqLpfZi+vwmdNXUehLA8=
|
||||
github.com/containerd/continuity v0.4.5 h1:ZRoN1sXq9u7V6QoHMcVWGhOwDFqZ4B9i5H6un1Wh0x4=
|
||||
github.com/containerd/continuity v0.4.5/go.mod h1:/lNJvtJKUQStBzpVQ1+rasXO1LAWtUQssk28EZvJ3nE=
|
||||
github.com/containerd/errdefs v1.0.0 h1:tg5yIfIlQIrxYtu9ajqY42W3lpS19XqdxRQeEwYG8PI=
|
||||
github.com/containerd/errdefs v1.0.0/go.mod h1:+YBYIdtsnF4Iw6nWZhJcqGSg/dwvV7tyJ/kCkyJ2k+M=
|
||||
github.com/containerd/errdefs/pkg v0.3.0 h1:9IKJ06FvyNlexW690DXuQNx2KA2cUJXx151Xdx3ZPPE=
|
||||
github.com/containerd/errdefs/pkg v0.3.0/go.mod h1:NJw6s9HwNuRhnjJhM7pylWwMyAkmCQvQ4GpJHEqRLVk=
|
||||
github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I=
|
||||
github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo=
|
||||
github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A=
|
||||
github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw=
|
||||
github.com/couchbase/gocb/v2 v2.11.1 h1:xWDco7Qk/XSvGUjbUWRaXi0V35nsMijJnm4vHXN/rqY=
|
||||
github.com/couchbase/gocb/v2 v2.11.1/go.mod h1:aSh1Cmd1sPRpYyiBD5iWPehPWaTVF/oYhrtOAITWb/4=
|
||||
github.com/couchbase/gocbcore/v10 v10.8.1 h1:i4SnH0DH9APGC4GS2vS2m+3u08V7oJwviamOXdgAZOQ=
|
||||
@@ -816,8 +826,12 @@ github.com/couchbaselabs/gocaves/client v0.0.0-20250107114554-f96479220ae8 h1:MQ
|
||||
github.com/couchbaselabs/gocaves/client v0.0.0-20250107114554-f96479220ae8/go.mod h1:AVekAZwIY2stsJOMWLAS/0uA/+qdp7pjO8EHnl61QkY=
|
||||
github.com/couchbaselabs/gocbconnstr/v2 v2.0.0 h1:HU9DlAYYWR69jQnLN6cpg0fh0hxW/8d5hnglCXXjW78=
|
||||
github.com/couchbaselabs/gocbconnstr/v2 v2.0.0/go.mod h1:o7T431UOfFVHDNvMBUmUxpHnhivwv7BziUao/nMl81E=
|
||||
github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA=
|
||||
github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/creack/pty v1.1.18 h1:n56/Zwd5o6whRC5PMGretI4IdRLlmBXYNjScPaBgsbY=
|
||||
github.com/creack/pty v1.1.18/go.mod h1:MOBLtS5ELjhRRrroQr9kyvTxUAFNvYEK993ew/Vr4O4=
|
||||
github.com/danieljoos/wincred v1.2.2 h1:774zMFJrqaeYCK2W57BgAem/MLi6mtSE47MB6BOJ0i0=
|
||||
github.com/danieljoos/wincred v1.2.2/go.mod h1:w7w4Utbrz8lqeMbDAK0lkNJUv5sAOkFi7nd/ogr0Uh8=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
@@ -826,10 +840,12 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
|
||||
github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
|
||||
github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
|
||||
github.com/docker/cli v28.4.0+incompatible h1:RBcf3Kjw2pMtwui5V0DIMdyeab8glEw5QY0UUU4C9kY=
|
||||
github.com/docker/cli v28.4.0+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
|
||||
github.com/docker/docker v28.4.0+incompatible h1:KVC7bz5zJY/4AZe/78BIvCnPsLaC9T/zh72xnlrTTOk=
|
||||
github.com/docker/docker v28.4.0+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||
github.com/docker/docker v28.5.1+incompatible h1:Bm8DchhSD2J6PsFzxC35TZo4TLGR2PdW/E69rU45NhM=
|
||||
github.com/docker/docker v28.5.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||
github.com/docker/go-connections v0.6.0 h1:LlMG9azAe1TqfR7sO+NJttz1gy6KO7VJBh+pMmjSD94=
|
||||
github.com/docker/go-connections v0.6.0/go.mod h1:AahvXYshr6JgfUJGdDCs2b5EZG/vmaMAntpSFH5BFKE=
|
||||
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
|
||||
@@ -840,6 +856,8 @@ github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkp
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
github.com/dvsekhvalnov/jose2go v1.7.0 h1:bnQc8+GMnidJZA8zc6lLEAb4xNrIqHwO+9TzqvtQZPo=
|
||||
github.com/dvsekhvalnov/jose2go v1.7.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU=
|
||||
github.com/ebitengine/purego v0.8.4 h1:CF7LEKg5FFOsASUj0+QwaXf8Ht6TlFxg09+S9wz0omw=
|
||||
github.com/ebitengine/purego v0.8.4/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ=
|
||||
github.com/elastic/elastic-transport-go/v8 v8.8.0 h1:7k1Ua+qluFr6p1jfJjGDl97ssJS/P7cHNInzfxgBQAo=
|
||||
github.com/elastic/elastic-transport-go/v8 v8.8.0/go.mod h1:YLHer5cj0csTzNFXoNQ8qhtGY1GTvSqPnKWKaqQE3Hk=
|
||||
github.com/elastic/go-elasticsearch/v9 v9.2.0 h1:COeL/g20+ixnUbffe4Wfbu88emrHjAq/LhVfmrjqRQs=
|
||||
@@ -913,6 +931,8 @@ github.com/go-logr/logr v1.4.3 h1:CjnDlHq8ikf6E492q6eKboGOC0T8CDaOvkHCIg8idEI=
|
||||
github.com/go-logr/logr v1.4.3/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
|
||||
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
|
||||
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
|
||||
github.com/go-ole/go-ole v1.2.6 h1:/Fpf6oFPoeFik9ty7siob0G6Ke8QvQEuVcuChpwXzpY=
|
||||
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
|
||||
github.com/go-pdf/fpdf v0.5.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M=
|
||||
github.com/go-pdf/fpdf v0.6.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M=
|
||||
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
|
||||
@@ -1174,9 +1194,13 @@ github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
|
||||
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
||||
github.com/looker-open-source/sdk-codegen/go v0.25.21 h1:nlZ1nz22SKluBNkzplrMHBPEVgJO3zVLF6aAws1rrRA=
|
||||
github.com/looker-open-source/sdk-codegen/go v0.25.21/go.mod h1:Br1ntSiruDJ/4nYNjpYyWyCbqJ7+GQceWbIgn0hYims=
|
||||
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0 h1:6E+4a0GO5zZEnZ81pIr0yLvtUWk2if982qA3F3QD6H4=
|
||||
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
|
||||
github.com/lyft/protoc-gen-star v0.6.0/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA=
|
||||
github.com/lyft/protoc-gen-star v0.6.1/go.mod h1:TGAoBVkt8w7MPG72TrKIu85MIdXwDuzJYeZuUPFPNwA=
|
||||
github.com/lyft/protoc-gen-star/v2 v2.0.1/go.mod h1:RcCdONR2ScXaYnQC5tUzxzlpA3WVYF7/opLeUgcQs/o=
|
||||
github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE=
|
||||
github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
|
||||
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
|
||||
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
|
||||
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
|
||||
@@ -1194,8 +1218,18 @@ github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 h1:+n/aFZefKZp7spd8D
|
||||
github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE=
|
||||
github.com/moby/docker-image-spec v1.3.1 h1:jMKff3w6PgbfSa69GfNg+zN/XLhfXJGnEx3Nl2EsFP0=
|
||||
github.com/moby/docker-image-spec v1.3.1/go.mod h1:eKmb5VW8vQEh/BAr2yvVNvuiJuY6UIocYsFu/DxxRpo=
|
||||
github.com/moby/go-archive v0.1.0 h1:Kk/5rdW/g+H8NHdJW2gsXyZ7UnzvJNOy6VKJqueWdcQ=
|
||||
github.com/moby/go-archive v0.1.0/go.mod h1:G9B+YoujNohJmrIYFBpSd54GTUB4lt9S+xVQvsJyFuo=
|
||||
github.com/moby/patternmatcher v0.6.0 h1:GmP9lR19aU5GqSSFko+5pRqHi+Ohk1O69aFiKkVGiPk=
|
||||
github.com/moby/patternmatcher v0.6.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc=
|
||||
github.com/moby/sys/atomicwriter v0.1.0 h1:kw5D/EqkBwsBFi0ss9v1VG3wIkVhzGvLklJ+w3A14Sw=
|
||||
github.com/moby/sys/atomicwriter v0.1.0/go.mod h1:Ul8oqv2ZMNHOceF643P6FKPXeCmYtlQMvpizfsSoaWs=
|
||||
github.com/moby/sys/sequential v0.6.0 h1:qrx7XFUd/5DxtqcoH1h438hF5TmOvzC/lspjy7zgvCU=
|
||||
github.com/moby/sys/sequential v0.6.0/go.mod h1:uyv8EUTrca5PnDsdMGXhZe6CCe8U/UiTWd+lL+7b/Ko=
|
||||
github.com/moby/sys/user v0.4.0 h1:jhcMKit7SA80hivmFJcbB1vqmw//wU61Zdui2eQXuMs=
|
||||
github.com/moby/sys/user v0.4.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs=
|
||||
github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g=
|
||||
github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28=
|
||||
github.com/moby/term v0.5.2 h1:6qk3FJAFDs6i/q3W/pQ97SX192qKfZgGjCQqfCJkgzQ=
|
||||
github.com/moby/term v0.5.2/go.mod h1:d3djjFCrjnB+fl8NJux+EJzu0msscUP+f8it8hPkFLc=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
@@ -1206,6 +1240,8 @@ github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjY
|
||||
github.com/montanaflynn/stats v0.0.0-20171201202039-1bf9dbcd8cbe/go.mod h1:wL8QJuTMNUDYhXwkmfOly8iTdp5TEcJFWZD2D7SIkUc=
|
||||
github.com/montanaflynn/stats v0.7.1 h1:etflOAAHORrCC44V+aR6Ftzort912ZU+YLiSTuV8eaE=
|
||||
github.com/montanaflynn/stats v0.7.1/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
|
||||
github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
|
||||
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
|
||||
github.com/mtibben/percent v0.2.1 h1:5gssi8Nqo8QU/r2pynCm+hBQHpkB/uNK7BJCFogWdzs=
|
||||
github.com/mtibben/percent v0.2.1/go.mod h1:KG9uO+SZkUp+VkRHsCdYQV3XSZrrSpR3O9ibNBTZrns=
|
||||
github.com/nakagami/chacha20 v0.1.0 h1:2fbf5KeVUw7oRpAe6/A7DqvBJLYYu0ka5WstFbnkEVo=
|
||||
@@ -1254,6 +1290,8 @@ github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c h1:ncq/mPwQF4JjgDlrVEn3C11VoGHZN7m8qihwgMEtzYw=
|
||||
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
|
||||
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
|
||||
github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w=
|
||||
@@ -1275,6 +1313,8 @@ github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfF
|
||||
github.com/ruudk/golang-pdf417 v0.0.0-20201230142125-a7e3863a1245/go.mod h1:pQAZKsJ8yyVxGRWYNEm9oFB8ieLgKFnamEyDmSA0BRk=
|
||||
github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys=
|
||||
github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
|
||||
github.com/shirou/gopsutil/v4 v4.25.6 h1:kLysI2JsKorfaFPcYmcJqbzROzsBWEOAtw6A7dIfqXs=
|
||||
github.com/shirou/gopsutil/v4 v4.25.6/go.mod h1:PfybzyydfZcN+JMMjkF6Zb8Mq1A/VcogFFg7hj50W9c=
|
||||
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
|
||||
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
|
||||
github.com/sijms/go-ora/v2 v2.9.0 h1:+iQbUeTeCOFMb5BsOMgUhV8KWyrv9yjKpcK4x7+MFrg=
|
||||
@@ -1312,9 +1352,15 @@ github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o
|
||||
github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
|
||||
github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U=
|
||||
github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U=
|
||||
github.com/testcontainers/testcontainers-go v0.40.0 h1:pSdJYLOVgLE8YdUY2FHQ1Fxu+aMnb6JfVz1mxk7OeMU=
|
||||
github.com/testcontainers/testcontainers-go v0.40.0/go.mod h1:FSXV5KQtX2HAMlm7U3APNyLkkap35zNLxukw9oBi/MY=
|
||||
github.com/thlib/go-timezone-local v0.0.7 h1:fX8zd3aJydqLlTs/TrROrIIdztzsdFV23OzOQx31jII=
|
||||
github.com/thlib/go-timezone-local v0.0.7/go.mod h1:/Tnicc6m/lsJE0irFMA0LfIwTBo4QP7A8IfyIv4zZKI=
|
||||
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
|
||||
github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU=
|
||||
github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI=
|
||||
github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk=
|
||||
github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY=
|
||||
github.com/trinodb/trino-go-client v0.330.0 h1:TBbHjFBuRjYbGtkNyRAJfzLOcwvz8ECihtMtxSzXqOc=
|
||||
github.com/trinodb/trino-go-client v0.330.0/go.mod h1:BXj9QNy6pA4Gn8eIu9dVdRhetABCjFAOZ6xxsVsOZJE=
|
||||
github.com/valkey-io/valkey-go v1.0.68 h1:bTbfonp49b41DqrF30q+y2JL3gcbjd2IiacFAtO4JBA=
|
||||
@@ -1347,6 +1393,8 @@ github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9dec
|
||||
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
|
||||
github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0=
|
||||
github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
|
||||
github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ=
|
||||
github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0=
|
||||
github.com/zeebo/errs v1.4.0 h1:XNdoD/RRMKP7HD0UhJnIzUy74ISdGGxURlYG8HSWSfM=
|
||||
@@ -1627,6 +1675,7 @@ golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
@@ -1648,6 +1697,7 @@ golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201204225414-ed752295db88/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
@@ -1700,6 +1750,7 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
@@ -2129,6 +2180,8 @@ gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C
|
||||
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gotest.tools/v3 v3.5.2 h1:7koQfIKdy+I8UTetycgUqXWSDwpgv193Ka+qRsmBY8Q=
|
||||
gotest.tools/v3 v3.5.2/go.mod h1:LtdLGcnqToBH83WByAAi/wiwSFCArdFIUV/xxN4pcjA=
|
||||
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||
|
||||
@@ -21,6 +21,7 @@ import (
)

var expectedToolSources = []string{
"alloydb-omni",
"alloydb-postgres-admin",
"alloydb-postgres-observability",
"alloydb-postgres",

@@ -99,6 +100,7 @@ func TestLoadPrebuiltToolYAMLs(t *testing.T) {
}

func TestGetPrebuiltTool(t *testing.T) {
alloydb_omni_config, _ := Get("alloydb-omni")
alloydb_admin_config, _ := Get("alloydb-postgres-admin")
alloydb_observability_config, _ := Get("alloydb-postgres-observability")
alloydb_config, _ := Get("alloydb-postgres")

@@ -129,6 +131,9 @@ func TestGetPrebuiltTool(t *testing.T) {
neo4jconfig, _ := Get("neo4j")
healthcare_config, _ := Get("cloud-healthcare")
snowflake_config, _ := Get("snowflake")
if len(alloydb_omni_config) <= 0 {
t.Fatalf("unexpected error: could not fetch alloydb omni prebuilt tools yaml")
}
if len(alloydb_admin_config) <= 0 {
t.Fatalf("unexpected error: could not fetch alloydb admin prebuilt tools yaml")
}
internal/prebuiltconfigs/tools/alloydb-omni.yaml (new file, 277 lines)

@@ -0,0 +1,277 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

sources:
alloydb-omni-source:
kind: postgres
host: ${ALLOYDB_OMNI_HOST:localhost}
port: ${ALLOYDB_OMNI_PORT:5432}
database: ${ALLOYDB_OMNI_DATABASE}
user: ${ALLOYDB_OMNI_USER}
password: ${ALLOYDB_OMNI_PASSWORD:}
queryParams: ${ALLOYDB_OMNI_QUERY_PARAMS:}

tools:
execute_sql:
kind: postgres-execute-sql
source: alloydb-omni-source
description: Use this tool to execute sql.

list_tables:
kind: postgres-list-tables
source: alloydb-omni-source
description: "Lists detailed schema information (object type, columns, constraints, indexes, triggers, owner, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."

list_active_queries:
kind: postgres-list-active-queries
source: alloydb-omni-source
description: "List the top N (default 50) currently running queries (state='active') from pg_stat_activity, ordered by longest-running first. Returns pid, user, database, application_name, client_addr, state, wait_event_type/wait_event, backend/xact/query start times, computed query_duration, and the SQL text."

list_available_extensions:
kind: postgres-list-available-extensions
source: alloydb-omni-source
description: "Discover all PostgreSQL extensions available for installation on this server, returning name, default_version, and description."

list_installed_extensions:
kind: postgres-list-installed-extensions
source: alloydb-omni-source
description: "List all installed PostgreSQL extensions with their name, version, schema, owner, and description."

long_running_transactions:
kind: postgres-long-running-transactions
source: alloydb-omni-source

list_locks:
kind: postgres-list-locks
source: alloydb-omni-source

replication_stats:
kind: postgres-replication-stats
source: alloydb-omni-source

list_autovacuum_configurations:
kind: postgres-sql
source: alloydb-omni-source
description: "List PostgreSQL autovacuum-related configurations (name and current setting) from pg_settings."
statement: |
SELECT name,
setting
FROM pg_settings
WHERE category = 'Autovacuum';

list_columnar_configurations:
kind: postgres-sql
source: alloydb-omni-source
description: "List AlloyDB Omni columnar-related configurations (name and current setting) from pg_settings."
statement: |
SELECT name,
setting
FROM pg_settings
WHERE name like 'google_columnar_engine.%';
list_columnar_recommended_columns:
kind: postgres-sql
source: alloydb-omni-source
description: "List the columns recommended for the AlloyDB Omni columnar engine, from g_columnar_recommended_columns."
statement: select * from g_columnar_recommended_columns;
list_memory_configurations:
kind: postgres-sql
source: alloydb-omni-source
description: "List PostgreSQL memory-related configurations (name and current setting) from pg_settings."
statement: |
(
SELECT
name,
pg_size_pretty((setting::bigint * 1024)::bigint) setting
FROM pg_settings
WHERE name IN ('work_mem', 'maintenance_work_mem')
)
UNION ALL
(
SELECT
name,
pg_size_pretty((((setting::bigint) * 8) * 1024)::bigint)
FROM pg_settings
WHERE name IN ('shared_buffers', 'wal_buffers', 'effective_cache_size', 'temp_buffers')
)
ORDER BY 1 DESC;

list_top_bloated_tables:
kind: postgres-sql
source: alloydb-omni-source
description: |
List the top tables by dead-tuple (approximate bloat signal), returning schema, table, live/dead tuples, percentage, and last vacuum/analyze times.
statement: |
SELECT
schemaname AS schema_name,
relname AS relation_name,
n_live_tup AS live_tuples,
n_dead_tup AS dead_tuples,
TRUNC((n_dead_tup::NUMERIC / NULLIF(n_live_tup + n_dead_tup, 0)) * 100, 2) AS dead_tuple_percentage,
last_vacuum,
last_autovacuum,
last_analyze,
last_autoanalyze
FROM pg_stat_user_tables
ORDER BY n_dead_tup DESC
LIMIT COALESCE($1::int, 50);
parameters:
- name: limit
description: "The maximum number of results to return."
type: integer
default: 50

list_replication_slots:
kind: postgres-sql
source: alloydb-omni-source
description: "List key details for all PostgreSQL replication slots (e.g., type, database, active status) and calculates the size of the outstanding WAL that is being prevented from removal by the slot."
statement: |
SELECT
slot_name,
slot_type,
plugin,
database,
temporary,
active,
restart_lsn,
confirmed_flush_lsn,
xmin,
catalog_xmin,
pg_size_pretty(pg_wal_lsn_diff(pg_current_wal_lsn(), restart_lsn)) AS retained_wal
FROM pg_replication_slots;

list_invalid_indexes:
kind: postgres-sql
source: alloydb-omni-source
description: "Lists all invalid PostgreSQL indexes which are taking up disk space but are unusable by the query planner. Typically created by failed CREATE INDEX CONCURRENTLY operations."
statement: |
SELECT
nspname AS schema_name,
indexrelid::regclass AS index_name,
indrelid::regclass AS table_name,
pg_size_pretty(pg_total_relation_size(indexrelid)) AS index_size,
indisready,
indisvalid,
pg_get_indexdef(pg_class.oid) AS index_def
FROM pg_index
JOIN pg_class ON pg_class.oid = pg_index.indexrelid
JOIN pg_namespace ON pg_namespace.oid = pg_class.relnamespace
WHERE indisvalid = FALSE;

get_query_plan:
kind: postgres-sql
source: alloydb-omni-source
description: "Generate a PostgreSQL EXPLAIN plan in JSON format for a single SQL statement—without executing it. This returns the optimizer's estimated plan, costs, and rows (no ANALYZE, no extra options). Use in production safely for plan inspection, regression checks, and query tuning workflows."
statement: |
EXPLAIN (FORMAT JSON) {{.query}};
templateParameters:
- name: query
type: string
description: "The SQL statement for which you want to generate plan (omit the EXPLAIN keyword)."
required: true

list_views:
kind: postgres-list-views
source: alloydb-omni-source

list_schemas:
kind: postgres-list-schemas
source: alloydb-omni-source

list_indexes:
kind: postgres-list-indexes
source: alloydb-omni-source

list_sequences:
kind: postgres-list-sequences
source: alloydb-omni-source

database_overview:
kind: postgres-database-overview
source: alloydb-omni-source

list_triggers:
kind: postgres-list-triggers
source: alloydb-omni-source

list_query_stats:
kind: postgres-list-query-stats
source: alloydb-omni-source

get_column_cardinality:
kind: postgres-get-column-cardinality
source: alloydb-omni-source

list_table_stats:
kind: postgres-list-table-stats
source: alloydb-omni-source

list_publication_tables:
kind: postgres-list-publication-tables
source: alloydb-omni-source

list_tablespaces:
kind: postgres-list-tablespaces
source: alloydb-omni-source

list_pg_settings:
kind: postgres-list-pg-settings
source: alloydb-omni-source

list_database_stats:
kind: postgres-list-database-stats
source: alloydb-omni-source

list_roles:
kind: postgres-list-roles
source: alloydb-omni-source

list_stored_procedure:
kind: postgres-list-stored-procedure
source: alloydb-omni-source

toolsets:
alloydb_omni_database_tools:
- execute_sql
- list_tables
- list_active_queries
- list_available_extensions
- list_installed_extensions
- list_autovacuum_configurations
- list_columnar_configurations
- list_columnar_recommended_columns
- list_memory_configurations
- list_top_bloated_tables
- list_replication_slots
- list_invalid_indexes
- get_query_plan
- list_views
- list_schemas
- database_overview
- list_triggers
- list_indexes
- list_sequences
- long_running_transactions
- list_locks
- replication_stats
- list_query_stats
- get_column_cardinality
- list_publication_tables
- list_tablespaces
- list_pg_settings
- list_database_stats
- list_roles
- list_table_stats
- list_stored_procedure
@@ -46,6 +46,9 @@ tools:
|
||||
create_backup:
|
||||
kind: cloud-sql-create-backup
|
||||
source: cloud-sql-admin-source
|
||||
restore_backup:
|
||||
kind: cloud-sql-restore-backup
|
||||
source: cloud-sql-admin-source
|
||||
|
||||
toolsets:
|
||||
cloud_sql_mssql_admin_tools:
|
||||
@@ -58,3 +61,4 @@ toolsets:
|
||||
- wait_for_operation
|
||||
- clone_instance
|
||||
- create_backup
|
||||
- restore_backup
|
||||
|
||||
@@ -46,6 +46,9 @@ tools:
|
||||
create_backup:
|
||||
kind: cloud-sql-create-backup
|
||||
source: cloud-sql-admin-source
|
||||
restore_backup:
|
||||
kind: cloud-sql-restore-backup
|
||||
source: cloud-sql-admin-source
|
||||
|
||||
toolsets:
|
||||
cloud_sql_mysql_admin_tools:
|
||||
@@ -58,3 +61,4 @@ toolsets:
|
||||
- wait_for_operation
|
||||
- clone_instance
|
||||
- create_backup
|
||||
- restore_backup
|
||||
|
||||
@@ -49,6 +49,9 @@ tools:
|
||||
create_backup:
|
||||
kind: cloud-sql-create-backup
|
||||
source: cloud-sql-admin-source
|
||||
restore_backup:
|
||||
kind: cloud-sql-restore-backup
|
||||
source: cloud-sql-admin-source
|
||||
|
||||
toolsets:
|
||||
cloud_sql_postgres_admin_tools:
|
||||
@@ -62,3 +65,4 @@ toolsets:
|
||||
- postgres_upgrade_precheck
|
||||
- clone_instance
|
||||
- create_backup
|
||||
- restore_backup
|
||||
|
||||
@@ -64,12 +64,14 @@ type ServerConfig struct {
|
||||
Stdio bool
|
||||
// DisableReload indicates if the user has disabled dynamic reloading for Toolbox.
|
||||
DisableReload bool
|
||||
// UI indicates if Toolbox UI endpoints (/ui) are available
|
||||
// UI indicates if Toolbox UI endpoints (/ui) are available.
|
||||
UI bool
|
||||
// Specifies a list of origins permitted to access this server.
|
||||
AllowedOrigins []string
|
||||
// Specifies a list of hosts permitted to access this server
|
||||
// Specifies a list of hosts permitted to access this server.
|
||||
AllowedHosts []string
|
||||
// UserAgentMetadata specifies additional metadata to append to the User-Agent string.
|
||||
UserAgentMetadata []string
|
||||
}
|
||||
|
||||
type logFormat string
|
||||
|
||||
@@ -27,19 +27,21 @@ import (
|
||||
v20241105 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20241105"
|
||||
v20250326 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20250326"
|
||||
v20250618 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20250618"
|
||||
v20251125 "github.com/googleapis/genai-toolbox/internal/server/mcp/v20251125"
|
||||
"github.com/googleapis/genai-toolbox/internal/server/resources"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
)
|
||||
|
||||
// LATEST_PROTOCOL_VERSION is the latest version of the MCP protocol supported.
|
||||
// Update the version used in InitializeResponse when this value is updated.
|
||||
const LATEST_PROTOCOL_VERSION = v20250618.PROTOCOL_VERSION
|
||||
const LATEST_PROTOCOL_VERSION = v20251125.PROTOCOL_VERSION
|
||||
|
||||
// SUPPORTED_PROTOCOL_VERSIONS is the MCP protocol versions that are supported.
|
||||
var SUPPORTED_PROTOCOL_VERSIONS = []string{
|
||||
v20241105.PROTOCOL_VERSION,
|
||||
v20250326.PROTOCOL_VERSION,
|
||||
v20250618.PROTOCOL_VERSION,
|
||||
v20251125.PROTOCOL_VERSION,
|
||||
}
|
||||
|
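As an aside on how these constants are typically used: during initialize, the server echoes the client's requested version if it is supported and otherwise falls back to the latest supported version. A minimal standalone sketch of that rule (assumed behavior, not code from this change):

package main

import "fmt"

// Supported versions, mirroring SUPPORTED_PROTOCOL_VERSIONS above.
var supported = []string{"2024-11-05", "2025-03-26", "2025-06-18", "2025-11-25"}

// latest mirrors LATEST_PROTOCOL_VERSION above.
const latest = "2025-11-25"

// negotiate returns the protocol version the server should answer with.
func negotiate(requested string) string {
	for _, v := range supported {
		if v == requested {
			return v
		}
	}
	return latest
}

func main() {
	fmt.Println(negotiate("2025-11-25")) // echoed back: 2025-11-25
	fmt.Println(negotiate("2023-01-01")) // unsupported: falls back to 2025-11-25
}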
||||
// InitializeResponse runs capability negotiation and protocol version agreement.
|
||||
@@ -102,6 +104,8 @@ func NotificationHandler(ctx context.Context, body []byte) error {
|
||||
// This is the Operation phase of the lifecycle for MCP client-server connections.
|
||||
func ProcessMethod(ctx context.Context, mcpVersion string, id jsonrpc.RequestId, method string, toolset tools.Toolset, promptset prompts.Promptset, resourceMgr *resources.ResourceManager, body []byte, header http.Header) (any, error) {
|
||||
switch mcpVersion {
|
||||
case v20251125.PROTOCOL_VERSION:
|
||||
return v20251125.ProcessMethod(ctx, id, method, toolset, promptset, resourceMgr, body, header)
|
||||
case v20250618.PROTOCOL_VERSION:
|
||||
return v20250618.ProcessMethod(ctx, id, method, toolset, promptset, resourceMgr, body, header)
|
||||
case v20250326.PROTOCOL_VERSION:
|
||||
|
||||
@@ -183,6 +183,13 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
||||
}
|
||||
logger.DebugContext(ctx, fmt.Sprintf("invocation params: %s", params))
|
||||
|
||||
embeddingModels := resourceMgr.GetEmbeddingModelMap()
|
||||
params, err = tool.EmbedParams(ctx, params, embeddingModels)
|
||||
if err != nil {
|
||||
err = fmt.Errorf("error embedding parameters: %w", err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||
}
|
||||
|
||||
// run tool invocation and generate response.
|
||||
results, err := tool.Invoke(ctx, resourceMgr, params, accessToken)
|
||||
if err != nil {
|
||||
|
||||
@@ -183,6 +183,13 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
||||
}
|
||||
logger.DebugContext(ctx, fmt.Sprintf("invocation params: %s", params))
|
||||
|
||||
embeddingModels := resourceMgr.GetEmbeddingModelMap()
|
||||
params, err = tool.EmbedParams(ctx, params, embeddingModels)
|
||||
if err != nil {
|
||||
err = fmt.Errorf("error embedding parameters: %w", err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||
}
|
||||
|
||||
// run tool invocation and generate response.
|
||||
results, err := tool.Invoke(ctx, resourceMgr, params, accessToken)
|
||||
if err != nil {
|
||||
|
||||
@@ -176,6 +176,13 @@ func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *re
|
||||
}
|
||||
logger.DebugContext(ctx, fmt.Sprintf("invocation params: %s", params))
|
||||
|
||||
embeddingModels := resourceMgr.GetEmbeddingModelMap()
|
||||
params, err = tool.EmbedParams(ctx, params, embeddingModels)
|
||||
if err != nil {
|
||||
err = fmt.Errorf("error embedding parameters: %w", err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||
}
|
||||
|
||||
// run tool invocation and generate response.
|
||||
results, err := tool.Invoke(ctx, resourceMgr, params, accessToken)
|
||||
if err != nil {
|
||||
|
||||
internal/server/mcp/v20251125/method.go (new file)
@@ -0,0 +1,333 @@
|
||||
// Copyright 2026 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package v20251125
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/googleapis/genai-toolbox/internal/prompts"
|
||||
"github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc"
|
||||
"github.com/googleapis/genai-toolbox/internal/server/resources"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
)
|
||||
|
||||
// ProcessMethod returns a response for the request.
|
||||
func ProcessMethod(ctx context.Context, id jsonrpc.RequestId, method string, toolset tools.Toolset, promptset prompts.Promptset, resourceMgr *resources.ResourceManager, body []byte, header http.Header) (any, error) {
|
||||
switch method {
|
||||
case PING:
|
||||
return pingHandler(id)
|
||||
case TOOLS_LIST:
|
||||
return toolsListHandler(id, toolset, body)
|
||||
case TOOLS_CALL:
|
||||
return toolsCallHandler(ctx, id, resourceMgr, body, header)
|
||||
case PROMPTS_LIST:
|
||||
return promptsListHandler(ctx, id, promptset, body)
|
||||
case PROMPTS_GET:
|
||||
return promptsGetHandler(ctx, id, resourceMgr, body)
|
||||
default:
|
||||
err := fmt.Errorf("invalid method %s", method)
|
||||
return jsonrpc.NewError(id, jsonrpc.METHOD_NOT_FOUND, err.Error(), nil), err
|
||||
}
|
||||
}
|
||||
|
||||
// pingHandler handles the "ping" method by returning an empty response.
|
||||
func pingHandler(id jsonrpc.RequestId) (any, error) {
|
||||
return jsonrpc.JSONRPCResponse{
|
||||
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||
Id: id,
|
||||
Result: struct{}{},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func toolsListHandler(id jsonrpc.RequestId, toolset tools.Toolset, body []byte) (any, error) {
|
||||
var req ListToolsRequest
|
||||
if err := json.Unmarshal(body, &req); err != nil {
|
||||
err = fmt.Errorf("invalid mcp tools list request: %w", err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
|
||||
result := ListToolsResult{
|
||||
Tools: toolset.McpManifest,
|
||||
}
|
||||
return jsonrpc.JSONRPCResponse{
|
||||
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||
Id: id,
|
||||
Result: result,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// toolsCallHandler generates a response for a tools call.
|
||||
func toolsCallHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *resources.ResourceManager, body []byte, header http.Header) (any, error) {
|
||||
authServices := resourceMgr.GetAuthServiceMap()
|
||||
|
||||
// retrieve logger from context
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||
}
|
||||
|
||||
var req CallToolRequest
|
||||
if err = json.Unmarshal(body, &req); err != nil {
|
||||
err = fmt.Errorf("invalid mcp tools call request: %w", err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
|
||||
toolName := req.Params.Name
|
||||
toolArgument := req.Params.Arguments
|
||||
logger.DebugContext(ctx, fmt.Sprintf("tool name: %s", toolName))
|
||||
tool, ok := resourceMgr.GetTool(toolName)
|
||||
if !ok {
|
||||
err = fmt.Errorf("invalid tool name: tool with name %q does not exist", toolName)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||
}
|
||||
|
||||
// Get access token
|
||||
authTokenHeadername, err := tool.GetAuthTokenHeaderName(resourceMgr)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("error during invocation: %w", err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, errMsg.Error(), nil), errMsg
|
||||
}
|
||||
accessToken := tools.AccessToken(header.Get(authTokenHeadername))
|
||||
|
||||
// Check if this specific tool requires the standard authorization header
|
||||
clientAuth, err := tool.RequiresClientAuthorization(resourceMgr)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("error during invocation: %w", err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, errMsg.Error(), nil), errMsg
|
||||
}
|
||||
if clientAuth {
|
||||
if accessToken == "" {
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, "missing access token in the 'Authorization' header", nil), util.ErrUnauthorized
|
||||
}
|
||||
}
|
||||
|
||||
	// Marshal the arguments and decode them with util.DecodeJSON to avoid losing the distinction between floats and ints.
|
||||
aMarshal, err := json.Marshal(toolArgument)
|
||||
if err != nil {
|
||||
err = fmt.Errorf("unable to marshal tools argument: %w", err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||
}
|
||||
|
||||
var data map[string]any
|
||||
if err = util.DecodeJSON(bytes.NewBuffer(aMarshal), &data); err != nil {
|
||||
err = fmt.Errorf("unable to decode tools argument: %w", err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||
}
|
||||
|
||||
// Tool authentication
|
||||
// claimsFromAuth maps the name of the authservice to the claims retrieved from it.
|
||||
claimsFromAuth := make(map[string]map[string]any)
|
||||
|
||||
// if using stdio, header will be nil and auth will not be supported
|
||||
if header != nil {
|
||||
for _, aS := range authServices {
|
||||
claims, err := aS.GetClaimsFromHeader(ctx, header)
|
||||
if err != nil {
|
||||
logger.DebugContext(ctx, err.Error())
|
||||
continue
|
||||
}
|
||||
if claims == nil {
|
||||
// authService not present in header
|
||||
continue
|
||||
}
|
||||
claimsFromAuth[aS.GetName()] = claims
|
||||
}
|
||||
}
|
||||
|
||||
// Tool authorization check
|
||||
verifiedAuthServices := make([]string, len(claimsFromAuth))
|
||||
i := 0
|
||||
for k := range claimsFromAuth {
|
||||
verifiedAuthServices[i] = k
|
||||
i++
|
||||
}
|
||||
|
||||
// Check if any of the specified auth services is verified
|
||||
isAuthorized := tool.Authorized(verifiedAuthServices)
|
||||
if !isAuthorized {
|
||||
err = fmt.Errorf("unauthorized Tool call: Please make sure your specify correct auth headers: %w", util.ErrUnauthorized)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
logger.DebugContext(ctx, "tool invocation authorized")
|
||||
|
||||
params, err := tool.ParseParams(data, claimsFromAuth)
|
||||
if err != nil {
|
||||
err = fmt.Errorf("provided parameters were invalid: %w", err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||
}
|
||||
logger.DebugContext(ctx, fmt.Sprintf("invocation params: %s", params))
|
||||
|
||||
embeddingModels := resourceMgr.GetEmbeddingModelMap()
|
||||
params, err = tool.EmbedParams(ctx, params, embeddingModels)
|
||||
if err != nil {
|
||||
err = fmt.Errorf("error embedding parameters: %w", err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||
}
|
||||
|
||||
// run tool invocation and generate response.
|
||||
results, err := tool.Invoke(ctx, resourceMgr, params, accessToken)
|
||||
if err != nil {
|
||||
errStr := err.Error()
|
||||
// Missing authService tokens.
|
||||
if errors.Is(err, util.ErrUnauthorized) {
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
// Upstream auth error
|
||||
if strings.Contains(errStr, "Error 401") || strings.Contains(errStr, "Error 403") {
|
||||
if clientAuth {
|
||||
// Error with client credentials should pass down to the client
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
// Auth error with ADC should raise internal 500 error
|
||||
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||
}
|
||||
text := TextContent{
|
||||
Type: "text",
|
||||
Text: err.Error(),
|
||||
}
|
||||
return jsonrpc.JSONRPCResponse{
|
||||
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||
Id: id,
|
||||
Result: CallToolResult{Content: []TextContent{text}, IsError: true},
|
||||
}, nil
|
||||
}
|
||||
|
||||
content := make([]TextContent, 0)
|
||||
|
||||
sliceRes, ok := results.([]any)
|
||||
if !ok {
|
||||
sliceRes = []any{results}
|
||||
}
|
||||
|
||||
for _, d := range sliceRes {
|
||||
text := TextContent{Type: "text"}
|
||||
dM, err := json.Marshal(d)
|
||||
if err != nil {
|
||||
text.Text = fmt.Sprintf("fail to marshal: %s, result: %s", err, d)
|
||||
} else {
|
||||
text.Text = string(dM)
|
||||
}
|
||||
content = append(content, text)
|
||||
}
|
||||
|
||||
return jsonrpc.JSONRPCResponse{
|
||||
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||
Id: id,
|
||||
Result: CallToolResult{Content: content},
|
||||
}, nil
|
||||
}
|
||||
|
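For reference, a JSON-RPC payload that the toolsCallHandler above would accept could look like the following sketch; the tool name and arguments are hypothetical, and only the shape (CallToolRequest in types.go) is taken from this change:

// Hypothetical tools/call payload; tool name and arguments are made up.
body := []byte(`{
  "jsonrpc": "2.0",
  "id": 1,
  "method": "tools/call",
  "params": {
    "name": "list_tables",
    "arguments": {"table_names": "orders"}
  }
}`)
_ = body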
||||
// promptsListHandler handles the "prompts/list" method.
|
||||
func promptsListHandler(ctx context.Context, id jsonrpc.RequestId, promptset prompts.Promptset, body []byte) (any, error) {
|
||||
// retrieve logger from context
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||
}
|
||||
logger.DebugContext(ctx, "handling prompts/list request")
|
||||
|
||||
var req ListPromptsRequest
|
||||
if err := json.Unmarshal(body, &req); err != nil {
|
||||
err = fmt.Errorf("invalid mcp prompts list request: %w", err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
|
||||
result := ListPromptsResult{
|
||||
Prompts: promptset.McpManifest,
|
||||
}
|
||||
logger.DebugContext(ctx, fmt.Sprintf("returning %d prompts", len(promptset.McpManifest)))
|
||||
return jsonrpc.JSONRPCResponse{
|
||||
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||
Id: id,
|
||||
Result: result,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// promptsGetHandler handles the "prompts/get" method.
|
||||
func promptsGetHandler(ctx context.Context, id jsonrpc.RequestId, resourceMgr *resources.ResourceManager, body []byte) (any, error) {
|
||||
// retrieve logger from context
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||
}
|
||||
logger.DebugContext(ctx, "handling prompts/get request")
|
||||
|
||||
var req GetPromptRequest
|
||||
if err := json.Unmarshal(body, &req); err != nil {
|
||||
err = fmt.Errorf("invalid mcp prompts/get request: %w", err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_REQUEST, err.Error(), nil), err
|
||||
}
|
||||
|
||||
promptName := req.Params.Name
|
||||
logger.DebugContext(ctx, fmt.Sprintf("prompt name: %s", promptName))
|
||||
prompt, ok := resourceMgr.GetPrompt(promptName)
|
||||
if !ok {
|
||||
err := fmt.Errorf("prompt with name %q does not exist", promptName)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||
}
|
||||
|
||||
// Parse the arguments provided in the request.
|
||||
argValues, err := prompt.ParseArgs(req.Params.Arguments, nil)
|
||||
if err != nil {
|
||||
err = fmt.Errorf("invalid arguments for prompt %q: %w", promptName, err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INVALID_PARAMS, err.Error(), nil), err
|
||||
}
|
||||
logger.DebugContext(ctx, fmt.Sprintf("parsed args: %v", argValues))
|
||||
|
||||
// Substitute the argument values into the prompt's messages.
|
||||
substituted, err := prompt.SubstituteParams(argValues)
|
||||
if err != nil {
|
||||
err = fmt.Errorf("error substituting params for prompt %q: %w", promptName, err)
|
||||
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||
}
|
||||
|
||||
// Cast the result to the expected []prompts.Message type.
|
||||
substitutedMessages, ok := substituted.([]prompts.Message)
|
||||
if !ok {
|
||||
err = fmt.Errorf("internal error: SubstituteParams returned unexpected type")
|
||||
return jsonrpc.NewError(id, jsonrpc.INTERNAL_ERROR, err.Error(), nil), err
|
||||
}
|
||||
logger.DebugContext(ctx, "substituted params successfully")
|
||||
|
||||
// Format the response messages into the required structure.
|
||||
promptMessages := make([]PromptMessage, len(substitutedMessages))
|
||||
for i, msg := range substitutedMessages {
|
||||
promptMessages[i] = PromptMessage{
|
||||
Role: msg.Role,
|
||||
Content: TextContent{
|
||||
Type: "text",
|
||||
Text: msg.Content,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
result := GetPromptResult{
|
||||
Description: prompt.Manifest().Description,
|
||||
Messages: promptMessages,
|
||||
}
|
||||
|
||||
return jsonrpc.JSONRPCResponse{
|
||||
Jsonrpc: jsonrpc.JSONRPC_VERSION,
|
||||
Id: id,
|
||||
Result: result,
|
||||
}, nil
|
||||
}
|
||||
internal/server/mcp/v20251125/types.go (new file)
@@ -0,0 +1,219 @@
|
||||
// Copyright 2026 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package v20251125
|
||||
|
||||
import (
|
||||
"github.com/googleapis/genai-toolbox/internal/prompts"
|
||||
"github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
)
|
||||
|
||||
// SERVER_NAME is the server name used in Implementation.
|
||||
const SERVER_NAME = "Toolbox"
|
||||
|
||||
// PROTOCOL_VERSION is the version of the MCP protocol in this package.
|
||||
const PROTOCOL_VERSION = "2025-11-25"
|
||||
|
||||
// methods that are supported.
|
||||
const (
|
||||
PING = "ping"
|
||||
TOOLS_LIST = "tools/list"
|
||||
TOOLS_CALL = "tools/call"
|
||||
PROMPTS_LIST = "prompts/list"
|
||||
PROMPTS_GET = "prompts/get"
|
||||
)
|
||||
|
||||
/* Empty result */
|
||||
|
||||
// EmptyResult represents a response that indicates success but carries no data.
|
||||
type EmptyResult jsonrpc.Result
|
||||
|
||||
/* Pagination */
|
||||
|
||||
// Cursor is an opaque token used to represent a cursor for pagination.
|
||||
type Cursor string
|
||||
|
||||
type PaginatedRequest struct {
|
||||
jsonrpc.Request
|
||||
Params struct {
|
||||
// An opaque token representing the current pagination position.
|
||||
// If provided, the server should return results starting after this cursor.
|
||||
Cursor Cursor `json:"cursor,omitempty"`
|
||||
} `json:"params,omitempty"`
|
||||
}
|
||||
|
||||
type PaginatedResult struct {
|
||||
jsonrpc.Result
|
||||
// An opaque token representing the pagination position after the last returned result.
|
||||
// If present, there may be more results available.
|
||||
NextCursor Cursor `json:"nextCursor,omitempty"`
|
||||
}
|
||||
|
||||
/* Tools */
|
||||
|
||||
// Sent from the client to request a list of tools the server has.
|
||||
type ListToolsRequest struct {
|
||||
PaginatedRequest
|
||||
}
|
||||
|
||||
// The server's response to a tools/list request from the client.
|
||||
type ListToolsResult struct {
|
||||
PaginatedResult
|
||||
Tools []tools.McpManifest `json:"tools"`
|
||||
}
|
||||
|
||||
// Used by the client to invoke a tool provided by the server.
|
||||
type CallToolRequest struct {
|
||||
jsonrpc.Request
|
||||
Params struct {
|
||||
Name string `json:"name"`
|
||||
Arguments map[string]any `json:"arguments,omitempty"`
|
||||
} `json:"params,omitempty"`
|
||||
}
|
||||
|
||||
// The sender or recipient of messages and data in a conversation.
|
||||
type Role string
|
||||
|
||||
const (
|
||||
RoleUser Role = "user"
|
||||
RoleAssistant Role = "assistant"
|
||||
)
|
||||
|
||||
// Base for objects that include optional annotations for the client.
|
||||
// The client can use annotations to inform how objects are used or displayed
|
||||
type Annotated struct {
|
||||
Annotations *struct {
|
||||
// Describes who the intended customer of this object or data is.
|
||||
// It can include multiple entries to indicate content useful for multiple
|
||||
// audiences (e.g., `["user", "assistant"]`).
|
||||
Audience []Role `json:"audience,omitempty"`
|
||||
// Describes how important this data is for operating the server.
|
||||
//
|
||||
// A value of 1 means "most important," and indicates that the data is
|
||||
// effectively required, while 0 means "least important," and indicates that
|
||||
// the data is entirely optional.
|
||||
//
|
||||
// @TJS-type number
|
||||
// @minimum 0
|
||||
// @maximum 1
|
||||
Priority float64 `json:"priority,omitempty"`
|
||||
} `json:"annotations,omitempty"`
|
||||
}
|
||||
|
||||
// TextContent represents text provided to or from an LLM.
|
||||
type TextContent struct {
|
||||
Annotated
|
||||
Type string `json:"type"`
|
||||
// The text content of the message.
|
||||
Text string `json:"text"`
|
||||
}
|
||||
|
||||
// The server's response to a tool call.
|
||||
//
|
||||
// Any errors that originate from the tool SHOULD be reported inside the result
|
||||
// object, with `isError` set to true, _not_ as an MCP protocol-level error
|
||||
// response. Otherwise, the LLM would not be able to see that an error occurred
|
||||
// and self-correct.
|
||||
//
|
||||
// However, any errors in _finding_ the tool, an error indicating that the
|
||||
// server does not support tool calls, or any other exceptional conditions,
|
||||
// should be reported as an MCP error response.
|
||||
type CallToolResult struct {
|
||||
jsonrpc.Result
|
||||
// Could be either a TextContent, ImageContent, or EmbeddedResources
|
||||
// For Toolbox, we will only be sending TextContent
|
||||
Content []TextContent `json:"content"`
|
||||
// Whether the tool call ended in an error.
|
||||
// If not set, this is assumed to be false (the call was successful).
|
||||
//
|
||||
// Any errors that originate from the tool SHOULD be reported inside the result
|
||||
// object, with `isError` set to true, _not_ as an MCP protocol-level error
|
||||
// response. Otherwise, the LLM would not be able to see that an error occurred
|
||||
// and self-correct.
|
||||
//
|
||||
// However, any errors in _finding_ the tool, an error indicating that the
|
||||
// server does not support tool calls, or any other exceptional conditions,
|
||||
// should be reported as an MCP error response.
|
||||
IsError bool `json:"isError,omitempty"`
|
||||
// An optional JSON object that represents the structured result of the tool call.
|
||||
StructuredContent map[string]any `json:"structuredContent,omitempty"`
|
||||
}
|
||||
|
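A small sketch of the two result shapes this convention implies, using the TextContent and CallToolResult types defined in this file (all values are made up):

// In-band tool failure: IsError is true and the message goes into Content,
// so the model can see the error and self-correct.
failure := CallToolResult{
	Content: []TextContent{{Type: "text", Text: `relation "orders" does not exist`}},
	IsError: true,
}

// Successful call: Content holds the serialized results and IsError stays false.
success := CallToolResult{
	Content: []TextContent{{Type: "text", Text: `[{"count": 42}]`}},
}
_, _ = failure, success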
||||
// Additional properties describing a Tool to clients.
|
||||
//
|
||||
// NOTE: all properties in ToolAnnotations are **hints**.
|
||||
// They are not guaranteed to provide a faithful description of
|
||||
// tool behavior (including descriptive properties like `title`).
|
||||
//
|
||||
// Clients should never make tool use decisions based on ToolAnnotations
|
||||
// received from untrusted servers.
|
||||
type ToolAnnotations struct {
|
||||
// A human-readable title for the tool.
|
||||
Title string `json:"title,omitempty"`
|
||||
// If true, the tool does not modify its environment.
|
||||
// Default: false
|
||||
ReadOnlyHint bool `json:"readOnlyHint,omitempty"`
|
||||
// If true, the tool may perform destructive updates to its environment.
|
||||
// If false, the tool performs only additive updates.
|
||||
// (This property is meaningful only when `readOnlyHint == false`)
|
||||
// Default: true
|
||||
DestructiveHint bool `json:"destructiveHint,omitempty"`
|
||||
// If true, calling the tool repeatedly with the same arguments
|
||||
	// will have no additional effect on its environment.
|
||||
// (This property is meaningful only when `readOnlyHint == false`)
|
||||
// Default: false
|
||||
IdempotentHint bool `json:"idempotentHint,omitempty"`
|
||||
// If true, this tool may interact with an "open world" of external
|
||||
// entities. If false, the tool's domain of interaction is closed.
|
||||
// For example, the world of a web search tool is open, whereas that
|
||||
// of a memory tool is not.
|
||||
// Default: true
|
||||
OpenWorldHint bool `json:"openWorldHint,omitempty"`
|
||||
}
|
||||
|
||||
/* Prompts */
|
||||
|
||||
// Sent from the client to request a list of prompts the server has.
|
||||
type ListPromptsRequest struct {
|
||||
PaginatedRequest
|
||||
}
|
||||
|
||||
// The server's response to a prompts/list request from the client.
|
||||
type ListPromptsResult struct {
|
||||
PaginatedResult
|
||||
Prompts []prompts.McpManifest `json:"prompts"`
|
||||
}
|
||||
|
||||
// Used by the client to get a prompt provided by the server.
|
||||
type GetPromptRequest struct {
|
||||
jsonrpc.Request
|
||||
Params struct {
|
||||
Name string `json:"name"`
|
||||
Arguments map[string]any `json:"arguments,omitempty"`
|
||||
} `json:"params"`
|
||||
}
|
||||
|
||||
// The server's response to a prompts/get request from the client.
|
||||
type GetPromptResult struct {
|
||||
jsonrpc.Result
|
||||
Description string `json:"description,omitempty"`
|
||||
Messages []PromptMessage `json:"messages"`
|
||||
}
|
||||
|
||||
// Describes a message returned as part of a prompt.
|
||||
type PromptMessage struct {
|
||||
Role string `json:"role"`
|
||||
Content TextContent `json:"content"`
|
||||
}
|
||||
@@ -37,6 +37,7 @@ const jsonrpcVersion = "2.0"
|
||||
const protocolVersion20241105 = "2024-11-05"
|
||||
const protocolVersion20250326 = "2025-03-26"
|
||||
const protocolVersion20250618 = "2025-06-18"
|
||||
const protocolVersion20251125 = "2025-11-25"
|
||||
const serverName = "Toolbox"
|
||||
|
||||
var basicInputSchema = map[string]any{
|
||||
@@ -485,6 +486,23 @@ func TestMcpEndpoint(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "version 2025-11-25",
|
||||
protocol: protocolVersion20251125,
|
||||
idHeader: false,
|
||||
initWant: map[string]any{
|
||||
"jsonrpc": "2.0",
|
||||
"id": "mcp-initialize",
|
||||
"result": map[string]any{
|
||||
"protocolVersion": "2025-11-25",
|
||||
"capabilities": map[string]any{
|
||||
"tools": map[string]any{"listChanged": false},
|
||||
"prompts": map[string]any{"listChanged": false},
|
||||
},
|
||||
"serverInfo": map[string]any{"name": serverName, "version": fakeVersionString},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
for _, vtc := range versTestCases {
|
||||
t.Run(vtc.name, func(t *testing.T) {
|
||||
@@ -494,8 +512,7 @@ func TestMcpEndpoint(t *testing.T) {
|
||||
if sessionId != "" {
|
||||
header["Mcp-Session-Id"] = sessionId
|
||||
}
|
||||
|
||||
if vtc.protocol == protocolVersion20250618 {
|
||||
if vtc.protocol != protocolVersion20241105 && vtc.protocol != protocolVersion20250326 {
|
||||
header["MCP-Protocol-Version"] = vtc.protocol
|
||||
}
|
||||
|
||||
|
||||
@@ -64,7 +64,11 @@ func InitializeConfigs(ctx context.Context, cfg ServerConfig) (
|
||||
map[string]prompts.Promptset,
|
||||
error,
|
||||
) {
|
||||
ctx = util.WithUserAgent(ctx, cfg.Version)
|
||||
metadataStr := cfg.Version
|
||||
if len(cfg.UserAgentMetadata) > 0 {
|
||||
metadataStr += "+" + strings.Join(cfg.UserAgentMetadata, "+")
|
||||
}
|
||||
ctx = util.WithUserAgent(ctx, metadataStr)
|
||||
instrumentation, err := util.InstrumentationFromContext(ctx)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
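A standalone illustration of the User-Agent metadata concatenation introduced above (the version string and metadata values are made up):

package main

import (
	"fmt"
	"strings"
)

func main() {
	version := "toolbox/0.18.0" // hypothetical cfg.Version
	userAgentMetadata := []string{"gemini-cli", "vscode"}

	metadataStr := version
	if len(userAgentMetadata) > 0 {
		metadataStr += "+" + strings.Join(userAgentMetadata, "+")
	}
	fmt.Println(metadataStr) // toolbox/0.18.0+gemini-cli+vscode
}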
@@ -304,10 +308,14 @@ func hostCheck(allowedHosts map[string]struct{}) func(http.Handler) http.Handler
|
||||
return func(next http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
_, hasWildcard := allowedHosts["*"]
|
||||
_, hostIsAllowed := allowedHosts[r.Host]
|
||||
hostname := r.Host
|
||||
if host, _, err := net.SplitHostPort(r.Host); err == nil {
|
||||
hostname = host
|
||||
}
|
||||
_, hostIsAllowed := allowedHosts[hostname]
|
||||
if !hasWildcard && !hostIsAllowed {
|
||||
// Return 400 Bad Request or 403 Forbidden to block the attack
|
||||
http.Error(w, "Invalid Host header", http.StatusBadRequest)
|
||||
// Return 403 Forbidden to block the attack
|
||||
http.Error(w, "Invalid Host header", http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
next.ServeHTTP(w, r)
|
||||
@@ -406,7 +414,11 @@ func NewServer(ctx context.Context, cfg ServerConfig) (*Server, error) {
|
||||
}
|
||||
allowedHostsMap := make(map[string]struct{}, len(cfg.AllowedHosts))
|
||||
for _, h := range cfg.AllowedHosts {
|
||||
allowedHostsMap[h] = struct{}{}
|
||||
hostname := h
|
||||
if host, _, err := net.SplitHostPort(h); err == nil {
|
||||
hostname = host
|
||||
}
|
||||
allowedHostsMap[hostname] = struct{}{}
|
||||
}
|
||||
r.Use(hostCheck(allowedHostsMap))
|
||||
|
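Both host-related hunks above normalize hosts by stripping any port before comparison, so an allowed host such as localhost also matches a request whose Host header is localhost:5000. A standalone sketch of that normalization:

package main

import (
	"fmt"
	"net"
)

// normalizeHost strips the port, if present, mirroring the hostCheck change above.
func normalizeHost(h string) string {
	if host, _, err := net.SplitHostPort(h); err == nil {
		return host
	}
	return h
}

func main() {
	fmt.Println(normalizeHost("localhost:5000")) // localhost
	fmt.Println(normalizeHost("localhost"))      // localhost (no port to strip)
	fmt.Println(normalizeHost("127.0.0.1:8080")) // 127.0.0.1
}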
||||
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"text/template"
|
||||
"time"
|
||||
@@ -36,7 +37,10 @@ import (
|
||||
|
||||
const SourceKind string = "cloud-sql-admin"
|
||||
|
||||
var targetLinkRegex = regexp.MustCompile(`/projects/([^/]+)/instances/([^/]+)/databases/([^/]+)`)
|
||||
var (
|
||||
targetLinkRegex = regexp.MustCompile(`/projects/([^/]+)/instances/([^/]+)/databases/([^/]+)`)
|
||||
backupDRRegex = regexp.MustCompile(`^projects/([^/]+)/locations/([^/]+)/backupVaults/([^/]+)/dataSources/([^/]+)/backups/([^/]+)$`)
|
||||
)
|
||||
|
||||
// validate interface
|
||||
var _ sources.SourceConfig = Config{}
|
||||
@@ -374,6 +378,48 @@ func (s *Source) InsertBackupRun(ctx context.Context, project, instance, locatio
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
func (s *Source) RestoreBackup(ctx context.Context, targetProject, targetInstance, sourceProject, sourceInstance, backupID, accessToken string) (any, error) {
|
||||
request := &sqladmin.InstancesRestoreBackupRequest{}
|
||||
|
||||
// There are 3 scenarios for the backup identifier:
|
||||
// 1. The identifier is an int64 containing the timestamp of the BackupRun.
|
||||
// This is used to restore standard backups, and the RestoreBackupContext
|
||||
// field should be populated with the backup ID and source instance info.
|
||||
// 2. The identifier is a string of the format
|
||||
// 'projects/{project-id}/locations/{location}/backupVaults/{backupvault}/dataSources/{datasource}/backups/{backup-uid}'.
|
||||
// This is used to restore BackupDR backups, and the BackupdrBackup field
|
||||
// should be populated.
|
||||
	// 3. The identifier is a string of the format
|
||||
// 'projects/{project-id}/backups/{backup-uid}'. In this case, the Backup
|
||||
// field should be populated.
|
||||
if backupRunID, err := strconv.ParseInt(backupID, 10, 64); err == nil {
|
||||
if sourceProject == "" || targetInstance == "" {
|
||||
return nil, fmt.Errorf("source project and instance are required when restoring via backup ID")
|
||||
}
|
||||
request.RestoreBackupContext = &sqladmin.RestoreBackupContext{
|
||||
Project: sourceProject,
|
||||
InstanceId: sourceInstance,
|
||||
BackupRunId: backupRunID,
|
||||
}
|
||||
} else if backupDRRegex.MatchString(backupID) {
|
||||
request.BackupdrBackup = backupID
|
||||
} else {
|
||||
request.Backup = backupID
|
||||
}
|
||||
|
||||
service, err := s.GetService(ctx, string(accessToken))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
resp, err := service.Instances.RestoreBackup(targetProject, targetInstance, request).Do()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error restoring backup: %w", err)
|
||||
}
|
||||
|
||||
return resp, nil
|
||||
}
|
||||
|
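To make the three identifier shapes concrete, here is a standalone sketch of the classification above with example identifiers (project, vault, and backup names are made up):

package main

import (
	"fmt"
	"regexp"
	"strconv"
)

// Same pattern as backupDRRegex above.
var backupDRRe = regexp.MustCompile(`^projects/([^/]+)/locations/([^/]+)/backupVaults/([^/]+)/dataSources/([^/]+)/backups/([^/]+)$`)

// classify reports which field of InstancesRestoreBackupRequest a given backup ID populates.
func classify(backupID string) string {
	if _, err := strconv.ParseInt(backupID, 10, 64); err == nil {
		return "RestoreBackupContext (BackupRun ID)"
	}
	if backupDRRe.MatchString(backupID) {
		return "BackupdrBackup (BackupDR backup name)"
	}
	return "Backup (project-level backup name)"
}

func main() {
	fmt.Println(classify("1700000000000"))
	fmt.Println(classify("projects/my-proj/locations/us-central1/backupVaults/vault-1/dataSources/ds-1/backups/backup-uid-1"))
	fmt.Println(classify("projects/my-proj/backups/backup-uid-1"))
}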
||||
func generateCloudSQLConnectionMessage(ctx context.Context, source *Source, logger log.Logger, opResponse map[string]any, connectionMessageTemplate string) (string, bool) {
|
||||
operationType, ok := opResponse["operationType"].(string)
|
||||
if !ok || operationType != "CREATE_DATABASE" {
|
||||
|
||||
@@ -28,6 +28,21 @@ import (
|
||||
|
||||
const kind string = "cloud-gemini-data-analytics-query"
|
||||
|
||||
// Guidance is the tool guidance string.
|
||||
const Guidance = `Tool guidance:
|
||||
Inputs:
|
||||
1. query: A natural language formulation of a database query.
|
||||
Outputs: (all optional)
|
||||
1. disambiguation_question: Clarification questions or comments where the tool needs the user's input.
|
||||
2. generated_query: The generated query for the user query.
|
||||
3. intent_explanation: An explanation for why the tool produced ` + "`generated_query`" + `.
|
||||
4. query_result: The result of executing ` + "`generated_query`" + `.
|
||||
5. natural_language_answer: The natural language answer that summarizes the ` + "`query`" + ` and ` + "`query_result`" + `.
|
||||
|
||||
Usage guidance:
|
||||
1. If ` + "`disambiguation_question`" + ` is produced, then solicit the needed inputs from the user and try the tool with a new ` + "`query`" + ` that has the needed clarification.
|
||||
2. If ` + "`natural_language_answer`" + ` is produced, use ` + "`intent_explanation`" + ` and ` + "`generated_query`" + ` to see if you need to clarify any assumptions for the user.`
|
||||
|
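Per the guidance above, the only input is the natural-language query. An illustrative invocation (mirroring the test further below) passes it as a single parameter; the question text is made up:

// Illustrative only; parameters is the internal util/parameters package used in this diff.
params := parameters.ParamValues{
	{Name: "query", Value: "How many accounts in Prague are eligible for loans?"},
}
_ = params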
||||
func init() {
|
||||
if !tools.Register(kind, newConfig) {
|
||||
panic(fmt.Sprintf("tool kind %q already registered", kind))
|
||||
@@ -68,11 +83,18 @@ func (cfg Config) ToolConfigKind() string {
|
||||
|
||||
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
|
||||
// Define the parameters for the Gemini Data Analytics Query API
|
||||
// The prompt is the only input parameter.
|
||||
// The query is the only input parameter.
|
||||
allParameters := parameters.Parameters{
|
||||
parameters.NewStringParameterWithRequired("prompt", "The natural language question to ask.", true),
|
||||
parameters.NewStringParameterWithRequired("query", "A natural language formulation of a database query.", true),
|
||||
}
|
||||
	// The inputs and outputs are for tool guidance; the usage guidance covers multi-turn interaction.
|
||||
guidance := Guidance
|
||||
|
||||
if cfg.Description != "" {
|
||||
cfg.Description += "\n\n" + guidance
|
||||
} else {
|
||||
cfg.Description = guidance
|
||||
}
|
||||
mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, allParameters, nil)
|
||||
|
||||
return Tool{
|
||||
@@ -105,9 +127,9 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
||||
}
|
||||
|
||||
paramsMap := params.AsMap()
|
||||
prompt, ok := paramsMap["prompt"].(string)
|
||||
query, ok := paramsMap["query"].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("prompt parameter not found or not a string")
|
||||
return nil, fmt.Errorf("query parameter not found or not a string")
|
||||
}
|
||||
|
||||
// Parse the access token if provided
|
||||
@@ -125,7 +147,7 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
||||
|
||||
payload := &QueryDataRequest{
|
||||
Parent: payloadParent,
|
||||
Prompt: prompt,
|
||||
Prompt: query,
|
||||
Context: t.Context,
|
||||
GenerationOptions: t.GenerationOptions,
|
||||
}
|
||||
|
||||
@@ -328,9 +328,9 @@ func TestInvoke(t *testing.T) {
|
||||
t.Fatalf("failed to initialize tool: %v", err)
|
||||
}
|
||||
|
||||
// Prepare parameters for invocation - ONLY prompt
|
||||
// Prepare parameters for invocation - ONLY query
|
||||
params := parameters.ParamValues{
|
||||
{Name: "prompt", Value: "How many accounts who have region in Prague are eligible for loans?"},
|
||||
{Name: "query", Value: "How many accounts who have region in Prague are eligible for loans?"},
|
||||
}
|
||||
|
||||
resourceMgr := resources.NewResourceManager(srcs, nil, nil, nil, nil, nil, nil)
|
||||
|
||||
@@ -103,10 +103,12 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", pageURLKey)
|
||||
}
|
||||
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
var tokenStr string
|
||||
if source.UseClientAuthorization() {
|
||||
tokenStr, err = accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
}
|
||||
return source.FHIRFetchPage(ctx, url, tokenStr)
|
||||
}
|
||||
|
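The same guard repeats across the Cloud Healthcare tools that follow. A hypothetical helper, not part of this change, could capture the pattern; it assumes only the UseClientAuthorization and ParseBearerToken methods that appear in this diff:

// bearerTokenIfClientAuth returns a bearer token only when the source requires
// client authorization; otherwise it returns an empty token and no error.
func bearerTokenIfClientAuth(useClientAuth bool, accessToken tools.AccessToken) (string, error) {
	if !useClientAuth {
		return "", nil
	}
	tokenStr, err := accessToken.ParseBearerToken()
	if err != nil {
		return "", fmt.Errorf("error parsing access token: %w", err)
	}
	return tokenStr, nil
}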
||||
@@ -131,9 +131,12 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
||||
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", patientIDKey)
|
||||
}
|
||||
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
var tokenStr string
|
||||
if source.UseClientAuthorization() {
|
||||
tokenStr, err = accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
var opts []googleapi.CallOption
|
||||
|
||||
@@ -161,9 +161,12 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
||||
return nil, err
|
||||
}
|
||||
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
var tokenStr string
|
||||
if source.UseClientAuthorization() {
|
||||
tokenStr, err = accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
var summary bool
|
||||
|
||||
@@ -95,9 +95,12 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
var tokenStr string
|
||||
if source.UseClientAuthorization() {
|
||||
tokenStr, err = accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
}
|
||||
return source.GetDataset(tokenStr)
|
||||
}
|
||||
|
||||
@@ -116,9 +116,12 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
var tokenStr string
|
||||
if source.UseClientAuthorization() {
|
||||
tokenStr, err = accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
}
|
||||
return source.GetDICOMStore(storeID, tokenStr)
|
||||
}
|
||||
|
||||
@@ -116,9 +116,12 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
var tokenStr string
|
||||
if source.UseClientAuthorization() {
|
||||
tokenStr, err = accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
}
|
||||
return source.GetDICOMStoreMetrics(storeID, tokenStr)
|
||||
}
|
||||
|
||||
@@ -130,9 +130,12 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", idKey)
|
||||
}
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
var tokenStr string
|
||||
if source.UseClientAuthorization() {
|
||||
tokenStr, err = accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
}
|
||||
return source.GetFHIRResource(storeID, resType, resID, tokenStr)
|
||||
}
|
||||
|
||||
@@ -116,9 +116,12 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
var tokenStr string
|
||||
if source.UseClientAuthorization() {
|
||||
tokenStr, err = accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
}
|
||||
return source.GetFHIRStore(storeID, tokenStr)
|
||||
}
|
||||
|
||||
@@ -116,9 +116,12 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
var tokenStr string
|
||||
if source.UseClientAuthorization() {
|
||||
tokenStr, err = accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
}
|
||||
return source.GetFHIRStoreMetrics(storeID, tokenStr)
|
||||
}
|
||||
|
||||
@@ -95,9 +95,12 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
var tokenStr string
|
||||
if source.UseClientAuthorization() {
|
||||
tokenStr, err = accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
}
|
||||
return source.ListDICOMStores(tokenStr)
|
||||
}
|
||||
|
||||
@@ -95,9 +95,12 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
var tokenStr string
|
||||
if source.UseClientAuthorization() {
|
||||
tokenStr, err = accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
}
|
||||
return source.ListFHIRStores(tokenStr)
|
||||
}
|
||||
|
||||
@@ -127,9 +127,12 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
var tokenStr string
|
||||
if source.UseClientAuthorization() {
|
||||
tokenStr, err = accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
}
|
||||
study, ok := params.AsMap()[studyInstanceUIDKey].(string)
|
||||
if !ok {
|
||||
|
||||
@@ -140,9 +140,12 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
var tokenStr string
|
||||
if source.UseClientAuthorization() {
|
||||
tokenStr, err = accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
opts, err := common.ParseDICOMSearchParameters(params, []string{sopInstanceUIDKey, patientNameKey, patientIDKey, accessionNumberKey, referringPhysicianNameKey, studyDateKey, modalityKey})
|
||||
|
||||
@@ -138,9 +138,12 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
var tokenStr string
|
||||
if source.UseClientAuthorization() {
|
||||
tokenStr, err = accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
opts, err := common.ParseDICOMSearchParameters(params, []string{seriesInstanceUIDKey, patientNameKey, patientIDKey, accessionNumberKey, referringPhysicianNameKey, studyDateKey, modalityKey})
|
||||
|
||||
@@ -133,9 +133,12 @@ func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, para
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tokenStr, err := accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
var tokenStr string
|
||||
if source.UseClientAuthorization() {
|
||||
tokenStr, err = accessToken.ParseBearerToken()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error parsing access token: %w", err)
|
||||
}
|
||||
}
|
||||
opts, err := common.ParseDICOMSearchParameters(params, []string{studyInstanceUIDKey, patientNameKey, patientIDKey, accessionNumberKey, referringPhysicianNameKey, studyDateKey})
|
||||
if err != nil {
|
||||
|
||||
@@ -0,0 +1,183 @@
|
||||
// Copyright 2026 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package cloudsqlrestorebackup
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util/parameters"
|
||||
"google.golang.org/api/sqladmin/v1"
|
||||
)
|
||||
|
||||
const kind string = "cloud-sql-restore-backup"
|
||||
|
||||
var _ tools.ToolConfig = Config{}
|
||||
|
||||
type compatibleSource interface {
|
||||
GetDefaultProject() string
|
||||
GetService(context.Context, string) (*sqladmin.Service, error)
|
||||
UseClientAuthorization() bool
|
||||
RestoreBackup(ctx context.Context, targetProject, targetInstance, sourceProject, sourceInstance, backupID, accessToken string) (any, error)
|
||||
}
|
||||
|
||||
// Config defines the configuration for the restore-backup tool.
|
||||
type Config struct {
|
||||
Name string `yaml:"name" validate:"required"`
|
||||
Kind string `yaml:"kind" validate:"required"`
|
||||
Description string `yaml:"description"`
|
||||
Source string `yaml:"source" validate:"required"`
|
||||
AuthRequired []string `yaml:"authRequired"`
|
||||
}
|
||||
|
||||
func init() {
|
||||
if !tools.Register(kind, newConfig) {
|
||||
panic(fmt.Sprintf("tool kind %q already registered", kind))
|
||||
}
|
||||
}
|
||||
|
||||
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
|
||||
actual := Config{Name: name}
|
||||
if err := decoder.DecodeContext(ctx, &actual); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return actual, nil
|
||||
}
|
||||
|
||||
// ToolConfigKind returns the kind of the tool.
|
||||
func (cfg Config) ToolConfigKind() string {
|
||||
return kind
|
||||
}
|
||||
|
||||
// Initialize initializes the tool from the configuration.
|
||||
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
|
||||
rawS, ok := srcs[cfg.Source]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("no source named %q configured", cfg.Source)
|
||||
}
|
||||
s, ok := rawS.(compatibleSource)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid source for %q tool: source %q not compatible", kind, cfg.Source)
|
||||
}
|
||||
|
||||
project := s.GetDefaultProject()
|
||||
var targetProjectParam parameters.Parameter
|
||||
if project != "" {
|
||||
targetProjectParam = parameters.NewStringParameterWithDefault("target_project", project, "The GCP project ID. This is pre-configured; do not ask for it unless the user explicitly provides a different one.")
|
||||
} else {
|
||||
targetProjectParam = parameters.NewStringParameter("target_project", "The project ID")
|
||||
}
|
||||
|
||||
allParameters := parameters.Parameters{
|
||||
targetProjectParam,
|
||||
parameters.NewStringParameter("target_instance", "Cloud SQL instance ID of the target instance. This does not include the project ID."),
|
||||
parameters.NewStringParameter("backup_id", "Identifier of the backup being restored. Can be a BackupRun ID, backup name, or BackupDR backup name. Use the full backup ID as provided, do not try to parse it"),
|
||||
parameters.NewStringParameterWithRequired("source_project", "GCP project ID of the instance that the backup belongs to. Only required if the backup_id is a BackupRun ID.", false),
|
||||
parameters.NewStringParameterWithRequired("source_instance", "Cloud SQL instance ID of the instance that the backup belongs to. Only required if the backup_id is a BackupRun ID.", false),
|
||||
}
|
||||
paramManifest := allParameters.Manifest()
|
||||
|
||||
description := cfg.Description
|
||||
if description == "" {
|
||||
description = "Restores a backup on a Cloud SQL instance."
|
||||
}
|
||||
|
||||
mcpManifest := tools.GetMcpManifest(cfg.Name, description, cfg.AuthRequired, allParameters, nil)
|
||||
|
||||
return Tool{
|
||||
Config: cfg,
|
||||
AllParams: allParameters,
|
||||
manifest: tools.Manifest{Description: description, Parameters: paramManifest, AuthRequired: cfg.AuthRequired},
|
||||
mcpManifest: mcpManifest,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Tool represents the restore-backup tool.
|
||||
type Tool struct {
|
||||
Config
|
||||
AllParams parameters.Parameters `yaml:"allParams"`
|
||||
manifest tools.Manifest
|
||||
mcpManifest tools.McpManifest
|
||||
}
|
||||
|
||||
func (t Tool) ToConfig() tools.ToolConfig {
|
||||
return t.Config
|
||||
}
|
||||
|
||||
func (t Tool) Invoke(ctx context.Context, resourceMgr tools.SourceProvider, params parameters.ParamValues, accessToken tools.AccessToken) (any, error) {
|
||||
source, err := tools.GetCompatibleSource[compatibleSource](resourceMgr, t.Source, t.Name, t.Kind)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
paramsMap := params.AsMap()
|
||||
|
||||
targetProject, ok := paramsMap["target_project"].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("error casting 'target_project' parameter: %v", paramsMap["target_project"])
|
||||
}
|
||||
targetInstance, ok := paramsMap["target_instance"].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("error casting 'target_instance' parameter: %v", paramsMap["target_instance"])
|
||||
}
|
||||
backupID, ok := paramsMap["backup_id"].(string)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("error casting 'backup_id' parameter: %v", paramsMap["backup_id"])
|
||||
}
|
||||
sourceProject, _ := paramsMap["source_project"].(string)
|
||||
sourceInstance, _ := paramsMap["source_instance"].(string)
|
||||
|
||||
return source.RestoreBackup(ctx, targetProject, targetInstance, sourceProject, sourceInstance, backupID, string(accessToken))
|
||||
}
|
||||
|
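For illustration, restoring from a BackupRun ID passes all five parameters, while a BackupDR or project-level backup name needs only the first three; every value below is hypothetical:

// Illustrative parameter values for Invoke above (project and instance names are made up).
params := parameters.ParamValues{
	{Name: "target_project", Value: "my-project"},
	{Name: "target_instance", Value: "restored-instance"},
	{Name: "backup_id", Value: "1700000000000"},
	{Name: "source_project", Value: "my-project"},
	{Name: "source_instance", Value: "prod-instance"},
}
_ = params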
||||
// ParseParams parses the parameters for the tool.
|
||||
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (parameters.ParamValues, error) {
|
||||
return parameters.ParseParams(t.AllParams, data, claims)
|
||||
}
|
||||
|
||||
func (t Tool) EmbedParams(ctx context.Context, paramValues parameters.ParamValues, embeddingModelsMap map[string]embeddingmodels.EmbeddingModel) (parameters.ParamValues, error) {
|
||||
return parameters.EmbedParams(ctx, t.AllParams, paramValues, embeddingModelsMap, nil)
|
||||
}
|
||||
|
||||
// Manifest returns the tool's manifest.
|
||||
func (t Tool) Manifest() tools.Manifest {
|
||||
return t.manifest
|
||||
}
|
||||
|
||||
// McpManifest returns the tool's MCP manifest.
|
||||
func (t Tool) McpManifest() tools.McpManifest {
|
||||
return t.mcpManifest
|
||||
}
|
||||
|
||||
// Authorized checks if the tool is authorized.
|
||||
func (t Tool) Authorized(verifiedAuthServices []string) bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (t Tool) RequiresClientAuthorization(resourceMgr tools.SourceProvider) (bool, error) {
|
||||
source, err := tools.GetCompatibleSource[compatibleSource](resourceMgr, t.Source, t.Name, t.Kind)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
return source.UseClientAuthorization(), nil
|
||||
}
|
||||
|
||||
func (t Tool) GetAuthTokenHeaderName(resourceMgr tools.SourceProvider) (string, error) {
|
||||
return "Authorization", nil
|
||||
}
|
||||
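A minimal illustrative sketch (not part of this change) of how the three backup_id shapes described by the parameters above (a numeric BackupRun ID, a project-level backup name, or a BackupDR backup name) might be told apart. The actual routing lives in the source's RestoreBackup method, which is outside this diff; the function below is hypothetical.

package main

import (
    "fmt"
    "strings"
)

// classifyBackupID is a hypothetical helper, shown only to illustrate the
// three accepted backup_id shapes; it is not code from this repository.
func classifyBackupID(backupID string) string {
    switch {
    case strings.Contains(backupID, "/backupVaults/"):
        // e.g. projects/p1/locations/us-central1/backupVaults/v/dataSources/d/backups/b
        return "BackupDR backup name"
    case strings.HasPrefix(backupID, "projects/"):
        // e.g. projects/p1/backups/test-uid
        return "project-level backup name"
    default:
        // e.g. 12345; this shape also needs source_project and source_instance
        return "BackupRun ID"
    }
}

func main() {
    fmt.Println(classifyBackupID("12345"))
    fmt.Println(classifyBackupID("projects/p1/backups/test-uid"))
}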
@@ -0,0 +1,71 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cloudsqlrestorebackup_test

import (
    "testing"

    yaml "github.com/goccy/go-yaml"
    "github.com/google/go-cmp/cmp"
    "github.com/googleapis/genai-toolbox/internal/server"
    "github.com/googleapis/genai-toolbox/internal/testutils"
    "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlrestorebackup"
)

func TestParseFromYaml(t *testing.T) {
    ctx, err := testutils.ContextWithNewLogger()
    if err != nil {
        t.Fatalf("unexpected error: %s", err)
    }
    tcs := []struct {
        desc string
        in   string
        want server.ToolConfigs
    }{
        {
            desc: "basic example",
            in: `
            tools:
                restore-backup-tool:
                    kind: cloud-sql-restore-backup
                    description: a test description
                    source: a-source
            `,
            want: server.ToolConfigs{
                "restore-backup-tool": cloudsqlrestorebackup.Config{
                    Name:         "restore-backup-tool",
                    Kind:         "cloud-sql-restore-backup",
                    Description:  "a test description",
                    Source:       "a-source",
                    AuthRequired: []string{},
                },
            },
        },
    }
    for _, tc := range tcs {
        t.Run(tc.desc, func(t *testing.T) {
            got := struct {
                Tools server.ToolConfigs `yaml:"tools"`
            }{}
            err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
            if err != nil {
                t.Fatalf("unable to unmarshal: %s", err)
            }
            if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
                t.Fatalf("incorrect parse: diff %v", diff)
            }
        })
    }
}
server.json (88 lines changed)
@@ -31,6 +31,18 @@
      "default": "tools.yaml",
      "isRequired": false
    },
    {
      "type": "named",
      "name": "--tools-files",
      "description": "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file or --tools-folder.",
      "isRequired": false
    },
    {
      "type": "named",
      "name": "--tools-folder",
      "description": "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file or --tools-files.",
      "isRequired": false
    },
    {
      "type": "named",
      "name": "--address",
@@ -70,6 +82,82 @@
        "warn",
        "error"
      ]
    },
    {
      "type": "named",
      "name": "--logging-format",
      "description": "Specify logging format to use.",
      "default": "standard",
      "choices": [
        "standard",
        "json"
      ]
    },
    {
      "type": "named",
      "name": "--disable-reload",
      "description": "Disables dynamic reloading of tools file.",
      "format": "boolean",
      "isRequired": false
    },
    {
      "type": "named",
      "name": "--prebuilt",
      "description": "Use a prebuilt tool configuration by source type.",
      "isRequired": false
    },
    {
      "type": "named",
      "name": "--stdio",
      "description": "Listens via MCP STDIO instead of acting as a remote HTTP server.",
      "format": "boolean",
      "isRequired": false
    },
    {
      "type": "named",
      "name": "--telemetry-gcp",
      "description": "Enable exporting directly to Google Cloud Monitoring.",
      "format": "boolean",
      "isRequired": false
    },
    {
      "type": "named",
      "name": "--telemetry-otlp",
      "description": "Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318').",
      "isRequired": false
    },
    {
      "type": "named",
      "name": "--telemetry-service-name",
      "description": "Sets the value of the service.name resource attribute for telemetry data.",
      "default": "toolbox",
      "isRequired": false
    },
    {
      "type": "named",
      "name": "--ui",
      "description": "Launches the Toolbox UI web server.",
      "format": "boolean",
      "isRequired": false
    },
    {
      "type": "named",
      "name": "--allowed-origins",
      "description": "Specifies a list of origins permitted to access this server.",
      "default": "*",
      "isRequired": false
    },
    {
      "type": "named",
      "name": "--help",
      "description": "Show help for toolbox",
      "isRequired": false
    },
    {
      "type": "named",
      "name": "--version",
      "description": "Show version for toolbox",
      "isRequired": false
    }
  ]
}

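For context, a minimal sketch (assuming a locally built toolbox binary on PATH; paths and flag values below are placeholders, not part of this change) of launching the server with a few of the flags documented above:

package main

import (
    "log"
    "os"
    "os/exec"
)

func main() {
    // --tools-folder loads and merges every .yaml/.yml file in the directory;
    // per the descriptions above it cannot be combined with --tools-file or --tools-files.
    cmd := exec.Command("toolbox",
        "--tools-folder", "./config",
        "--logging-format", "json",
        "--ui",
    )
    cmd.Stdout = os.Stdout
    cmd.Stderr = os.Stderr
    if err := cmd.Run(); err != nil {
        log.Fatalf("toolbox exited: %v", err)
    }
}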
tests/alloydbomni/alloydb_omni_integration_test.go (new file, 162 lines)
@@ -0,0 +1,162 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package alloydbomni

import (
    "context"
    "fmt"
    "net/url"
    "os"
    "regexp"
    "testing"
    "time"

    "github.com/googleapis/genai-toolbox/internal/testutils"
    "github.com/googleapis/genai-toolbox/tests"
    "github.com/jackc/pgx/v5/pgxpool"
    "github.com/testcontainers/testcontainers-go"
    "github.com/testcontainers/testcontainers-go/wait"
)

var (
    AlloyDBSourceKind = "alloydb-omni-source"
    AlloyDBToolKind   = "postgres-sql"
    AlloyDBUser       = "postgres"
    AlloyDBPass       = "mysecretpassword"
    AlloyDBDatabase   = "postgres"
)

// Copied over from postgres.go
func initAlloyDBConnectionPool(host, port, user, pass, dbname string) (*pgxpool.Pool, error) {
    // urlExample := "postgres://username:password@localhost:5432/database_name"
    url := &url.URL{
        Scheme: "postgres",
        User:   url.UserPassword(user, pass),
        Host:   fmt.Sprintf("%s:%s", host, port),
        Path:   dbname,
    }
    pool, err := pgxpool.New(context.Background(), url.String())
    if err != nil {
        return nil, fmt.Errorf("Unable to create connection pool: %w", err)
    }

    return pool, nil
}

func setupAlloyDBContainer(ctx context.Context, t *testing.T) (string, string, func()) {
    t.Helper()

    req := testcontainers.ContainerRequest{
        Image:        "google/alloydbomni:16.9.0-ubi9", // Pinning version for stability
        ExposedPorts: []string{"5432/tcp"},
        Env: map[string]string{
            "POSTGRES_PASSWORD": AlloyDBPass,
        },
        WaitingFor: wait.ForAll(
            wait.ForLog("Post Startup: Successfully reinstalled extensions"),
            wait.ForExposedPort(),
        ),
    }

    container, err := testcontainers.GenericContainer(ctx, testcontainers.GenericContainerRequest{
        ContainerRequest: req,
        Started:          true,
    })
    if err != nil {
        t.Fatalf("failed to start alloydb container: %s", err)
    }

    cleanup := func() {
        if err := container.Terminate(ctx); err != nil {
            t.Fatalf("failed to terminate container: %s", err)
        }
    }

    host, err := container.Host(ctx)
    if err != nil {
        cleanup()
        t.Fatalf("failed to get container host: %s", err)
    }

    mappedPort, err := container.MappedPort(ctx, "5432")
    if err != nil {
        cleanup()
        t.Fatalf("failed to get container mapped port: %s", err)
    }

    return host, mappedPort.Port(), cleanup
}

func TestAlloyDB(t *testing.T) {
    ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
    defer cancel()

    AlloyDBHost, AlloyDBPort, containerCleanup := setupAlloyDBContainer(ctx, t)
    defer containerCleanup()

    os.Setenv("ALLOYDB_OMNI_HOST", AlloyDBHost)
    os.Setenv("ALLOYDB_OMNI_PORT", AlloyDBPort)
    os.Setenv("ALLOYDB_OMNI_USER", AlloyDBUser)
    os.Setenv("ALLOYDB_OMNI_PASSWORD", AlloyDBPass)
    os.Setenv("ALLOYDB_OMNI_DATABASE", AlloyDBDatabase)

    args := []string{"--prebuilt", "alloydb-omni"}

    pool, err := initAlloyDBConnectionPool(AlloyDBHost, AlloyDBPort, AlloyDBUser, AlloyDBPass, AlloyDBDatabase)
    if err != nil {
        t.Fatalf("unable to create alloydb connection pool: %s", err)
    }

    // cleanup test environment
    tests.CleanupPostgresTables(t, ctx, pool)

    cmd, cleanup, err := tests.StartCmd(ctx, map[string]any{}, args...)
    if err != nil {
        t.Fatalf("command initialization returned an error: %s", err)
    }
    defer cleanup()

    // Wait for server to be ready
    waitCtx, waitCancel := context.WithTimeout(ctx, 30*time.Second)
    defer waitCancel()

    out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
    if err != nil {
        t.Logf("toolbox command logs: \n%s", out)
        t.Fatalf("toolbox didn't start successfully: %s", err)
    }

    // Run Postgres prebuilt tool tests
    tests.RunPostgresListViewsTest(t, ctx, pool)
    tests.RunPostgresListSchemasTest(t, ctx, pool)
    tests.RunPostgresListActiveQueriesTest(t, ctx, pool)
    tests.RunPostgresListAvailableExtensionsTest(t)
    tests.RunPostgresListInstalledExtensionsTest(t)
    tests.RunPostgresDatabaseOverviewTest(t, ctx, pool)
    tests.RunPostgresListTriggersTest(t, ctx, pool)
    tests.RunPostgresListIndexesTest(t, ctx, pool)
    tests.RunPostgresListSequencesTest(t, ctx, pool)
    tests.RunPostgresLongRunningTransactionsTest(t, ctx, pool)
    tests.RunPostgresListLocksTest(t, ctx, pool)
    tests.RunPostgresReplicationStatsTest(t, ctx, pool)
    tests.RunPostgresGetColumnCardinalityTest(t, ctx, pool)
    tests.RunPostgresListTableStatsTest(t, ctx, pool)
    tests.RunPostgresListPublicationTablesTest(t, ctx, pool)
    tests.RunPostgresListTableSpacesTest(t)
    tests.RunPostgresListPgSettingsTest(t, ctx, pool)
    tests.RunPostgresListDatabaseStatsTest(t, ctx, pool)
    tests.RunPostgresListRolesTest(t, ctx, pool)
    tests.RunPostgresListStoredProcedureTest(t, ctx, pool)
}
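A hypothetical sketch (not part of this change) of how another test in this package could reuse the two helpers above to get a throwaway AlloyDB Omni instance and a connection pool, registering cleanup with t.Cleanup instead of defers:

package alloydbomni

import (
    "context"
    "testing"

    "github.com/jackc/pgx/v5/pgxpool"
)

// newOmniPool is illustrative only: it starts the container via
// setupAlloyDBContainer, builds a pool with initAlloyDBConnectionPool, and
// registers both cleanups on the test.
func newOmniPool(ctx context.Context, t *testing.T) *pgxpool.Pool {
    t.Helper()
    host, port, cleanupContainer := setupAlloyDBContainer(ctx, t)
    t.Cleanup(cleanupContainer)

    pool, err := initAlloyDBConnectionPool(host, port, AlloyDBUser, AlloyDBPass, AlloyDBDatabase)
    if err != nil {
        t.Fatalf("unable to create alloydb connection pool: %s", err)
    }
    t.Cleanup(pool.Close)
    return pool
}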
@@ -147,12 +147,20 @@ func TestAlloyDBPgToolEndpoints(t *testing.T) {
    teardownTable2 := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
    defer teardownTable2(t)

    // Set up table for semantic search
    vectorTableName, tearDownVectorTable := tests.SetupPostgresVectorTable(t, ctx, pool)
    defer tearDownVectorTable(t)

    // Write config into a file and pass it to command
    toolsFile := tests.GetToolsConfig(sourceConfig, AlloyDBPostgresToolKind, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, authToolStmt)
    toolsFile = tests.AddExecuteSqlConfig(t, toolsFile, "postgres-execute-sql")
    tmplSelectCombined, tmplSelectFilterCombined := tests.GetPostgresSQLTmplToolStatement()
    toolsFile = tests.AddTemplateParamConfig(t, toolsFile, AlloyDBPostgresToolKind, tmplSelectCombined, tmplSelectFilterCombined, "")

    // Add semantic search tool config
    insertStmt, searchStmt := tests.GetPostgresVectorSearchStmts(vectorTableName)
    toolsFile = tests.AddSemanticSearchConfig(t, toolsFile, AlloyDBPostgresToolKind, insertStmt, searchStmt)

    toolsFile = tests.AddPostgresPrebuiltConfig(t, toolsFile)

    cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)

@@ -139,12 +139,12 @@ func TestCloudGdaToolEndpoints(t *testing.T) {
    // 1. RunToolGetTestByName
    expectedManifest := map[string]any{
        toolName: map[string]any{
            "description": "Test GDA Tool",
            "description": "Test GDA Tool\n\n" + cloudgda.Guidance,
            "parameters": []any{
                map[string]any{
                    "name": "prompt",
                    "name": "query",
                    "type": "string",
                    "description": "The natural language question to ask.",
                    "description": "A natural language formulation of a database query.",
                    "required": true,
                    "authSources": []any{},
                },
@@ -155,7 +155,7 @@ func TestCloudGdaToolEndpoints(t *testing.T) {
    tests.RunToolGetTestByName(t, toolName, expectedManifest)

    // 2. RunToolInvokeParametersTest
    params := []byte(`{"prompt": "test question"}`)
    params := []byte(`{"query": "test question"}`)
    tests.RunToolInvokeParametersTest(t, toolName, params, "\"queryResult\":\"SELECT * FROM table;\"")

    // 3. Manual MCP Tool Call Test
@@ -172,7 +172,7 @@ func TestCloudGdaToolEndpoints(t *testing.T) {
        Params: map[string]any{
            "name": toolName,
            "arguments": map[string]any{
                "prompt": "test question",
                "query": "test question",
            },
        },
    }

@@ -112,8 +112,7 @@ func TestHealthcareToolEndpoints(t *testing.T) {
    fhirStoreID := "fhir-store-" + uuid.New().String()
    dicomStoreID := "dicom-store-" + uuid.New().String()

    patient1ID, patient2ID, teardown := setupHealthcareResources(t, healthcareService, healthcareDataset, fhirStoreID, dicomStoreID)
    defer teardown(t)
    patient1ID, patient2ID := setupHealthcareResources(t, healthcareService, healthcareDataset, fhirStoreID, dicomStoreID)

    toolsFile := getToolsConfig(sourceConfig)
    toolsFile = addClientAuthSourceConfig(t, toolsFile)
@@ -173,10 +172,8 @@ func TestHealthcareToolWithStoreRestriction(t *testing.T) {
    disallowedFHIRStoreID := "fhir-store-disallowed-" + uuid.New().String()
    disallowedDICOMStoreID := "dicom-store-disallowed-" + uuid.New().String()

    _, _, teardownAllowedStores := setupHealthcareResources(t, healthcareService, healthcareDataset, allowedFHIRStoreID, allowedDICOMStoreID)
    defer teardownAllowedStores(t)
    _, _, teardownDisallowedStores := setupHealthcareResources(t, healthcareService, healthcareDataset, disallowedFHIRStoreID, disallowedDICOMStoreID)
    defer teardownDisallowedStores(t)
    setupHealthcareResources(t, healthcareService, healthcareDataset, allowedFHIRStoreID, allowedDICOMStoreID)
    setupHealthcareResources(t, healthcareService, healthcareDataset, disallowedFHIRStoreID, disallowedDICOMStoreID)

    // Configure source with dataset restriction.
    sourceConfig["allowedFhirStores"] = []string{allowedFHIRStoreID}
@@ -257,7 +254,7 @@ func newHealthcareService(ctx context.Context) (*healthcare.Service, error) {
    return healthcareService, nil
}

func setupHealthcareResources(t *testing.T, service *healthcare.Service, datasetID, fhirStoreID, dicomStoreID string) (string, string, func(*testing.T)) {
func setupHealthcareResources(t *testing.T, service *healthcare.Service, datasetID, fhirStoreID, dicomStoreID string) (string, string) {
    datasetName := fmt.Sprintf("projects/%s/locations/%s/datasets/%s", healthcareProject, healthcareRegion, datasetID)
    var err error

@@ -266,12 +263,24 @@ func setupHealthcareResources(t *testing.T, service *healthcare.Service, dataset
    if fhirStore, err = service.Projects.Locations.Datasets.FhirStores.Create(datasetName, fhirStore).FhirStoreId(fhirStoreID).Do(); err != nil {
        t.Fatalf("failed to create fhir store: %v", err)
    }
    // Register cleanup
    t.Cleanup(func() {
        if _, err := service.Projects.Locations.Datasets.FhirStores.Delete(fhirStore.Name).Do(); err != nil {
            t.Logf("failed to delete fhir store: %v", err)
        }
    })

    // Create DICOM store
    dicomStore := &healthcare.DicomStore{}
    if dicomStore, err = service.Projects.Locations.Datasets.DicomStores.Create(datasetName, dicomStore).DicomStoreId(dicomStoreID).Do(); err != nil {
        t.Fatalf("failed to create dicom store: %v", err)
    }
    // Register cleanup
    t.Cleanup(func() {
        if _, err := service.Projects.Locations.Datasets.DicomStores.Delete(dicomStore.Name).Do(); err != nil {
            t.Logf("failed to delete dicom store: %v", err)
        }
    })

    // Create Patient 1
    patient1Body := bytes.NewBuffer([]byte(`{
@@ -317,15 +326,7 @@
        createFHIRResource(t, service, fhirStore.Name, "Observation", observation2Body)
    }

    teardown := func(t *testing.T) {
        if _, err := service.Projects.Locations.Datasets.FhirStores.Delete(fhirStore.Name).Do(); err != nil {
            t.Logf("failed to delete fhir store: %v", err)
        }
        if _, err := service.Projects.Locations.Datasets.DicomStores.Delete(dicomStore.Name).Do(); err != nil {
            t.Logf("failed to delete dicom store: %v", err)
        }
    }
    return patient1ID, patient2ID, teardown
    return patient1ID, patient2ID
}

func getToolsConfig(sourceConfig map[string]any) map[string]any {

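The refactor above drops the returned teardown closure and instead registers deletion with t.Cleanup at the point each store is created, so callers no longer need a matching defer. A minimal sketch of the pattern in isolation (resource names are hypothetical):

package example

import "testing"

// setupResource creates a hypothetical resource and registers its cleanup
// immediately; t.Cleanup runs after the test and its subtests finish, even
// when the test exits early via Fatalf.
func setupResource(t *testing.T) string {
    t.Helper()
    name := "example-resource"
    // ... create the resource here ...
    t.Cleanup(func() {
        // ... delete the resource here; log rather than fail on error ...
        t.Logf("cleaned up %s", name)
    })
    return name
}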
tests/cloudsql/cloud_sql_restore_backup_test.go (new file, 267 lines)
@@ -0,0 +1,267 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cloudsql

import (
    "bytes"
    "context"
    "encoding/json"
    "fmt"
    "io"
    "net/http"
    "net/http/httptest"
    "net/url"
    "reflect"
    "regexp"
    "strings"
    "testing"
    "time"

    "github.com/google/go-cmp/cmp"
    "github.com/googleapis/genai-toolbox/internal/testutils"
    "github.com/googleapis/genai-toolbox/tests"
    "google.golang.org/api/sqladmin/v1"
)

var (
    restoreBackupToolKind = "cloud-sql-restore-backup"
)

type restoreBackupTransport struct {
    transport http.RoundTripper
    url       *url.URL
}

func (t *restoreBackupTransport) RoundTrip(req *http.Request) (*http.Response, error) {
    if strings.HasPrefix(req.URL.String(), "https://sqladmin.googleapis.com") {
        req.URL.Scheme = t.url.Scheme
        req.URL.Host = t.url.Host
    }
    return t.transport.RoundTrip(req)
}

type masterRestoreBackupHandler struct {
    t *testing.T
}

func (h *masterRestoreBackupHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
    if !strings.Contains(r.UserAgent(), "genai-toolbox/") {
        h.t.Errorf("User-Agent header not found")
    }
    var body sqladmin.InstancesRestoreBackupRequest
    if err := json.NewDecoder(r.Body).Decode(&body); err != nil {
        h.t.Fatalf("failed to decode request body: %v", err)
    } else {
        h.t.Logf("Received request body: %+v", body)
    }

    var expectedBody sqladmin.InstancesRestoreBackupRequest
    var response any
    var statusCode int

    switch {
    case body.Backup != "":
        expectedBody = sqladmin.InstancesRestoreBackupRequest{
            Backup: "projects/p1/backups/test-uid",
        }
        response = map[string]any{"name": "op1", "status": "PENDING"}
        statusCode = http.StatusOK
    case body.BackupdrBackup != "":
        expectedBody = sqladmin.InstancesRestoreBackupRequest{
            BackupdrBackup: "projects/p1/locations/us-central1/backupVaults/test-vault/dataSources/test-ds/backups/test-uid",
        }
        response = map[string]any{"name": "op1", "status": "PENDING"}
        statusCode = http.StatusOK
    case body.RestoreBackupContext != nil:
        expectedBody = sqladmin.InstancesRestoreBackupRequest{
            RestoreBackupContext: &sqladmin.RestoreBackupContext{
                Project:     "p1",
                InstanceId:  "source",
                BackupRunId: 12345,
            },
        }
        response = map[string]any{"name": "op1", "status": "PENDING"}
        statusCode = http.StatusOK
    default:
        http.Error(w, fmt.Sprintf("unhandled restore request body: %v", body), http.StatusInternalServerError)
        return
    }

    if diff := cmp.Diff(expectedBody, body); diff != "" {
        h.t.Errorf("unexpected request body (-want +got):\n%s", diff)
    }

    w.Header().Set("Content-Type", "application/json")
    w.WriteHeader(statusCode)
    if err := json.NewEncoder(w).Encode(response); err != nil {
        http.Error(w, err.Error(), http.StatusInternalServerError)
    }
}

func TestRestoreBackupToolEndpoints(t *testing.T) {
    ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
    defer cancel()

    handler := &masterRestoreBackupHandler{t: t}
    server := httptest.NewServer(handler)
    defer server.Close()

    serverURL, err := url.Parse(server.URL)
    if err != nil {
        t.Fatalf("failed to parse server URL: %v", err)
    }

    originalTransport := http.DefaultClient.Transport
    if originalTransport == nil {
        originalTransport = http.DefaultTransport
    }
    http.DefaultClient.Transport = &restoreBackupTransport{
        transport: originalTransport,
        url:       serverURL,
    }
    t.Cleanup(func() {
        http.DefaultClient.Transport = originalTransport
    })

    var args []string
    toolsFile := getRestoreBackupToolsConfig()
    cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
    if err != nil {
        t.Fatalf("command initialization returned an error: %s", err)
    }
    defer cleanup()

    waitCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
    defer cancel()
    out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
    if err != nil {
        t.Logf("toolbox command logs: \n%s", out)
        t.Fatalf("toolbox didn't start successfully: %s", err)
    }

    tcs := []struct {
        name        string
        toolName    string
        body        string
        want        string
        expectError bool
        errorStatus int
    }{
        {
            name:     "successful restore with standard backup",
            toolName: "restore-backup",
            body:     `{"target_project": "p1", "target_instance": "instance-standard", "backup_id": "12345", "source_project": "p1", "source_instance": "source"}`,
            want:     `{"name":"op1","status":"PENDING"}`,
        },
        {
            name:     "successful restore with project level backup",
            toolName: "restore-backup",
            body:     `{"target_project": "p1", "target_instance": "instance-project-level", "backup_id": "projects/p1/backups/test-uid"}`,
            want:     `{"name":"op1","status":"PENDING"}`,
        },
        {
            name:     "successful restore with BackupDR backup",
            toolName: "restore-backup",
            body:     `{"target_project": "p1", "target_instance": "instance-project-level", "backup_id": "projects/p1/locations/us-central1/backupVaults/test-vault/dataSources/test-ds/backups/test-uid"}`,
            want:     `{"name":"op1","status":"PENDING"}`,
        },
        {
            name:        "missing source instance info for standard backup",
            toolName:    "restore-backup",
            body:        `{"target_project": "p1", "target_instance": "instance-project-level", "backup_id": "12345"}`,
            expectError: true,
            errorStatus: http.StatusBadRequest,
        },
        {
            name:        "missing backup identifier",
            toolName:    "restore-backup",
            body:        `{"target_project": "p1", "target_instance": "instance-project-level"}`,
            expectError: true,
            errorStatus: http.StatusBadRequest,
        },
        {
            name:        "missing target instance info",
            toolName:    "restore-backup",
            body:        `{"backup_id": "12345"}`,
            expectError: true,
            errorStatus: http.StatusBadRequest,
        },
    }

    for _, tc := range tcs {
        tc := tc
        t.Run(tc.name, func(t *testing.T) {
            api := fmt.Sprintf("http://127.0.0.1:5000/api/tool/%s/invoke", tc.toolName)
            req, err := http.NewRequest(http.MethodPost, api, bytes.NewBufferString(tc.body))
            if err != nil {
                t.Fatalf("unable to create request: %s", err)
            }
            req.Header.Add("Content-type", "application/json")
            resp, err := http.DefaultClient.Do(req)
            if err != nil {
                t.Fatalf("unable to send request: %s", err)
            }
            defer resp.Body.Close()

            if tc.expectError {
                if resp.StatusCode != tc.errorStatus {
                    bodyBytes, _ := io.ReadAll(resp.Body)
                    t.Fatalf("expected status %d but got %d: %s", tc.errorStatus, resp.StatusCode, string(bodyBytes))
                }
                return
            }

            if resp.StatusCode != http.StatusOK {
                bodyBytes, _ := io.ReadAll(resp.Body)
                t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
            }

            var result struct {
                Result string `json:"result"`
            }
            if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
                t.Fatalf("failed to decode response: %v", err)
            }

            var got, want map[string]any
            if err := json.Unmarshal([]byte(result.Result), &got); err != nil {
                t.Fatalf("failed to unmarshal result: %v", err)
            }
            if err := json.Unmarshal([]byte(tc.want), &want); err != nil {
                t.Fatalf("failed to unmarshal want: %v", err)
            }

            if !reflect.DeepEqual(got, want) {
                t.Fatalf("unexpected result: got %+v, want %+v", got, want)
            }
        })
    }
}

func getRestoreBackupToolsConfig() map[string]any {
    return map[string]any{
        "sources": map[string]any{
            "my-cloud-sql-source": map[string]any{
                "kind": "cloud-sql-admin",
            },
        },
        "tools": map[string]any{
            "restore-backup": map[string]any{
                "kind":   restoreBackupToolKind,
                "source": "my-cloud-sql-source",
            },
        },
    }
}
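The test above redirects SQL Admin API traffic by swapping http.DefaultClient.Transport for the lifetime of the test. A sketch of the same redirection scoped to a dedicated client, which avoids mutating the process-wide default (illustrative only; type and function names are hypothetical):

package example

import (
    "net/http"
    "net/http/httptest"
    "net/url"
    "strings"
)

// rewriteTransport sends requests aimed at the real API host to a local
// httptest server and leaves everything else untouched.
type rewriteTransport struct {
    base http.RoundTripper
    to   *url.URL
}

func (t *rewriteTransport) RoundTrip(req *http.Request) (*http.Response, error) {
    if strings.HasPrefix(req.URL.String(), "https://sqladmin.googleapis.com") {
        req.URL.Scheme = t.to.Scheme
        req.URL.Host = t.to.Host
    }
    return t.base.RoundTrip(req)
}

// newRedirectedClient builds a client whose SQL Admin traffic is served by the
// given test server; scoping the override to one client keeps parallel tests
// from stepping on a shared http.DefaultClient.
func newRedirectedClient(srv *httptest.Server) (*http.Client, error) {
    u, err := url.Parse(srv.URL)
    if err != nil {
        return nil, err
    }
    return &http.Client{Transport: &rewriteTransport{base: http.DefaultTransport, to: u}}, nil
}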
@@ -132,12 +132,20 @@ func TestCloudSQLPgSimpleToolEndpoints(t *testing.T) {
    teardownTable2 := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
    defer teardownTable2(t)

    // Set up table for semantic search
    vectorTableName, tearDownVectorTable := tests.SetupPostgresVectorTable(t, ctx, pool)
    defer tearDownVectorTable(t)

    // Write config into a file and pass it to command
    toolsFile := tests.GetToolsConfig(sourceConfig, CloudSQLPostgresToolKind, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, authToolStmt)
    toolsFile = tests.AddExecuteSqlConfig(t, toolsFile, "postgres-execute-sql")
    tmplSelectCombined, tmplSelectFilterCombined := tests.GetPostgresSQLTmplToolStatement()
    toolsFile = tests.AddTemplateParamConfig(t, toolsFile, CloudSQLPostgresToolKind, tmplSelectCombined, tmplSelectFilterCombined, "")

    // Add semantic search tool config
    insertStmt, searchStmt := tests.GetPostgresVectorSearchStmts(vectorTableName)
    toolsFile = tests.AddSemanticSearchConfig(t, toolsFile, CloudSQLPostgresToolKind, insertStmt, searchStmt)

    toolsFile = tests.AddPostgresPrebuiltConfig(t, toolsFile)
    cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
    if err != nil {
@@ -186,6 +194,7 @@ func TestCloudSQLPgSimpleToolEndpoints(t *testing.T) {
    tests.RunPostgresListDatabaseStatsTest(t, ctx, pool)
    tests.RunPostgresListRolesTest(t, ctx, pool)
    tests.RunPostgresListStoredProcedureTest(t, ctx, pool)
    tests.RunSemanticSearchToolInvokeTest(t, "null", "", "The quick brown fox")
}

// Test connection with different IP type

tests/embedding.go (new file, 251 lines)
@@ -0,0 +1,251 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Package tests contains end to end tests meant to verify the Toolbox Server
// works as expected when executed as a binary.

package tests

import (
    "bytes"
    "context"
    "encoding/json"
    "fmt"
    "io"
    "net/http"
    "os"
    "strings"
    "testing"

    "github.com/google/uuid"
    "github.com/googleapis/genai-toolbox/internal/server/mcp/jsonrpc"
    "github.com/jackc/pgx/v5/pgxpool"
)

var apiKey = os.Getenv("API_KEY")

// AddSemanticSearchConfig adds embedding models and semantic search tools to the config
// with configurable tool kind and SQL statements.
func AddSemanticSearchConfig(t *testing.T, config map[string]any, toolKind, insertStmt, searchStmt string) map[string]any {
    config["embeddingModels"] = map[string]any{
        "gemini_model": map[string]any{
            "kind":      "gemini",
            "model":     "gemini-embedding-001",
            "apiKey":    apiKey,
            "dimension": 768,
        },
    }

    tools, ok := config["tools"].(map[string]any)
    if !ok {
        t.Fatalf("unable to get tools from config")
    }

    tools["insert_docs"] = map[string]any{
        "kind":        toolKind,
        "source":      "my-instance",
        "description": "Stores content and its vector embedding into the documents table.",
        "statement":   insertStmt,
        "parameters": []any{
            map[string]any{
                "name":        "content",
                "type":        "string",
                "description": "The text content associated with the vector.",
            },
            map[string]any{
                "name":        "text_to_embed",
                "type":        "string",
                "description": "The text content used to generate the vector.",
                "embeddedBy":  "gemini_model",
            },
        },
    }

    tools["search_docs"] = map[string]any{
        "kind":        toolKind,
        "source":      "my-instance",
        "description": "Finds the most semantically similar document to the query vector.",
        "statement":   searchStmt,
        "parameters": []any{
            map[string]any{
                "name":        "query",
                "type":        "string",
                "description": "The text content to search for.",
                "embeddedBy":  "gemini_model",
            },
        },
    }

    config["tools"] = tools
    return config
}

// RunSemanticSearchToolInvokeTest runs the insert_docs and search_docs tools
// via both HTTP and MCP endpoints and verifies the output.
func RunSemanticSearchToolInvokeTest(t *testing.T, insertWant, mcpInsertWant, searchWant string) {
    // Initialize MCP session once for the MCP test cases
    sessionId := RunInitialize(t, "2024-11-05")

    tcs := []struct {
        name        string
        api         string
        isMcp       bool
        requestBody interface{}
        want        string
    }{
        {
            name:        "HTTP invoke insert_docs",
            api:         "http://127.0.0.1:5000/api/tool/insert_docs/invoke",
            isMcp:       false,
            requestBody: `{"content": "The quick brown fox jumps over the lazy dog", "text_to_embed": "The quick brown fox jumps over the lazy dog"}`,
            want:        insertWant,
        },
        {
            name:        "HTTP invoke search_docs",
            api:         "http://127.0.0.1:5000/api/tool/search_docs/invoke",
            isMcp:       false,
            requestBody: `{"query": "fast fox jumping"}`,
            want:        searchWant,
        },
        {
            name:  "MCP invoke insert_docs",
            api:   "http://127.0.0.1:5000/mcp",
            isMcp: true,
            requestBody: jsonrpc.JSONRPCRequest{
                Jsonrpc: "2.0",
                Id:      "mcp-insert-docs",
                Request: jsonrpc.Request{
                    Method: "tools/call",
                },
                Params: map[string]any{
                    "name": "insert_docs",
                    "arguments": map[string]any{
                        "content":       "The quick brown fox jumps over the lazy dog",
                        "text_to_embed": "The quick brown fox jumps over the lazy dog",
                    },
                },
            },
            want: mcpInsertWant,
        },
        {
            name:  "MCP invoke search_docs",
            api:   "http://127.0.0.1:5000/mcp",
            isMcp: true,
            requestBody: jsonrpc.JSONRPCRequest{
                Jsonrpc: "2.0",
                Id:      "mcp-search-docs",
                Request: jsonrpc.Request{
                    Method: "tools/call",
                },
                Params: map[string]any{
                    "name": "search_docs",
                    "arguments": map[string]any{
                        "query": "fast fox jumping",
                    },
                },
            },
            want: searchWant,
        },
    }

    for _, tc := range tcs {
        t.Run(tc.name, func(t *testing.T) {
            var bodyReader io.Reader
            headers := map[string]string{}

            // Prepare Request Body and Headers
            if tc.isMcp {
                reqBytes, err := json.Marshal(tc.requestBody)
                if err != nil {
                    t.Fatalf("failed to marshal mcp request: %v", err)
                }
                bodyReader = bytes.NewBuffer(reqBytes)
                if sessionId != "" {
                    headers["Mcp-Session-Id"] = sessionId
                }
            } else {
                bodyReader = bytes.NewBufferString(tc.requestBody.(string))
            }

            // Send Request
            resp, respBody := RunRequest(t, http.MethodPost, tc.api, bodyReader, headers)

            if resp.StatusCode != http.StatusOK {
                t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(respBody))
            }

            // Normalize Response to get the actual tool result string
            var got string
            if tc.isMcp {
                var mcpResp struct {
                    Result struct {
                        Content []struct {
                            Text string `json:"text"`
                        } `json:"content"`
                    } `json:"result"`
                }
                if err := json.Unmarshal(respBody, &mcpResp); err != nil {
                    t.Fatalf("error parsing mcp response: %s", err)
                }
                if len(mcpResp.Result.Content) > 0 {
                    got = mcpResp.Result.Content[0].Text
                }
            } else {
                var httpResp map[string]interface{}
                if err := json.Unmarshal(respBody, &httpResp); err != nil {
                    t.Fatalf("error parsing http response: %s", err)
                }
                if res, ok := httpResp["result"].(string); ok {
                    got = res
                }
            }

            if !strings.Contains(got, tc.want) {
                t.Fatalf("unexpected value: got %q, want %q", got, tc.want)
            }
        })
    }
}

// SetupPostgresVectorTable sets up the vector extension and a vector table
func SetupPostgresVectorTable(t *testing.T, ctx context.Context, pool *pgxpool.Pool) (string, func(*testing.T)) {
    t.Helper()
    if _, err := pool.Exec(ctx, "CREATE EXTENSION IF NOT EXISTS vector"); err != nil {
        t.Fatalf("failed to create vector extension: %v", err)
    }

    tableName := "vector_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")

    createTableStmt := fmt.Sprintf(`CREATE TABLE %s (
        id SERIAL PRIMARY KEY,
        content TEXT,
        embedding vector(768)
    )`, tableName)

    if _, err := pool.Exec(ctx, createTableStmt); err != nil {
        t.Fatalf("failed to create table %s: %v", tableName, err)
    }

    return tableName, func(t *testing.T) {
        if _, err := pool.Exec(ctx, fmt.Sprintf("DROP TABLE IF EXISTS %s", tableName)); err != nil {
            t.Errorf("failed to drop table %s: %v", tableName, err)
        }
    }
}

func GetPostgresVectorSearchStmts(vectorTableName string) (string, string) {
    insertStmt := fmt.Sprintf("INSERT INTO %s (content, embedding) VALUES ($1, $2)", vectorTableName)
    searchStmt := fmt.Sprintf("SELECT id, content, embedding <-> $1 AS distance FROM %s ORDER BY distance LIMIT 1", vectorTableName)
    return insertStmt, searchStmt
}
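For reference, an illustrative sketch (not part of this change) of what the helper above returns for a hypothetical table name, and which values end up bound to each placeholder; parameters declared with embeddedBy are expected to arrive as 768-dimension vectors rather than raw strings.

package tests

// exampleVectorSearchStmts is hypothetical and only documents the generated
// statements; real table names carry a UUID suffix.
func exampleVectorSearchStmts() {
    insertStmt, searchStmt := GetPostgresVectorSearchStmts("docs_example")
    // insertStmt: INSERT INTO docs_example (content, embedding) VALUES ($1, $2)
    //   $1 = the content string, $2 = the embedding generated for text_to_embed
    // searchStmt: SELECT id, content, embedding <-> $1 AS distance FROM docs_example ORDER BY distance LIMIT 1
    //   $1 = the embedding generated for the query parameter
    _, _ = insertStmt, searchStmt
}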
@@ -111,6 +111,10 @@ func TestPostgres(t *testing.T) {
    teardownTable2 := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
    defer teardownTable2(t)

    // Set up table for semantic search
    vectorTableName, tearDownVectorTable := tests.SetupPostgresVectorTable(t, ctx, pool)
    defer tearDownVectorTable(t)

    // Write config into a file and pass it to command
    toolsFile := tests.GetToolsConfig(sourceConfig, PostgresToolKind, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, authToolStmt)
    toolsFile = tests.AddExecuteSqlConfig(t, toolsFile, "postgres-execute-sql")
@@ -118,6 +122,10 @@ func TestPostgres(t *testing.T) {
    toolsFile = tests.AddTemplateParamConfig(t, toolsFile, PostgresToolKind, tmplSelectCombined, tmplSelectFilterCombined, "")
    toolsFile = tests.AddPostgresPrebuiltConfig(t, toolsFile)

    // Add semantic search tool config
    insertStmt, searchStmt := tests.GetPostgresVectorSearchStmts(vectorTableName)
    toolsFile = tests.AddSemanticSearchConfig(t, toolsFile, PostgresToolKind, insertStmt, searchStmt)

    cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
    if err != nil {
        t.Fatalf("command initialization returned an error: %s", err)
@@ -165,4 +173,5 @@ func TestPostgres(t *testing.T) {
    tests.RunPostgresListDatabaseStatsTest(t, ctx, pool)
    tests.RunPostgresListRolesTest(t, ctx, pool)
    tests.RunPostgresListStoredProcedureTest(t, ctx, pool)
    tests.RunSemanticSearchToolInvokeTest(t, "null", "", "The quick brown fox")
}

@@ -1240,7 +1240,10 @@ func RunPostgresListTablesTest(t *testing.T, tableNameParam, tableNameAuth, user
    var filteredGot []any
    for _, item := range got {
        if tableMap, ok := item.(map[string]interface{}); ok {
            if schema, ok := tableMap["schema_name"]; ok && schema == "public" {
                name, _ := tableMap["object_name"].(string)

                // Only keep the table if it matches expected test tables
                if name == tableNameParam || name == tableNameAuth {
                    filteredGot = append(filteredGot, item)
                }
            }
