feat(tools/postgres-list-tables): Add new tool postgres source (#1284)

## Description
---
This pull request introduces a new custom tool kind
`postgres-list-tables` that allows users to list tables within a
Postgres database.

### Example Configuration

```yaml
tools:
  postgres_list_tables:
    kind: postgres-list-tables
    source: postgres-source
    description: Use this tool to retrieve schema information for all or specified tables. Output format can be simple (only table names) or detailed.
```

### Example Request
``` 
curl -X POST http://127.0.0.1:5000/api/tool/postgres_list_tables/invoke \
-H "Content-Type: application/json" \
-d '{
    "table_names": "users",
    "output_format": "simple"
}'
```
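
### Example Response
A sketch of what the request above might return (the wrapped `result` string follows the server's response format used in the integration tests; the `users` table is illustrative):

```json
{
  "result": "[{\"schema_name\":\"public\",\"object_name\":\"users\",\"object_details\":{\"name\":\"users\"}}]"
}
```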

## PR Checklist
---
> Thank you for opening a Pull Request! Before submitting your PR, there are a
> few things you can do to make sure it goes smoothly:
- [x] Make sure you reviewed
  [CONTRIBUTING.md](https://github.com/googleapis/genai-toolbox/blob/main/CONTRIBUTING.md)
- [ ] Make sure to open an issue as a
  [bug/issue](https://github.com/googleapis/langchain-google-alloydb-pg-python/issues/new/choose)
  before writing your code! That way we can discuss the change, evaluate
  designs, and agree on the general idea
- [x] Ensure the tests and linter pass
- [x] Code coverage does not decrease (if any source code was changed)
- [x] Appropriate docs were updated (if necessary)
- [x] Make sure to add `!` if this involves a breaking change

---------

Co-authored-by: Averi Kitsch <akitsch@google.com>
Authored by Sri Varshitha on 2025-09-11 15:13:09 +05:30, committed by GitHub
parent 33beb7187d
commit 71f360d315
11 changed files with 583 additions and 262 deletions

View File

@@ -106,6 +106,7 @@ import (
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbaseexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbasesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
_ "github.com/googleapis/genai-toolbox/internal/tools/redis"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql"

View File

@@ -33,6 +33,9 @@ cluster][alloydb-free-trial].
- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
Run parameterized SQL statements in AlloyDB Postgres.
- [`postgres-list-tables`](../tools/postgres/postgres-list-tables.md)
List tables in an AlloyDB for PostgreSQL database.
### Pre-built Configurations
- [AlloyDB using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/alloydb_pg_mcp/)

View File

@@ -28,6 +28,9 @@ to a database by following these instructions][csql-pg-quickstart].
- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
Run parameterized SQL statements in PostgreSQL.
- [`postgres-list-tables`](../tools/postgres/postgres-list-tables.md)
List tables in a PostgreSQL database.
### Pre-built Configurations
- [Cloud SQL for Postgres using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/cloud_sql_pg_mcp/)

View File

@@ -23,6 +23,9 @@ reputation for reliability, feature robustness, and performance.
- [`postgres-execute-sql`](../tools/postgres/postgres-execute-sql.md)
Run parameterized SQL statements in PostgreSQL.
- [`postgres-list-tables`](../tools/postgres/postgres-list-tables.md)
List tables in a PostgreSQL database.
### Pre-built Configurations
- [PostgreSQL using MCP](https://googleapis.github.io/genai-toolbox/how-to/connect-ide/postgres_mcp/)

View File

@@ -0,0 +1,39 @@
---
title: "postgres-list-tables"
type: docs
weight: 1
description: >
The "postgres-list-tables" tool lists schema information for all or specified tables in a Postgres database.
aliases:
- /resources/tools/postgres-list-tables
---
## About
The `postgres-list-tables` tool retrieves schema information for all or specified tables in a Postgres database. It's compatible with any of the following sources:
- [alloydb-postgres](../../sources/alloydb-pg.md)
- [cloud-sql-postgres](../../sources/cloud-sql-pg.md)
- [postgres](../../sources/postgres.md)
`postgres-list-tables` lists detailed schema information (object type, columns, constraints, indexes, triggers, owner, comment) as JSON for user-created tables (ordinary or partitioned). The tool takes the following input parameters:
* `table_names` (optional): Filters the output by a comma-separated list of table names. If omitted, all tables in user schemas are listed.
* `output_format` (optional): Indicates the output format of the table schema. `simple` returns only the table names; `detailed` returns the full table information. Default: `detailed`.
## Example
```yaml
tools:
postgres_list_tables:
kind: postgres-list-tables
source: postgres-source
description: Use this tool to retrieve schema information for all or specified tables. Output format can be simple (only table names) or detailed.
```
## Reference
| **field** | **type** | **required** | **description** |
|-------------|:------------------------------------------:|:------------:|--------------------------------------------------------------------------------------------------|
| kind | string | true | Must be "postgres-list-tables". |
| source | string | true | Name of the source the list-tables query should execute on. |
| description | string | true | Description of the tool that is passed to the agent. |
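
## Example Output
A sketch of one `detailed` entry as produced by the tool (the `users` table, its `id` column, and the `postgres` owner are assumed values; key names follow the tool's query):
```json
{
  "schema_name": "public",
  "object_name": "users",
  "object_details": {
    "schema_name": "public",
    "object_name": "users",
    "object_type": "TABLE",
    "owner": "postgres",
    "comment": null,
    "columns": [
      {
        "column_name": "id",
        "data_type": "integer",
        "ordinal_position": 1,
        "is_not_nullable": true,
        "column_default": null,
        "column_comment": null
      }
    ],
    "constraints": [],
    "indexes": [],
    "triggers": []
  }
}
```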

View File

@@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
sources:
alloydb-pg-source:
kind: "alloydb-postgres"
@@ -30,93 +31,9 @@ tools:
description: Use this tool to execute sql.
list_tables:
kind: postgres-sql
kind: postgres-list-tables
source: alloydb-pg-source
description: "Lists detailed schema information (object type, columns, constraints, indexes, triggers, owner, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
statement: |
WITH desired_relkinds AS (
SELECT ARRAY['r', 'p']::char[] AS kinds -- Always consider both 'TABLE' and 'PARTITIONED TABLE'
),
table_info AS (
SELECT
t.oid AS table_oid,
ns.nspname AS schema_name,
t.relname AS table_name,
pg_get_userbyid(t.relowner) AS table_owner,
obj_description(t.oid, 'pg_class') AS table_comment,
t.relkind AS object_kind
FROM
pg_class t
JOIN
pg_namespace ns ON ns.oid = t.relnamespace
CROSS JOIN desired_relkinds dk
WHERE
t.relkind = ANY(dk.kinds) -- Filter by selected table relkinds ('r', 'p')
AND (NULLIF(TRIM($1), '') IS NULL OR t.relname = ANY(string_to_array($1,','))) -- $1 is object_names
AND ns.nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
AND ns.nspname NOT LIKE 'pg_temp_%' AND ns.nspname NOT LIKE 'pg_toast_temp_%'
),
columns_info AS (
SELECT
att.attrelid AS table_oid, att.attname AS column_name, format_type(att.atttypid, att.atttypmod) AS data_type,
att.attnum AS column_ordinal_position, att.attnotnull AS is_not_nullable,
pg_get_expr(ad.adbin, ad.adrelid) AS column_default, col_description(att.attrelid, att.attnum) AS column_comment
FROM pg_attribute att LEFT JOIN pg_attrdef ad ON att.attrelid = ad.adrelid AND att.attnum = ad.adnum
JOIN table_info ti ON att.attrelid = ti.table_oid WHERE att.attnum > 0 AND NOT att.attisdropped
),
constraints_info AS (
SELECT
con.conrelid AS table_oid, con.conname AS constraint_name, pg_get_constraintdef(con.oid) AS constraint_definition,
CASE con.contype WHEN 'p' THEN 'PRIMARY KEY' WHEN 'f' THEN 'FOREIGN KEY' WHEN 'u' THEN 'UNIQUE' WHEN 'c' THEN 'CHECK' ELSE con.contype::text END AS constraint_type,
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.conkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.conrelid AND att.attnum = u.attnum) AS constraint_columns,
NULLIF(con.confrelid, 0)::regclass AS foreign_key_referenced_table,
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.confkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.confrelid AND att.attnum = u.attnum WHERE con.contype = 'f') AS foreign_key_referenced_columns
FROM pg_constraint con JOIN table_info ti ON con.conrelid = ti.table_oid
),
indexes_info AS (
SELECT
idx.indrelid AS table_oid, ic.relname AS index_name, pg_get_indexdef(idx.indexrelid) AS index_definition,
idx.indisunique AS is_unique, idx.indisprimary AS is_primary, am.amname AS index_method,
(SELECT array_agg(att.attname ORDER BY u.ord) FROM unnest(idx.indkey::int[]) WITH ORDINALITY AS u(colidx, ord) LEFT JOIN pg_attribute att ON att.attrelid = idx.indrelid AND att.attnum = u.colidx WHERE u.colidx <> 0) AS index_columns
FROM pg_index idx JOIN pg_class ic ON ic.oid = idx.indexrelid JOIN pg_am am ON am.oid = ic.relam JOIN table_info ti ON idx.indrelid = ti.table_oid
),
triggers_info AS (
SELECT tg.tgrelid AS table_oid, tg.tgname AS trigger_name, pg_get_triggerdef(tg.oid) AS trigger_definition, tg.tgenabled AS trigger_enabled_state
FROM pg_trigger tg JOIN table_info ti ON tg.tgrelid = ti.table_oid WHERE NOT tg.tgisinternal
)
SELECT
ti.schema_name,
ti.table_name AS object_name,
CASE
WHEN $2 = 'simple' THEN
-- IF format is 'simple', return basic JSON
json_build_object('name', ti.table_name)
ELSE
json_build_object(
'schema_name', ti.schema_name,
'object_name', ti.table_name,
'object_type', CASE ti.object_kind
WHEN 'r' THEN 'TABLE'
WHEN 'p' THEN 'PARTITIONED TABLE'
ELSE ti.object_kind::text -- Should not happen due to WHERE clause
END,
'owner', ti.table_owner,
'comment', ti.table_comment,
'columns', COALESCE((SELECT json_agg(json_build_object('column_name',ci.column_name,'data_type',ci.data_type,'ordinal_position',ci.column_ordinal_position,'is_not_nullable',ci.is_not_nullable,'column_default',ci.column_default,'column_comment',ci.column_comment) ORDER BY ci.column_ordinal_position) FROM columns_info ci WHERE ci.table_oid = ti.table_oid), '[]'::json),
'constraints', COALESCE((SELECT json_agg(json_build_object('constraint_name',cons.constraint_name,'constraint_type',cons.constraint_type,'constraint_definition',cons.constraint_definition,'constraint_columns',cons.constraint_columns,'foreign_key_referenced_table',cons.foreign_key_referenced_table,'foreign_key_referenced_columns',cons.foreign_key_referenced_columns)) FROM constraints_info cons WHERE cons.table_oid = ti.table_oid), '[]'::json),
'indexes', COALESCE((SELECT json_agg(json_build_object('index_name',ii.index_name,'index_definition',ii.index_definition,'is_unique',ii.is_unique,'is_primary',ii.is_primary,'index_method',ii.index_method,'index_columns',ii.index_columns)) FROM indexes_info ii WHERE ii.table_oid = ti.table_oid), '[]'::json),
'triggers', COALESCE((SELECT json_agg(json_build_object('trigger_name',tri.trigger_name,'trigger_definition',tri.trigger_definition,'trigger_enabled_state',tri.trigger_enabled_state)) FROM triggers_info tri WHERE tri.table_oid = ti.table_oid), '[]'::json)
)
END AS object_details
FROM table_info ti ORDER BY ti.schema_name, ti.table_name;
parameters:
- name: table_names
type: string
description: "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed."
- name: output_format
type: string
description: "Optional: Use 'simple' to return table names only or use 'detailed' to return the full information schema."
default: "detailed"
toolsets:
alloydb-postgres-database-tools:

View File

@@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
sources:
cloudsql-pg-source:
kind: cloud-sql-postgres
@@ -29,93 +30,9 @@ tools:
description: Use this tool to execute sql.
list_tables:
kind: postgres-sql
kind: postgres-list-tables
source: cloudsql-pg-source
description: "Lists detailed schema information (object type, columns, constraints, indexes, triggers, owner, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
statement: |
WITH desired_relkinds AS (
SELECT ARRAY['r', 'p']::char[] AS kinds -- Always consider both 'TABLE' and 'PARTITIONED TABLE'
),
table_info AS (
SELECT
t.oid AS table_oid,
ns.nspname AS schema_name,
t.relname AS table_name,
pg_get_userbyid(t.relowner) AS table_owner,
obj_description(t.oid, 'pg_class') AS table_comment,
t.relkind AS object_kind
FROM
pg_class t
JOIN
pg_namespace ns ON ns.oid = t.relnamespace
CROSS JOIN desired_relkinds dk
WHERE
t.relkind = ANY(dk.kinds) -- Filter by selected table relkinds ('r', 'p')
AND (NULLIF(TRIM($1), '') IS NULL OR t.relname = ANY(string_to_array($1,','))) -- $1 is object_names
AND ns.nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
AND ns.nspname NOT LIKE 'pg_temp_%' AND ns.nspname NOT LIKE 'pg_toast_temp_%'
),
columns_info AS (
SELECT
att.attrelid AS table_oid, att.attname AS column_name, format_type(att.atttypid, att.atttypmod) AS data_type,
att.attnum AS column_ordinal_position, att.attnotnull AS is_not_nullable,
pg_get_expr(ad.adbin, ad.adrelid) AS column_default, col_description(att.attrelid, att.attnum) AS column_comment
FROM pg_attribute att LEFT JOIN pg_attrdef ad ON att.attrelid = ad.adrelid AND att.attnum = ad.adnum
JOIN table_info ti ON att.attrelid = ti.table_oid WHERE att.attnum > 0 AND NOT att.attisdropped
),
constraints_info AS (
SELECT
con.conrelid AS table_oid, con.conname AS constraint_name, pg_get_constraintdef(con.oid) AS constraint_definition,
CASE con.contype WHEN 'p' THEN 'PRIMARY KEY' WHEN 'f' THEN 'FOREIGN KEY' WHEN 'u' THEN 'UNIQUE' WHEN 'c' THEN 'CHECK' ELSE con.contype::text END AS constraint_type,
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.conkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.conrelid AND att.attnum = u.attnum) AS constraint_columns,
NULLIF(con.confrelid, 0)::regclass AS foreign_key_referenced_table,
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.confkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.confrelid AND att.attnum = u.attnum WHERE con.contype = 'f') AS foreign_key_referenced_columns
FROM pg_constraint con JOIN table_info ti ON con.conrelid = ti.table_oid
),
indexes_info AS (
SELECT
idx.indrelid AS table_oid, ic.relname AS index_name, pg_get_indexdef(idx.indexrelid) AS index_definition,
idx.indisunique AS is_unique, idx.indisprimary AS is_primary, am.amname AS index_method,
(SELECT array_agg(att.attname ORDER BY u.ord) FROM unnest(idx.indkey::int[]) WITH ORDINALITY AS u(colidx, ord) LEFT JOIN pg_attribute att ON att.attrelid = idx.indrelid AND att.attnum = u.colidx WHERE u.colidx <> 0) AS index_columns
FROM pg_index idx JOIN pg_class ic ON ic.oid = idx.indexrelid JOIN pg_am am ON am.oid = ic.relam JOIN table_info ti ON idx.indrelid = ti.table_oid
),
triggers_info AS (
SELECT tg.tgrelid AS table_oid, tg.tgname AS trigger_name, pg_get_triggerdef(tg.oid) AS trigger_definition, tg.tgenabled AS trigger_enabled_state
FROM pg_trigger tg JOIN table_info ti ON tg.tgrelid = ti.table_oid WHERE NOT tg.tgisinternal
)
SELECT
ti.schema_name,
ti.table_name AS object_name,
CASE
WHEN $2 = 'simple' THEN
-- IF format is 'simple', return basic JSON
json_build_object('name', ti.table_name)
ELSE
json_build_object(
'schema_name', ti.schema_name,
'object_name', ti.table_name,
'object_type', CASE ti.object_kind
WHEN 'r' THEN 'TABLE'
WHEN 'p' THEN 'PARTITIONED TABLE'
ELSE ti.object_kind::text -- Should not happen due to WHERE clause
END,
'owner', ti.table_owner,
'comment', ti.table_comment,
'columns', COALESCE((SELECT json_agg(json_build_object('column_name',ci.column_name,'data_type',ci.data_type,'ordinal_position',ci.column_ordinal_position,'is_not_nullable',ci.is_not_nullable,'column_default',ci.column_default,'column_comment',ci.column_comment) ORDER BY ci.column_ordinal_position) FROM columns_info ci WHERE ci.table_oid = ti.table_oid), '[]'::json),
'constraints', COALESCE((SELECT json_agg(json_build_object('constraint_name',cons.constraint_name,'constraint_type',cons.constraint_type,'constraint_definition',cons.constraint_definition,'constraint_columns',cons.constraint_columns,'foreign_key_referenced_table',cons.foreign_key_referenced_table,'foreign_key_referenced_columns',cons.foreign_key_referenced_columns)) FROM constraints_info cons WHERE cons.table_oid = ti.table_oid), '[]'::json),
'indexes', COALESCE((SELECT json_agg(json_build_object('index_name',ii.index_name,'index_definition',ii.index_definition,'is_unique',ii.is_unique,'is_primary',ii.is_primary,'index_method',ii.index_method,'index_columns',ii.index_columns)) FROM indexes_info ii WHERE ii.table_oid = ti.table_oid), '[]'::json),
'triggers', COALESCE((SELECT json_agg(json_build_object('trigger_name',tri.trigger_name,'trigger_definition',tri.trigger_definition,'trigger_enabled_state',tri.trigger_enabled_state)) FROM triggers_info tri WHERE tri.table_oid = ti.table_oid), '[]'::json)
)
END AS object_details
FROM table_info ti ORDER BY ti.schema_name, ti.table_name;
parameters:
- name: table_names
type: string
description: "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed."
- name: output_format
type: string
description: "Optional: Use 'simple' to return table names only or use 'detailed' to return the full information schema."
default: "detailed"
toolsets:
cloud-sql-postgres-database-tools:

View File

@@ -11,6 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
sources:
postgresql-source:
kind: postgres
@@ -28,93 +29,9 @@ tools:
description: Use this tool to execute SQL.
list_tables:
kind: postgres-sql
kind: postgres-list-tables
source: postgresql-source
description: "Lists detailed schema information (object type, columns, constraints, indexes, triggers, owner, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
statement: |
WITH desired_relkinds AS (
SELECT ARRAY['r', 'p']::char[] AS kinds -- Always consider both 'TABLE' and 'PARTITIONED TABLE'
),
table_info AS (
SELECT
t.oid AS table_oid,
ns.nspname AS schema_name,
t.relname AS table_name,
pg_get_userbyid(t.relowner) AS table_owner,
obj_description(t.oid, 'pg_class') AS table_comment,
t.relkind AS object_kind
FROM
pg_class t
JOIN
pg_namespace ns ON ns.oid = t.relnamespace
CROSS JOIN desired_relkinds dk
WHERE
t.relkind = ANY(dk.kinds) -- Filter by selected table relkinds ('r', 'p')
AND (NULLIF(TRIM($1), '') IS NULL OR t.relname = ANY(string_to_array($1,','))) -- $1 is object_names
AND ns.nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
AND ns.nspname NOT LIKE 'pg_temp_%' AND ns.nspname NOT LIKE 'pg_toast_temp_%'
),
columns_info AS (
SELECT
att.attrelid AS table_oid, att.attname AS column_name, format_type(att.atttypid, att.atttypmod) AS data_type,
att.attnum AS column_ordinal_position, att.attnotnull AS is_not_nullable,
pg_get_expr(ad.adbin, ad.adrelid) AS column_default, col_description(att.attrelid, att.attnum) AS column_comment
FROM pg_attribute att LEFT JOIN pg_attrdef ad ON att.attrelid = ad.adrelid AND att.attnum = ad.adnum
JOIN table_info ti ON att.attrelid = ti.table_oid WHERE att.attnum > 0 AND NOT att.attisdropped
),
constraints_info AS (
SELECT
con.conrelid AS table_oid, con.conname AS constraint_name, pg_get_constraintdef(con.oid) AS constraint_definition,
CASE con.contype WHEN 'p' THEN 'PRIMARY KEY' WHEN 'f' THEN 'FOREIGN KEY' WHEN 'u' THEN 'UNIQUE' WHEN 'c' THEN 'CHECK' ELSE con.contype::text END AS constraint_type,
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.conkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.conrelid AND att.attnum = u.attnum) AS constraint_columns,
NULLIF(con.confrelid, 0)::regclass AS foreign_key_referenced_table,
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.confkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.confrelid AND att.attnum = u.attnum WHERE con.contype = 'f') AS foreign_key_referenced_columns
FROM pg_constraint con JOIN table_info ti ON con.conrelid = ti.table_oid
),
indexes_info AS (
SELECT
idx.indrelid AS table_oid, ic.relname AS index_name, pg_get_indexdef(idx.indexrelid) AS index_definition,
idx.indisunique AS is_unique, idx.indisprimary AS is_primary, am.amname AS index_method,
(SELECT array_agg(att.attname ORDER BY u.ord) FROM unnest(idx.indkey::int[]) WITH ORDINALITY AS u(colidx, ord) LEFT JOIN pg_attribute att ON att.attrelid = idx.indrelid AND att.attnum = u.colidx WHERE u.colidx <> 0) AS index_columns
FROM pg_index idx JOIN pg_class ic ON ic.oid = idx.indexrelid JOIN pg_am am ON am.oid = ic.relam JOIN table_info ti ON idx.indrelid = ti.table_oid
),
triggers_info AS (
SELECT tg.tgrelid AS table_oid, tg.tgname AS trigger_name, pg_get_triggerdef(tg.oid) AS trigger_definition, tg.tgenabled AS trigger_enabled_state
FROM pg_trigger tg JOIN table_info ti ON tg.tgrelid = ti.table_oid WHERE NOT tg.tgisinternal
)
SELECT
ti.schema_name,
ti.table_name AS object_name,
CASE
WHEN $2 = 'simple' THEN
-- IF format is 'simple', return basic JSON
json_build_object('name', ti.table_name)
ELSE
json_build_object(
'schema_name', ti.schema_name,
'object_name', ti.table_name,
'object_type', CASE ti.object_kind
WHEN 'r' THEN 'TABLE'
WHEN 'p' THEN 'PARTITIONED TABLE'
ELSE ti.object_kind::text -- Should not happen due to WHERE clause
END,
'owner', ti.table_owner,
'comment', ti.table_comment,
'columns', COALESCE((SELECT json_agg(json_build_object('column_name',ci.column_name,'data_type',ci.data_type,'ordinal_position',ci.column_ordinal_position,'is_not_nullable',ci.is_not_nullable,'column_default',ci.column_default,'column_comment',ci.column_comment) ORDER BY ci.column_ordinal_position) FROM columns_info ci WHERE ci.table_oid = ti.table_oid), '[]'::json),
'constraints', COALESCE((SELECT json_agg(json_build_object('constraint_name',cons.constraint_name,'constraint_type',cons.constraint_type,'constraint_definition',cons.constraint_definition,'constraint_columns',cons.constraint_columns,'foreign_key_referenced_table',cons.foreign_key_referenced_table,'foreign_key_referenced_columns',cons.foreign_key_referenced_columns)) FROM constraints_info cons WHERE cons.table_oid = ti.table_oid), '[]'::json),
'indexes', COALESCE((SELECT json_agg(json_build_object('index_name',ii.index_name,'index_definition',ii.index_definition,'is_unique',ii.is_unique,'is_primary',ii.is_primary,'index_method',ii.index_method,'index_columns',ii.index_columns)) FROM indexes_info ii WHERE ii.table_oid = ti.table_oid), '[]'::json),
'triggers', COALESCE((SELECT json_agg(json_build_object('trigger_name',tri.trigger_name,'trigger_definition',tri.trigger_definition,'trigger_enabled_state',tri.trigger_enabled_state)) FROM triggers_info tri WHERE tri.table_oid = ti.table_oid), '[]'::json)
)
END AS object_details
FROM table_info ti ORDER BY ti.schema_name, ti.table_name;
parameters:
- name: table_names
type: string
description: "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed."
- name: output_format
type: string
description: "Optional: Use 'simple' to return table names only or use 'detailed' to return the full information schema."
default: "detailed"
toolsets:
postgres-database-tools:

View File

@@ -0,0 +1,261 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package postgreslisttables
import (
"context"
"fmt"
yaml "github.com/goccy/go-yaml"
"github.com/googleapis/genai-toolbox/internal/sources"
"github.com/googleapis/genai-toolbox/internal/sources/alloydbpg"
"github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg"
"github.com/googleapis/genai-toolbox/internal/sources/postgres"
"github.com/googleapis/genai-toolbox/internal/tools"
"github.com/jackc/pgx/v5/pgxpool"
)
const kind string = "postgres-list-tables"
const listTablesStatement = `
WITH desired_relkinds AS (
SELECT ARRAY['r', 'p']::char[] AS kinds -- Always consider both 'TABLE' and 'PARTITIONED TABLE'
),
table_info AS (
SELECT
t.oid AS table_oid,
ns.nspname AS schema_name,
t.relname AS table_name,
pg_get_userbyid(t.relowner) AS table_owner,
obj_description(t.oid, 'pg_class') AS table_comment,
t.relkind AS object_kind
FROM
pg_class t
JOIN
pg_namespace ns ON ns.oid = t.relnamespace
CROSS JOIN desired_relkinds dk
WHERE
t.relkind = ANY(dk.kinds) -- Filter by selected table relkinds ('r', 'p')
AND (NULLIF(TRIM($1), '') IS NULL OR t.relname = ANY(string_to_array($1,','))) -- $1 is object_names
AND ns.nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
AND ns.nspname NOT LIKE 'pg_temp_%' AND ns.nspname NOT LIKE 'pg_toast_temp_%'
),
columns_info AS (
SELECT
att.attrelid AS table_oid, att.attname AS column_name, format_type(att.atttypid, att.atttypmod) AS data_type,
att.attnum AS column_ordinal_position, att.attnotnull AS is_not_nullable,
pg_get_expr(ad.adbin, ad.adrelid) AS column_default, col_description(att.attrelid, att.attnum) AS column_comment
FROM pg_attribute att LEFT JOIN pg_attrdef ad ON att.attrelid = ad.adrelid AND att.attnum = ad.adnum
JOIN table_info ti ON att.attrelid = ti.table_oid WHERE att.attnum > 0 AND NOT att.attisdropped
),
constraints_info AS (
SELECT
con.conrelid AS table_oid, con.conname AS constraint_name, pg_get_constraintdef(con.oid) AS constraint_definition,
CASE con.contype WHEN 'p' THEN 'PRIMARY KEY' WHEN 'f' THEN 'FOREIGN KEY' WHEN 'u' THEN 'UNIQUE' WHEN 'c' THEN 'CHECK' ELSE con.contype::text END AS constraint_type,
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.conkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.conrelid AND att.attnum = u.attnum) AS constraint_columns,
NULLIF(con.confrelid, 0)::regclass AS foreign_key_referenced_table,
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.confkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.confrelid AND att.attnum = u.attnum WHERE con.contype = 'f') AS foreign_key_referenced_columns
FROM pg_constraint con JOIN table_info ti ON con.conrelid = ti.table_oid
),
indexes_info AS (
SELECT
idx.indrelid AS table_oid, ic.relname AS index_name, pg_get_indexdef(idx.indexrelid) AS index_definition,
idx.indisunique AS is_unique, idx.indisprimary AS is_primary, am.amname AS index_method,
(SELECT array_agg(att.attname ORDER BY u.ord) FROM unnest(idx.indkey::int[]) WITH ORDINALITY AS u(colidx, ord) LEFT JOIN pg_attribute att ON att.attrelid = idx.indrelid AND att.attnum = u.colidx WHERE u.colidx <> 0) AS index_columns
FROM pg_index idx JOIN pg_class ic ON ic.oid = idx.indexrelid JOIN pg_am am ON am.oid = ic.relam JOIN table_info ti ON idx.indrelid = ti.table_oid
),
triggers_info AS (
SELECT tg.tgrelid AS table_oid, tg.tgname AS trigger_name, pg_get_triggerdef(tg.oid) AS trigger_definition, tg.tgenabled AS trigger_enabled_state
FROM pg_trigger tg JOIN table_info ti ON tg.tgrelid = ti.table_oid WHERE NOT tg.tgisinternal
)
SELECT
ti.schema_name,
ti.table_name AS object_name,
CASE
WHEN $2 = 'simple' THEN
-- IF format is 'simple', return basic JSON
json_build_object('name', ti.table_name)
ELSE
json_build_object(
'schema_name', ti.schema_name,
'object_name', ti.table_name,
'object_type', CASE ti.object_kind
WHEN 'r' THEN 'TABLE'
WHEN 'p' THEN 'PARTITIONED TABLE'
ELSE ti.object_kind::text -- Should not happen due to WHERE clause
END,
'owner', ti.table_owner,
'comment', ti.table_comment,
'columns', COALESCE((SELECT json_agg(json_build_object('column_name',ci.column_name,'data_type',ci.data_type,'ordinal_position',ci.column_ordinal_position,'is_not_nullable',ci.is_not_nullable,'column_default',ci.column_default,'column_comment',ci.column_comment) ORDER BY ci.column_ordinal_position) FROM columns_info ci WHERE ci.table_oid = ti.table_oid), '[]'::json),
'constraints', COALESCE((SELECT json_agg(json_build_object('constraint_name',cons.constraint_name,'constraint_type',cons.constraint_type,'constraint_definition',cons.constraint_definition,'constraint_columns',cons.constraint_columns,'foreign_key_referenced_table',cons.foreign_key_referenced_table,'foreign_key_referenced_columns',cons.foreign_key_referenced_columns)) FROM constraints_info cons WHERE cons.table_oid = ti.table_oid), '[]'::json),
'indexes', COALESCE((SELECT json_agg(json_build_object('index_name',ii.index_name,'index_definition',ii.index_definition,'is_unique',ii.is_unique,'is_primary',ii.is_primary,'index_method',ii.index_method,'index_columns',ii.index_columns)) FROM indexes_info ii WHERE ii.table_oid = ti.table_oid), '[]'::json),
'triggers', COALESCE((SELECT json_agg(json_build_object('trigger_name',tri.trigger_name,'trigger_definition',tri.trigger_definition,'trigger_enabled_state',tri.trigger_enabled_state)) FROM triggers_info tri WHERE tri.table_oid = ti.table_oid), '[]'::json)
)
END AS object_details
FROM table_info ti ORDER BY ti.schema_name, ti.table_name;
`
func init() {
if !tools.Register(kind, newConfig) {
panic(fmt.Sprintf("tool kind %q already registered", kind))
}
}
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
actual := Config{Name: name}
if err := decoder.DecodeContext(ctx, &actual); err != nil {
return nil, err
}
return actual, nil
}
type compatibleSource interface {
PostgresPool() *pgxpool.Pool
}
// validate compatible sources are still compatible
var _ compatibleSource = &alloydbpg.Source{}
var _ compatibleSource = &cloudsqlpg.Source{}
var _ compatibleSource = &postgres.Source{}
var compatibleSources = [...]string{alloydbpg.SourceKind, cloudsqlpg.SourceKind, postgres.SourceKind}
type Config struct {
Name string `yaml:"name" validate:"required"`
Kind string `yaml:"kind" validate:"required"`
Source string `yaml:"source" validate:"required"`
Description string `yaml:"description" validate:"required"`
AuthRequired []string `yaml:"authRequired"`
}
// validate interface
var _ tools.ToolConfig = Config{}
func (cfg Config) ToolConfigKind() string {
return kind
}
func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
// verify source exists
rawS, ok := srcs[cfg.Source]
if !ok {
return nil, fmt.Errorf("no source named %q configured", cfg.Source)
}
// verify the source is compatible
s, ok := rawS.(compatibleSource)
if !ok {
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
}
allParameters := tools.Parameters{
tools.NewStringParameterWithDefault("table_names", "", "Optional: A comma-separated list of table names. If empty, details for all tables will be listed."),
tools.NewStringParameterWithDefault("output_format", "detailed", "Optional: Use 'simple' for names only or 'detailed' for full info."),
}
paramManifest := allParameters.Manifest()
inputSchema := allParameters.McpManifest()
mcpManifest := tools.McpManifest{
Name: cfg.Name,
Description: cfg.Description,
InputSchema: inputSchema,
}
t := Tool{
Name: cfg.Name,
Kind: kind,
AuthRequired: cfg.AuthRequired,
AllParams: allParameters,
Pool: s.PostgresPool(),
manifest: tools.Manifest{Description: cfg.Description, Parameters: paramManifest, AuthRequired: cfg.AuthRequired},
mcpManifest: mcpManifest,
}
return t, nil
}
// validate interface
var _ tools.Tool = Tool{}
type Tool struct {
Name string `yaml:"name"`
Kind string `yaml:"kind"`
AuthRequired []string `yaml:"authRequired"`
AllParams tools.Parameters `yaml:"allParams"`
Pool *pgxpool.Pool
manifest tools.Manifest
mcpManifest tools.McpManifest
}
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
paramsMap := params.AsMap()
tableNames, ok := paramsMap["table_names"].(string)
if !ok {
return nil, fmt.Errorf("invalid 'table_names' parameter; expected a string")
}
outputFormat, _ := paramsMap["output_format"].(string)
if outputFormat != "simple" && outputFormat != "detailed" {
return nil, fmt.Errorf("invalid value for output_format: must be 'simple' or 'detailed', but got %q", outputFormat)
}
results, err := t.Pool.Query(ctx, listTablesStatement, tableNames, outputFormat)
if err != nil {
return nil, fmt.Errorf("unable to execute query: %w", err)
}
defer results.Close()
fields := results.FieldDescriptions()
var out []map[string]any
for results.Next() {
values, err := results.Values()
if err != nil {
return nil, fmt.Errorf("unable to parse row: %w", err)
}
rowMap := make(map[string]any)
for i, field := range fields {
rowMap[string(field.Name)] = values[i]
}
out = append(out, rowMap)
}
if err := results.Err(); err != nil {
return nil, fmt.Errorf("error reading query results: %w", err)
}
return out, nil
}
func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
return tools.ParseParams(t.AllParams, data, claims)
}
func (t Tool) Manifest() tools.Manifest {
return t.manifest
}
func (t Tool) McpManifest() tools.McpManifest {
return t.mcpManifest
}
func (t Tool) Authorized(verifiedAuthServices []string) bool {
return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}
func (t Tool) RequiresClientAuthorization() bool {
return false
}

View File

@@ -0,0 +1,76 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package postgreslisttables_test
import (
"testing"
yaml "github.com/goccy/go-yaml"
"github.com/google/go-cmp/cmp"
"github.com/googleapis/genai-toolbox/internal/server"
"github.com/googleapis/genai-toolbox/internal/testutils"
postgreslisttables "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables"
)
func TestParseFromYamlPostgresListTables(t *testing.T) {
ctx, err := testutils.ContextWithNewLogger()
if err != nil {
t.Fatalf("unexpected error: %s", err)
}
tcs := []struct {
desc string
in string
want server.ToolConfigs
}{
{
desc: "basic example",
in: `
tools:
example_tool:
kind: postgres-list-tables
source: my-postgres-instance
description: some description
authRequired:
- my-google-auth-service
- other-auth-service
`,
want: server.ToolConfigs{
"example_tool": postgreslisttables.Config{
Name: "example_tool",
Kind: "postgres-list-tables",
Source: "my-postgres-instance",
Description: "some description",
AuthRequired: []string{"my-google-auth-service", "other-auth-service"},
},
},
},
}
for _, tc := range tcs {
t.Run(tc.desc, func(t *testing.T) {
got := struct {
Tools server.ToolConfigs `yaml:"tools"`
}{}
// Parse contents
err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
if err != nil {
t.Fatalf("unable to unmarshal: %s", err)
}
if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
t.Fatalf("incorrect parse: diff %v", diff)
}
})
}
}

View File

@@ -15,11 +15,17 @@
package postgres
import (
"bytes"
"context"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"os"
"reflect"
"regexp"
"sort"
"strings"
"testing"
"time"
@@ -31,13 +37,14 @@ import (
)
var (
PostgresSourceKind = "postgres"
PostgresToolKind = "postgres-sql"
PostgresDatabase = os.Getenv("POSTGRES_DATABASE")
PostgresHost = os.Getenv("POSTGRES_HOST")
PostgresPort = os.Getenv("POSTGRES_PORT")
PostgresUser = os.Getenv("POSTGRES_USER")
PostgresPass = os.Getenv("POSTGRES_PASS")
PostgresSourceKind = "postgres"
PostgresToolKind = "postgres-sql"
PostgresListTablesToolKind = "postgres-list-tables"
PostgresDatabase = os.Getenv("POSTGRES_DATABASE")
PostgresHost = os.Getenv("POSTGRES_HOST")
PostgresPort = os.Getenv("POSTGRES_PORT")
PostgresUser = os.Getenv("POSTGRES_USER")
PostgresPass = os.Getenv("POSTGRES_PASS")
)
func getPostgresVars(t *testing.T) map[string]any {
@@ -64,6 +71,20 @@ func getPostgresVars(t *testing.T) map[string]any {
}
}
func addPrebuiltToolConfig(t *testing.T, config map[string]any) map[string]any {
tools, ok := config["tools"].(map[string]any)
if !ok {
t.Fatalf("unable to get tools from config")
}
tools["list_tables"] = map[string]any{
"kind": PostgresListTablesToolKind,
"source": "my-instance",
"description": "Lists tables in the database.",
}
config["tools"] = tools
return config
}
// Copied over from postgres.go
func initPostgresConnectionPool(host, port, user, pass, dbname string) (*pgxpool.Pool, error) {
// urlExample := "postgres://username:password@localhost:5432/database_name"
@@ -114,6 +135,8 @@ func TestPostgres(t *testing.T) {
tmplSelectCombined, tmplSelectFilterCombined := tests.GetPostgresSQLTmplToolStatement()
toolsFile = tests.AddTemplateParamConfig(t, toolsFile, PostgresToolKind, tmplSelectCombined, tmplSelectFilterCombined, "")
toolsFile = addPrebuiltToolConfig(t, toolsFile)
cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
if err != nil {
t.Fatalf("command initialization returned an error: %s", err)
@@ -137,4 +160,165 @@ func TestPostgres(t *testing.T) {
tests.RunMCPToolCallMethod(t, mcpMyFailToolWant, mcpSelect1Want)
tests.RunExecuteSqlToolInvokeTest(t, createTableStatement, select1Want)
tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam)
// Run specific Postgres tool tests
runPostgresListTablesTest(t, tableNameParam, tableNameAuth)
}
func runPostgresListTablesTest(t *testing.T, tableNameParam, tableNameAuth string) {
// TableNameParam columns to construct want
paramTableColumns := fmt.Sprintf(`[
{"data_type": "integer", "column_name": "id", "column_default": "nextval('%s_id_seq'::regclass)", "is_not_nullable": true, "ordinal_position": 1, "column_comment": null},
{"data_type": "text", "column_name": "name", "column_default": null, "is_not_nullable": false, "ordinal_position": 2, "column_comment": null}
]`, tableNameParam)
// TableNameAuth columns to construct want
authTableColumns := fmt.Sprintf(`[
{"data_type": "integer", "column_name": "id", "column_default": "nextval('%s_id_seq'::regclass)", "is_not_nullable": true, "ordinal_position": 1, "column_comment": null},
{"data_type": "text", "column_name": "name", "column_default": null, "is_not_nullable": false, "ordinal_position": 2, "column_comment": null},
{"data_type": "text", "column_name": "email", "column_default": null, "is_not_nullable": false, "ordinal_position": 3, "column_comment": null}
]`, tableNameAuth)
const (
// Template to construct detailed output want
detailedObjectTemplate = `{
"object_name": "%[1]s", "schema_name": "public",
"object_details": {
"owner": "%[3]s", "comment": null,
"indexes": [{"is_primary": true, "is_unique": true, "index_name": "%[1]s_pkey", "index_method": "btree", "index_columns": ["id"], "index_definition": "CREATE UNIQUE INDEX %[1]s_pkey ON public.%[1]s USING btree (id)"}],
"triggers": [], "columns": %[2]s, "object_name": "%[1]s", "object_type": "TABLE", "schema_name": "public",
"constraints": [{"constraint_name": "%[1]s_pkey", "constraint_type": "PRIMARY KEY", "constraint_columns": ["id"], "constraint_definition": "PRIMARY KEY (id)", "foreign_key_referenced_table": null, "foreign_key_referenced_columns": null}]
}
}`
// Template to construct simple output want
simpleObjectTemplate = `{"object_name":"%s", "schema_name":"public", "object_details":{"name":"%s"}}`
)
// Helper to build json for detailed want
getDetailedWant := func(tableName, columnJSON string) string {
return fmt.Sprintf(detailedObjectTemplate, tableName, columnJSON, PostgresUser)
}
// Helper to build template for simple want
getSimpleWant := func(tableName string) string {
return fmt.Sprintf(simpleObjectTemplate, tableName, tableName)
}
invokeTcs := []struct {
name string
api string
requestBody io.Reader
wantStatusCode int
want string
}{
{
name: "invoke list_tables detailed output",
api: "http://127.0.0.1:5000/api/tool/list_tables/invoke",
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"table_names": "%s"}`, tableNameAuth))),
wantStatusCode: http.StatusOK,
want: fmt.Sprintf("[%s]", getDetailedWant(tableNameAuth, authTableColumns)),
},
{
name: "invoke list_tables simple output",
api: "http://127.0.0.1:5000/api/tool/list_tables/invoke",
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"table_names": "%s", "output_format": "simple"}`, tableNameAuth))),
wantStatusCode: http.StatusOK,
want: fmt.Sprintf("[%s]", getSimpleWant(tableNameAuth)),
},
{
name: "invoke list_tables with invalid output format",
api: "http://127.0.0.1:5000/api/tool/list_tables/invoke",
requestBody: bytes.NewBuffer([]byte(`{"table_names": "", "output_format": "abcd"}`)),
wantStatusCode: http.StatusBadRequest,
},
{
name: "invoke list_tables with malformed table_names parameter",
api: "http://127.0.0.1:5000/api/tool/list_tables/invoke",
requestBody: bytes.NewBuffer([]byte(`{"table_names": 12345, "output_format": "detailed"}`)),
wantStatusCode: http.StatusBadRequest,
},
{
name: "invoke list_tables with multiple table names",
api: "http://127.0.0.1:5000/api/tool/list_tables/invoke",
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"table_names": "%s,%s"}`, tableNameParam, tableNameAuth))),
wantStatusCode: http.StatusOK,
want: fmt.Sprintf("[%s,%s]", getDetailedWant(tableNameAuth, authTableColumns), getDetailedWant(tableNameParam, paramTableColumns)),
},
{
name: "invoke list_tables with non-existent table",
api: "http://127.0.0.1:5000/api/tool/list_tables/invoke",
requestBody: bytes.NewBuffer([]byte(`{"table_names": "non_existent_table"}`)),
wantStatusCode: http.StatusOK,
want: `null`,
},
{
name: "invoke list_tables with one existing and one non-existent table",
api: "http://127.0.0.1:5000/api/tool/list_tables/invoke",
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf(`{"table_names": "%s,non_existent_table"}`, tableNameParam))),
wantStatusCode: http.StatusOK,
want: fmt.Sprintf("[%s]", getDetailedWant(tableNameParam, paramTableColumns)),
},
}
for _, tc := range invokeTcs {
t.Run(tc.name, func(t *testing.T) {
req, err := http.NewRequest(http.MethodPost, tc.api, tc.requestBody)
if err != nil {
t.Fatalf("unable to create request: %s", err)
}
req.Header.Add("Content-type", "application/json")
resp, err := http.DefaultClient.Do(req)
if err != nil {
t.Fatalf("unable to send request: %s", err)
}
defer resp.Body.Close()
if resp.StatusCode != tc.wantStatusCode {
bodyBytes, _ := io.ReadAll(resp.Body)
t.Fatalf("response status code is not 200, got %d: %s", resp.StatusCode, string(bodyBytes))
}
if tc.wantStatusCode == http.StatusOK {
var bodyWrapper map[string]json.RawMessage
respBytes, err := io.ReadAll(resp.Body)
if err != nil {
t.Fatalf("error reading response body: %s", err)
}
if err := json.Unmarshal(respBytes, &bodyWrapper); err != nil {
t.Fatalf("error parsing response wrapper: %s, body: %s", err, string(respBytes))
}
resultJSON, ok := bodyWrapper["result"]
if !ok {
t.Fatal("unable to find 'result' in response body")
}
var resultString string
if err := json.Unmarshal(resultJSON, &resultString); err != nil {
t.Fatalf("'result' is not a JSON-encoded string: %s", err)
}
var got, want []any
if err := json.Unmarshal([]byte(resultString), &got); err != nil {
t.Fatalf("failed to unmarshal actual result string: %v", err)
}
if err := json.Unmarshal([]byte(tc.want), &want); err != nil {
t.Fatalf("failed to unmarshal expected want string: %v", err)
}
sort.SliceStable(got, func(i, j int) bool {
return fmt.Sprintf("%v", got[i]) < fmt.Sprintf("%v", got[j])
})
sort.SliceStable(want, func(i, j int) bool {
return fmt.Sprintf("%v", want[i]) < fmt.Sprintf("%v", want[j])
})
if !reflect.DeepEqual(got, want) {
t.Errorf("Unexpected result: got %#v, want: %#v", got, want)
}
}
})
}
}