mirror of
https://github.com/googleapis/genai-toolbox.git
synced 2026-01-09 15:38:08 -05:00
feat: Adding support for the --prebuilt flag (#604)
Introduces a new --prebuilt <source_type> flag. This flag is mutually exclusive with the existing --tools-file flag. Added a new directory cmd/prebuiltconfigs/ to store the prebuilt tools.yaml files (e.g., alloydb.yaml, postgres.yaml, etc.). These YAML files are embedded into the Go binary using the //go:embed directive. --------- Co-authored-by: Averi Kitsch <akitsch@google.com>
This commit is contained in:
55
cmd/root.go
55
cmd/root.go
@@ -28,6 +28,7 @@ import (
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/log"
|
||||
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
"github.com/googleapis/genai-toolbox/internal/telemetry"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
@@ -68,12 +69,13 @@ func Execute() {
|
||||
type Command struct {
|
||||
*cobra.Command
|
||||
|
||||
cfg server.ServerConfig
|
||||
logger log.Logger
|
||||
tools_file string
|
||||
inStream io.Reader
|
||||
outStream io.Writer
|
||||
errStream io.Writer
|
||||
cfg server.ServerConfig
|
||||
logger log.Logger
|
||||
tools_file string
|
||||
prebuiltConfig string
|
||||
inStream io.Reader
|
||||
outStream io.Writer
|
||||
errStream io.Writer
|
||||
}
|
||||
|
||||
// NewCommand returns a Command object representing an invocation of the CLI.
|
||||
@@ -110,15 +112,16 @@ func NewCommand(opts ...Option) *Command {
|
||||
flags.StringVarP(&cmd.cfg.Address, "address", "a", "127.0.0.1", "Address of the interface the server will listen on.")
|
||||
flags.IntVarP(&cmd.cfg.Port, "port", "p", 5000, "Port the server will listen on.")
|
||||
|
||||
flags.StringVar(&cmd.tools_file, "tools_file", "tools.yaml", "File path specifying the tool configuration.")
|
||||
flags.StringVar(&cmd.tools_file, "tools_file", "", "File path specifying the tool configuration. Cannot be used with --prebuilt.")
|
||||
// deprecate tools_file
|
||||
_ = flags.MarkDeprecated("tools_file", "please use --tools-file instead")
|
||||
flags.StringVar(&cmd.tools_file, "tools-file", "tools.yaml", "File path specifying the tool configuration.")
|
||||
flags.StringVar(&cmd.tools_file, "tools-file", "", "File path specifying the tool configuration. Cannot be used with --prebuilt.")
|
||||
flags.Var(&cmd.cfg.LogLevel, "log-level", "Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'.")
|
||||
flags.Var(&cmd.cfg.LoggingFormat, "logging-format", "Specify logging format to use. Allowed: 'standard' or 'JSON'.")
|
||||
flags.BoolVar(&cmd.cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.")
|
||||
flags.StringVar(&cmd.cfg.TelemetryOTLP, "telemetry-otlp", "", "Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318')")
|
||||
flags.StringVar(&cmd.cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
|
||||
flags.StringVar(&cmd.prebuiltConfig, "prebuilt", "", "Use a prebuilt tool configuration by source type. Cannot be used with --tools-file. Allowed: 'alloydb-postgres', 'bigquery', 'cloud-sql-mysql', 'cloud-sql-postgres', 'cloud-sql-mssql', 'postgres', 'spanner', 'spanner-postgres'.")
|
||||
flags.BoolVar(&cmd.cfg.Stdio, "stdio", false, "Listens via MCP STDIO instead of acting as a remote HTTP server.")
|
||||
|
||||
// wrap RunE command so that we have access to original Command object
|
||||
@@ -245,13 +248,37 @@ func run(cmd *Command) error {
|
||||
}
|
||||
}()
|
||||
|
||||
// Read tool file contents
|
||||
buf, err := os.ReadFile(cmd.tools_file)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("unable to read tool file at %q: %w", cmd.tools_file, err)
|
||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
var buf []byte
|
||||
|
||||
if cmd.prebuiltConfig != "" {
|
||||
// Make sure --prebuilt and --tools-file flags are mutually exclusive
|
||||
if cmd.tools_file != "" {
|
||||
errMsg := fmt.Errorf("--prebuilt and --tools-file flags cannot be used simultaneously")
|
||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
// Use prebuilt tools
|
||||
buf, err = prebuiltconfigs.Get(cmd.prebuiltConfig)
|
||||
if err != nil {
|
||||
cmd.logger.ErrorContext(ctx, err.Error())
|
||||
return err
|
||||
}
|
||||
logMsg := fmt.Sprint("Using prebuilt tool configuration for ", cmd.prebuiltConfig)
|
||||
cmd.logger.InfoContext(ctx, logMsg)
|
||||
} else {
|
||||
// Set default value of tools-file flag to tools.yaml
|
||||
if cmd.tools_file == "" {
|
||||
cmd.tools_file = "tools.yaml"
|
||||
}
|
||||
// Read tool file contents
|
||||
buf, err = os.ReadFile(cmd.tools_file)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("unable to read tool file at %q: %w", cmd.tools_file, err)
|
||||
cmd.logger.ErrorContext(ctx, errMsg.Error())
|
||||
return errMsg
|
||||
}
|
||||
}
|
||||
|
||||
toolsFile, err := parseToolsFile(ctx, buf)
|
||||
cmd.cfg.SourceConfigs, cmd.cfg.AuthServiceConfigs, cmd.cfg.ToolConfigs, cmd.cfg.ToolsetConfigs = toolsFile.Sources, toolsFile.AuthServices, toolsFile.Tools, toolsFile.Toolsets
|
||||
authSourceConfigs := toolsFile.AuthSources
|
||||
|
||||
146
cmd/root_test.go
146
cmd/root_test.go
@@ -24,6 +24,7 @@ import (
|
||||
"github.com/google/go-cmp/cmp"
|
||||
|
||||
"github.com/googleapis/genai-toolbox/internal/auth/google"
|
||||
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
|
||||
"github.com/googleapis/genai-toolbox/internal/server"
|
||||
cloudsqlpgsrc "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg"
|
||||
httpsrc "github.com/googleapis/genai-toolbox/internal/sources/http"
|
||||
@@ -194,7 +195,7 @@ func TestToolFileFlag(t *testing.T) {
|
||||
{
|
||||
desc: "default value",
|
||||
args: []string{},
|
||||
want: "tools.yaml",
|
||||
want: "",
|
||||
},
|
||||
{
|
||||
desc: "foo file",
|
||||
@@ -225,6 +226,36 @@ func TestToolFileFlag(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestPrebuiltFlag(t *testing.T) {
|
||||
tcs := []struct {
|
||||
desc string
|
||||
args []string
|
||||
want string
|
||||
}{
|
||||
{
|
||||
desc: "default value",
|
||||
args: []string{},
|
||||
want: "",
|
||||
},
|
||||
{
|
||||
desc: "custom pre built flag",
|
||||
args: []string{"--tools-file", "alloydb"},
|
||||
want: "alloydb",
|
||||
},
|
||||
}
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.desc, func(t *testing.T) {
|
||||
c, _, err := invokeCommand(tc.args)
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error invoking command: %s", err)
|
||||
}
|
||||
if c.tools_file != tc.want {
|
||||
t.Fatalf("got %v, want %v", c.cfg, tc.want)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestFailServerConfigFlags(t *testing.T) {
|
||||
tcs := []struct {
|
||||
desc string
|
||||
@@ -866,6 +897,119 @@ func TestEnvVarReplacement(t *testing.T) {
|
||||
|
||||
}
|
||||
|
||||
func TestPrebuiltTools(t *testing.T) {
|
||||
alloydb_config, _ := prebuiltconfigs.Get("alloydb-postgres")
|
||||
bigquery_config, _ := prebuiltconfigs.Get("bigquery")
|
||||
cloudsqlpg_config, _ := prebuiltconfigs.Get("cloud-sql-postgres")
|
||||
cloudsqlmysql_config, _ := prebuiltconfigs.Get("cloud-sql-mysql")
|
||||
cloudsqlmssql_config, _ := prebuiltconfigs.Get("cloud-sql-mssql")
|
||||
postgresconfig, _ := prebuiltconfigs.Get("postgres")
|
||||
spanner_config, _ := prebuiltconfigs.Get("spanner")
|
||||
spannerpg_config, _ := prebuiltconfigs.Get("spanner-postgres")
|
||||
ctx, err := testutils.ContextWithNewLogger()
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
tcs := []struct {
|
||||
name string
|
||||
in []byte
|
||||
wantToolset server.ToolsetConfigs
|
||||
}{
|
||||
{
|
||||
name: "alloydb prebuilt tools",
|
||||
in: alloydb_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"alloydb-postgres-database-tools": tools.ToolsetConfig{
|
||||
Name: "alloydb-postgres-database-tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bigquery prebuilt tools",
|
||||
in: bigquery_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"bigquery-database-tools": tools.ToolsetConfig{
|
||||
Name: "bigquery-database-tools",
|
||||
ToolNames: []string{"execute_sql", "get_dataset_info", "get_table_info", "list_dataset_ids", "list_table_ids"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "cloudsqlpg prebuilt tools",
|
||||
in: cloudsqlpg_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"cloud-sql-postgres-database-tools": tools.ToolsetConfig{
|
||||
Name: "cloud-sql-postgres-database-tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "cloudsqlmysql prebuilt tools",
|
||||
in: cloudsqlmysql_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"cloud-sql-mysql-database-tools": tools.ToolsetConfig{
|
||||
Name: "cloud-sql-mysql-database-tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "cloudsqlmssql prebuilt tools",
|
||||
in: cloudsqlmssql_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"cloud-sql-mssql-database-tools": tools.ToolsetConfig{
|
||||
Name: "cloud-sql-mssql-database-tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "postgres prebuilt tools",
|
||||
in: postgresconfig,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"postgres-database-tools": tools.ToolsetConfig{
|
||||
Name: "postgres-database-tools",
|
||||
ToolNames: []string{"execute_sql", "list_tables"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "spanner prebuilt tools",
|
||||
in: spanner_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"spanner-database-tools": tools.ToolsetConfig{
|
||||
Name: "spanner-database-tools",
|
||||
ToolNames: []string{"execute_sql", "execute_sql_dql", "list_tables"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "spanner pg prebuilt tools",
|
||||
in: spannerpg_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"spanner-postgres-database-tools": tools.ToolsetConfig{
|
||||
Name: "spanner-postgres-database-tools",
|
||||
ToolNames: []string{"execute_sql", "execute_sql_dql", "list_tables"},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range tcs {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
toolsFile, err := parseToolsFile(ctx, tc.in)
|
||||
if err != nil {
|
||||
t.Fatalf("failed to parse input: %v", err)
|
||||
}
|
||||
if diff := cmp.Diff(tc.wantToolset, toolsFile.Toolsets); diff != "" {
|
||||
t.Fatalf("incorrect tools parse: diff %v", diff)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestUpdateLogLevel(t *testing.T) {
|
||||
tcs := []struct {
|
||||
desc string
|
||||
|
||||
87
internal/prebuiltconfigs/prebuiltconfigs.go
Normal file
87
internal/prebuiltconfigs/prebuiltconfigs.go
Normal file
@@ -0,0 +1,87 @@
|
||||
// Copyright 2024 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package prebuiltconfigs
|
||||
|
||||
import (
|
||||
"embed"
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var (
|
||||
//go:embed tools/*.yaml
|
||||
prebuiltConfigsFS embed.FS
|
||||
|
||||
// Map of sources to their prebuilt tools
|
||||
prebuiltToolYAMLs map[string][]byte
|
||||
// List of sources with prebuilt tools
|
||||
prebuiltToolsSources []string
|
||||
)
|
||||
|
||||
func init() {
|
||||
var err error
|
||||
prebuiltToolYAMLs, prebuiltToolsSources, err = loadPrebuiltToolYAMLs()
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("Unexpected Error: %v\n", err))
|
||||
}
|
||||
}
|
||||
|
||||
// Get prebuilt tools for a source
|
||||
func Get(prebuiltSourceConfig string) ([]byte, error) {
|
||||
content, ok := prebuiltToolYAMLs[prebuiltSourceConfig]
|
||||
if !ok {
|
||||
prebuiltHelpSuffix := "no prebuilt configurations found."
|
||||
if len(prebuiltToolsSources) > 0 {
|
||||
prebuiltHelpSuffix = fmt.Sprintf("available: %s", strings.Join(prebuiltToolsSources, ", "))
|
||||
}
|
||||
errMsg := fmt.Errorf("prebuilt source tool for '%s' not found. %s", prebuiltSourceConfig, prebuiltHelpSuffix)
|
||||
return nil, errMsg
|
||||
}
|
||||
return content, nil
|
||||
}
|
||||
|
||||
// Load all available pre built tools
|
||||
func loadPrebuiltToolYAMLs() (map[string][]byte, []string, error) {
|
||||
toolYAMLs := make(map[string][]byte)
|
||||
var sourceTypes []string
|
||||
entries, err := prebuiltConfigsFS.ReadDir("tools")
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("failed to read prebuilt tools %w", err)
|
||||
return nil, nil, errMsg
|
||||
}
|
||||
|
||||
for _, entry := range entries {
|
||||
lowerName := strings.ToLower(entry.Name())
|
||||
if !entry.IsDir() && (strings.HasSuffix(lowerName, ".yaml")) {
|
||||
filePathInFS := filepath.Join("tools", entry.Name())
|
||||
content, err := prebuiltConfigsFS.ReadFile(filePathInFS)
|
||||
if err != nil {
|
||||
errMsg := fmt.Errorf("failed to read a prebuilt tool %w", err)
|
||||
return nil, nil, errMsg
|
||||
}
|
||||
sourceTypeKey := entry.Name()[:len(entry.Name())-len(".yaml")]
|
||||
|
||||
sourceTypes = append(sourceTypes, sourceTypeKey)
|
||||
toolYAMLs[sourceTypeKey] = content
|
||||
}
|
||||
}
|
||||
if len(toolYAMLs) == 0 {
|
||||
errMsg := fmt.Errorf("no prebuilt tool configurations were loaded.%w", err)
|
||||
return nil, nil, errMsg
|
||||
}
|
||||
|
||||
return toolYAMLs, sourceTypes, nil
|
||||
}
|
||||
105
internal/prebuiltconfigs/prebuiltconfigs_test.go
Normal file
105
internal/prebuiltconfigs/prebuiltconfigs_test.go
Normal file
@@ -0,0 +1,105 @@
|
||||
// Copyright 2024 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package prebuiltconfigs
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/google/go-cmp/cmp"
|
||||
)
|
||||
|
||||
func TestLoadPrebuiltToolYAMLs(t *testing.T) {
|
||||
test_name := "test load prebuilt configs"
|
||||
expectedKeys := []string{
|
||||
"alloydb-postgres",
|
||||
"bigquery",
|
||||
"cloud-sql-mssql",
|
||||
"cloud-sql-mysql",
|
||||
"cloud-sql-postgres",
|
||||
"postgres",
|
||||
"spanner-postgres",
|
||||
"spanner",
|
||||
}
|
||||
t.Run(test_name, func(t *testing.T) {
|
||||
configsMap, keys, err := loadPrebuiltToolYAMLs()
|
||||
if err != nil {
|
||||
t.Fatalf("unexpected error: %s", err)
|
||||
}
|
||||
foundExpectedKeys := make(map[string]bool)
|
||||
|
||||
if len(expectedKeys) != len(configsMap) {
|
||||
t.Fatalf("Failed to load all prebuilt tools.")
|
||||
}
|
||||
|
||||
for _, expectedKey := range expectedKeys {
|
||||
_, ok := configsMap[expectedKey]
|
||||
if !ok {
|
||||
t.Fatalf("Prebuilt tools for '%s' was NOT FOUND in the loaded map.", expectedKey)
|
||||
} else {
|
||||
foundExpectedKeys[expectedKey] = true // Mark as found
|
||||
}
|
||||
}
|
||||
|
||||
t.Log(expectedKeys)
|
||||
t.Log(keys)
|
||||
|
||||
if diff := cmp.Diff(expectedKeys, keys); diff != "" {
|
||||
t.Fatalf("incorrect sources parse: diff %v", diff)
|
||||
}
|
||||
|
||||
})
|
||||
}
|
||||
|
||||
func TestGetPrebuiltTool(t *testing.T) {
|
||||
alloydb_config, _ := Get("alloydb-postgres")
|
||||
bigquery_config, _ := Get("bigquery")
|
||||
cloudsqlpg_config, _ := Get("cloud-sql-postgres")
|
||||
cloudsqlmysql_config, _ := Get("cloud-sql-mysql")
|
||||
cloudsqlmssql_config, _ := Get("cloud-sql-mssql")
|
||||
postgresconfig, _ := Get("postgres")
|
||||
spanner_config, _ := Get("spanner")
|
||||
spannerpg_config, _ := Get("spanner-postgres")
|
||||
if len(alloydb_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch alloydb prebuilt tools yaml")
|
||||
}
|
||||
if len(bigquery_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch bigquery prebuilt tools yaml")
|
||||
}
|
||||
if len(cloudsqlpg_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch cloud sql pg prebuilt tools yaml")
|
||||
}
|
||||
if len(cloudsqlmysql_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch cloud sql mysql prebuilt tools yaml")
|
||||
}
|
||||
if len(cloudsqlmssql_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch cloud sql mssql prebuilt tools yaml")
|
||||
}
|
||||
if len(postgresconfig) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch postgres prebuilt tools yaml")
|
||||
}
|
||||
if len(spanner_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch spanner prebuilt tools yaml")
|
||||
}
|
||||
if len(spannerpg_config) <= 0 {
|
||||
t.Fatalf("unexpected error: could not fetch spanner pg prebuilt tools yaml")
|
||||
}
|
||||
}
|
||||
|
||||
func TestFailGetPrebuiltTool(t *testing.T) {
|
||||
_, err := Get("sql")
|
||||
if err == nil {
|
||||
t.Fatalf("unexpected an error but got nil.")
|
||||
}
|
||||
}
|
||||
100
internal/prebuiltconfigs/tools/alloydb-postgres.yaml
Normal file
100
internal/prebuiltconfigs/tools/alloydb-postgres.yaml
Normal file
@@ -0,0 +1,100 @@
|
||||
sources:
|
||||
alloydb-pg-source:
|
||||
kind: "alloydb-postgres"
|
||||
project: ${ALLOYDB_POSTGRES_PROJECT}
|
||||
region: ${ALLOYDB_POSTGRES_REGION}
|
||||
cluster: ${ALLOYDB_POSTGRES_CLUSTER}
|
||||
instance: ${ALLOYDB_POSTGRES_INSTANCE}
|
||||
database: ${ALLOYDB_POSTGRES_DATABASE}
|
||||
user: ${ALLOYDB_POSTGRES_USER}
|
||||
password: ${ALLOYDB_POSTGRES_PASSWORD}
|
||||
|
||||
tools:
|
||||
execute_sql:
|
||||
kind: postgres-execute-sql
|
||||
source: alloydb-pg-source
|
||||
description: Use this tool to execute sql.
|
||||
|
||||
list_tables:
|
||||
kind: postgres-sql
|
||||
source: alloydb-pg-source
|
||||
description: "Lists detailed schema information (object type, columns, constraints, indexes, triggers, owner, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
|
||||
statement: |
|
||||
WITH desired_relkinds AS (
|
||||
SELECT ARRAY['r', 'p']::char[] AS kinds -- Always consider both 'TABLE' and 'PARTITIONED TABLE'
|
||||
),
|
||||
table_info AS (
|
||||
SELECT
|
||||
t.oid AS table_oid,
|
||||
ns.nspname AS schema_name,
|
||||
t.relname AS table_name,
|
||||
pg_get_userbyid(t.relowner) AS table_owner,
|
||||
obj_description(t.oid, 'pg_class') AS table_comment,
|
||||
t.relkind AS object_kind
|
||||
FROM
|
||||
pg_class t
|
||||
JOIN
|
||||
pg_namespace ns ON ns.oid = t.relnamespace
|
||||
CROSS JOIN desired_relkinds dk
|
||||
WHERE
|
||||
t.relkind = ANY(dk.kinds) -- Filter by selected table relkinds ('r', 'p')
|
||||
AND (NULLIF(TRIM($1), '') IS NULL OR t.relname = ANY(string_to_array($1,','))) -- $1 is object_names
|
||||
AND ns.nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
|
||||
AND ns.nspname NOT LIKE 'pg_temp_%' AND ns.nspname NOT LIKE 'pg_toast_temp_%'
|
||||
),
|
||||
columns_info AS (
|
||||
SELECT
|
||||
att.attrelid AS table_oid, att.attname AS column_name, format_type(att.atttypid, att.atttypmod) AS data_type,
|
||||
att.attnum AS column_ordinal_position, att.attnotnull AS is_not_nullable,
|
||||
pg_get_expr(ad.adbin, ad.adrelid) AS column_default, col_description(att.attrelid, att.attnum) AS column_comment
|
||||
FROM pg_attribute att LEFT JOIN pg_attrdef ad ON att.attrelid = ad.adrelid AND att.attnum = ad.adnum
|
||||
JOIN table_info ti ON att.attrelid = ti.table_oid WHERE att.attnum > 0 AND NOT att.attisdropped
|
||||
),
|
||||
constraints_info AS (
|
||||
SELECT
|
||||
con.conrelid AS table_oid, con.conname AS constraint_name, pg_get_constraintdef(con.oid) AS constraint_definition,
|
||||
CASE con.contype WHEN 'p' THEN 'PRIMARY KEY' WHEN 'f' THEN 'FOREIGN KEY' WHEN 'u' THEN 'UNIQUE' WHEN 'c' THEN 'CHECK' ELSE con.contype::text END AS constraint_type,
|
||||
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.conkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.conrelid AND att.attnum = u.attnum) AS constraint_columns,
|
||||
NULLIF(con.confrelid, 0)::regclass AS foreign_key_referenced_table,
|
||||
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.confkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.confrelid AND att.attnum = u.attnum WHERE con.contype = 'f') AS foreign_key_referenced_columns
|
||||
FROM pg_constraint con JOIN table_info ti ON con.conrelid = ti.table_oid
|
||||
),
|
||||
indexes_info AS (
|
||||
SELECT
|
||||
idx.indrelid AS table_oid, ic.relname AS index_name, pg_get_indexdef(idx.indexrelid) AS index_definition,
|
||||
idx.indisunique AS is_unique, idx.indisprimary AS is_primary, am.amname AS index_method,
|
||||
(SELECT array_agg(att.attname ORDER BY u.ord) FROM unnest(idx.indkey::int[]) WITH ORDINALITY AS u(colidx, ord) LEFT JOIN pg_attribute att ON att.attrelid = idx.indrelid AND att.attnum = u.colidx WHERE u.colidx <> 0) AS index_columns
|
||||
FROM pg_index idx JOIN pg_class ic ON ic.oid = idx.indexrelid JOIN pg_am am ON am.oid = ic.relam JOIN table_info ti ON idx.indrelid = ti.table_oid
|
||||
),
|
||||
triggers_info AS (
|
||||
SELECT tg.tgrelid AS table_oid, tg.tgname AS trigger_name, pg_get_triggerdef(tg.oid) AS trigger_definition, tg.tgenabled AS trigger_enabled_state
|
||||
FROM pg_trigger tg JOIN table_info ti ON tg.tgrelid = ti.table_oid WHERE NOT tg.tgisinternal
|
||||
)
|
||||
SELECT
|
||||
ti.schema_name,
|
||||
ti.table_name AS object_name,
|
||||
json_build_object(
|
||||
'schema_name', ti.schema_name,
|
||||
'object_name', ti.table_name,
|
||||
'object_type', CASE ti.object_kind
|
||||
WHEN 'r' THEN 'TABLE'
|
||||
WHEN 'p' THEN 'PARTITIONED TABLE'
|
||||
ELSE ti.object_kind::text -- Should not happen due to WHERE clause
|
||||
END,
|
||||
'owner', ti.table_owner,
|
||||
'comment', ti.table_comment,
|
||||
'columns', COALESCE((SELECT json_agg(json_build_object('column_name',ci.column_name,'data_type',ci.data_type,'ordinal_position',ci.column_ordinal_position,'is_not_nullable',ci.is_not_nullable,'column_default',ci.column_default,'column_comment',ci.column_comment) ORDER BY ci.column_ordinal_position) FROM columns_info ci WHERE ci.table_oid = ti.table_oid), '[]'::json),
|
||||
'constraints', COALESCE((SELECT json_agg(json_build_object('constraint_name',cons.constraint_name,'constraint_type',cons.constraint_type,'constraint_definition',cons.constraint_definition,'constraint_columns',cons.constraint_columns,'foreign_key_referenced_table',cons.foreign_key_referenced_table,'foreign_key_referenced_columns',cons.foreign_key_referenced_columns)) FROM constraints_info cons WHERE cons.table_oid = ti.table_oid), '[]'::json),
|
||||
'indexes', COALESCE((SELECT json_agg(json_build_object('index_name',ii.index_name,'index_definition',ii.index_definition,'is_unique',ii.is_unique,'is_primary',ii.is_primary,'index_method',ii.index_method,'index_columns',ii.index_columns)) FROM indexes_info ii WHERE ii.table_oid = ti.table_oid), '[]'::json),
|
||||
'triggers', COALESCE((SELECT json_agg(json_build_object('trigger_name',tri.trigger_name,'trigger_definition',tri.trigger_definition,'trigger_enabled_state',tri.trigger_enabled_state)) FROM triggers_info tri WHERE tri.table_oid = ti.table_oid), '[]'::json)
|
||||
) AS object_details
|
||||
FROM table_info ti ORDER BY ti.schema_name, ti.table_name;
|
||||
parameters:
|
||||
- name: table_names
|
||||
type: string
|
||||
description: "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed."
|
||||
|
||||
toolsets:
|
||||
alloydb-postgres-database-tools:
|
||||
- execute_sql
|
||||
- list_tables
|
||||
38
internal/prebuiltconfigs/tools/bigquery.yaml
Normal file
38
internal/prebuiltconfigs/tools/bigquery.yaml
Normal file
@@ -0,0 +1,38 @@
|
||||
sources:
|
||||
bigquery-source:
|
||||
kind: "bigquery"
|
||||
project: ${BIGQUERY_PROJECT}
|
||||
|
||||
tools:
|
||||
execute_sql:
|
||||
kind: bigquery-execute-sql
|
||||
source: bigquery-source
|
||||
description: Use this tool to execute sql statement.
|
||||
|
||||
get_dataset_info:
|
||||
kind: bigquery-get-dataset-info
|
||||
source: bigquery-source
|
||||
description: Use this tool to get dataset metadata.
|
||||
|
||||
get_table_info:
|
||||
kind: bigquery-get-table-info
|
||||
source: bigquery-source
|
||||
description: Use this tool to get table metadata.
|
||||
|
||||
list_dataset_ids:
|
||||
kind: bigquery-list-dataset-ids
|
||||
source: bigquery-source
|
||||
description: Use this tool to get dataset metadata.
|
||||
|
||||
list_table_ids:
|
||||
kind: bigquery-list-table-ids
|
||||
source: bigquery-source
|
||||
description: Use this tool to get table metadata.
|
||||
|
||||
toolsets:
|
||||
bigquery-database-tools:
|
||||
- execute_sql
|
||||
- get_dataset_info
|
||||
- get_table_info
|
||||
- list_dataset_ids
|
||||
- list_table_ids
|
||||
271
internal/prebuiltconfigs/tools/cloud-sql-mssql.yaml
Normal file
271
internal/prebuiltconfigs/tools/cloud-sql-mssql.yaml
Normal file
@@ -0,0 +1,271 @@
|
||||
sources:
|
||||
cloudsql-mssql-source:
|
||||
kind: cloud-sql-mssql
|
||||
project: ${CLOUD_SQL_MSSQL_PROJECT}
|
||||
region: ${CLOUD_SQL_MSSQL_REGION}
|
||||
instance: ${CLOUD_SQL_MSSQL_INSTANCE}
|
||||
database: ${CLOUD_SQL_MSSQL_DATABASE}
|
||||
ipAddress: ${CLOUD_SQL_MSSQL_IP_ADDRESS}
|
||||
user: ${CLOUD_SQL_MSSQL_USER}
|
||||
password: ${CLOUD_SQL_MSSQL_PASSWORD}
|
||||
tools:
|
||||
execute_sql:
|
||||
kind: mssql-execute-sql
|
||||
source: cloud-sql-mssql-source
|
||||
description: Use this tool to execute SQL.
|
||||
|
||||
list_tables:
|
||||
kind: mssql-sql
|
||||
source: cloudsql-mssql-source
|
||||
description: "Lists detailed schema information (object type, columns, constraints, indexes, triggers, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
|
||||
statement: |
|
||||
WITH table_info AS (
|
||||
SELECT
|
||||
t.object_id AS table_oid,
|
||||
s.name AS schema_name,
|
||||
t.name AS table_name,
|
||||
dp.name AS table_owner, -- Schema's owner principal name
|
||||
CAST(ep.value AS NVARCHAR(MAX)) AS table_comment, -- Cast for JSON compatibility
|
||||
CASE
|
||||
WHEN EXISTS ( -- Check if the table has more than one partition for any of its indexes or heap
|
||||
SELECT 1 FROM sys.partitions p
|
||||
WHERE p.object_id = t.object_id AND p.partition_number > 1
|
||||
) THEN 'PARTITIONED TABLE'
|
||||
ELSE 'TABLE'
|
||||
END AS object_type_detail
|
||||
FROM
|
||||
sys.tables t
|
||||
INNER JOIN
|
||||
sys.schemas s ON t.schema_id = s.schema_id
|
||||
LEFT JOIN
|
||||
sys.database_principals dp ON s.principal_id = dp.principal_id
|
||||
LEFT JOIN
|
||||
sys.extended_properties ep ON ep.major_id = t.object_id AND ep.minor_id = 0 AND ep.class = 1 AND ep.name = 'MS_Description'
|
||||
WHERE
|
||||
t.type = 'U' -- User tables
|
||||
AND s.name NOT IN ('sys', 'INFORMATION_SCHEMA', 'guest', 'db_owner', 'db_accessadmin', 'db_backupoperator', 'db_datareader', 'db_datawriter', 'db_ddladmin', 'db_denydatareader', 'db_denydatawriter', 'db_securityadmin')
|
||||
AND (@table_names IS NULL OR LTRIM(RTRIM(@table_names)) = '' OR t.name IN (SELECT LTRIM(RTRIM(value)) FROM STRING_SPLIT(@table_names, ',')))
|
||||
),
|
||||
columns_info AS (
|
||||
SELECT
|
||||
c.object_id AS table_oid,
|
||||
c.name AS column_name,
|
||||
CONCAT(
|
||||
UPPER(TY.name), -- Base type name
|
||||
CASE
|
||||
WHEN TY.name IN ('char', 'varchar', 'nchar', 'nvarchar', 'binary', 'varbinary') THEN
|
||||
CONCAT('(', IIF(c.max_length = -1, 'MAX', CAST(c.max_length / CASE WHEN TY.name IN ('nchar', 'nvarchar') THEN 2 ELSE 1 END AS VARCHAR(10))), ')')
|
||||
WHEN TY.name IN ('decimal', 'numeric') THEN
|
||||
CONCAT('(', c.precision, ',', c.scale, ')')
|
||||
WHEN TY.name IN ('datetime2', 'datetimeoffset', 'time') THEN
|
||||
CONCAT('(', c.scale, ')')
|
||||
ELSE ''
|
||||
END
|
||||
) AS data_type,
|
||||
c.column_id AS column_ordinal_position,
|
||||
IIF(c.is_nullable = 0, CAST(1 AS BIT), CAST(0 AS BIT)) AS is_not_nullable,
|
||||
dc.definition AS column_default,
|
||||
CAST(epc.value AS NVARCHAR(MAX)) AS column_comment
|
||||
FROM
|
||||
sys.columns c
|
||||
JOIN
|
||||
table_info ti ON c.object_id = ti.table_oid
|
||||
JOIN
|
||||
sys.types TY ON c.user_type_id = TY.user_type_id AND TY.is_user_defined = 0 -- Ensure we get base types
|
||||
LEFT JOIN
|
||||
sys.default_constraints dc ON c.object_id = dc.parent_object_id AND c.column_id = dc.parent_column_id
|
||||
LEFT JOIN
|
||||
sys.extended_properties epc ON epc.major_id = c.object_id AND epc.minor_id = c.column_id AND epc.class = 1 AND epc.name = 'MS_Description'
|
||||
),
|
||||
constraints_info AS (
|
||||
-- Primary Keys & Unique Constraints
|
||||
SELECT
|
||||
kc.parent_object_id AS table_oid,
|
||||
kc.name AS constraint_name,
|
||||
REPLACE(kc.type_desc, '_CONSTRAINT', '') AS constraint_type, -- 'PRIMARY_KEY', 'UNIQUE'
|
||||
STUFF((SELECT ', ' + col.name
|
||||
FROM sys.index_columns ic
|
||||
JOIN sys.columns col ON ic.object_id = col.object_id AND ic.column_id = col.column_id
|
||||
WHERE ic.object_id = kc.parent_object_id AND ic.index_id = kc.unique_index_id
|
||||
ORDER BY ic.key_ordinal
|
||||
FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 2, '') AS constraint_columns,
|
||||
NULL AS foreign_key_referenced_table,
|
||||
NULL AS foreign_key_referenced_columns,
|
||||
CASE kc.type
|
||||
WHEN 'PK' THEN 'PRIMARY KEY (' + STUFF((SELECT ', ' + col.name FROM sys.index_columns ic JOIN sys.columns col ON ic.object_id = col.object_id AND ic.column_id = col.column_id WHERE ic.object_id = kc.parent_object_id AND ic.index_id = kc.unique_index_id ORDER BY ic.key_ordinal FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 2, '') + ')'
|
||||
WHEN 'UQ' THEN 'UNIQUE (' + STUFF((SELECT ', ' + col.name FROM sys.index_columns ic JOIN sys.columns col ON ic.object_id = col.object_id AND ic.column_id = col.column_id WHERE ic.object_id = kc.parent_object_id AND ic.index_id = kc.unique_index_id ORDER BY ic.key_ordinal FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 2, '') + ')'
|
||||
END AS constraint_definition
|
||||
FROM sys.key_constraints kc
|
||||
JOIN table_info ti ON kc.parent_object_id = ti.table_oid
|
||||
UNION ALL
|
||||
-- Foreign Keys
|
||||
SELECT
|
||||
fk.parent_object_id AS table_oid,
|
||||
fk.name AS constraint_name,
|
||||
'FOREIGN KEY' AS constraint_type,
|
||||
STUFF((SELECT ', ' + pc.name
|
||||
FROM sys.foreign_key_columns fkc
|
||||
JOIN sys.columns pc ON fkc.parent_object_id = pc.object_id AND fkc.parent_column_id = pc.column_id
|
||||
WHERE fkc.constraint_object_id = fk.object_id
|
||||
ORDER BY fkc.constraint_column_id
|
||||
FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 2, '') AS constraint_columns,
|
||||
SCHEMA_NAME(rt.schema_id) + '.' + OBJECT_NAME(fk.referenced_object_id) AS foreign_key_referenced_table,
|
||||
STUFF((SELECT ', ' + rc.name
|
||||
FROM sys.foreign_key_columns fkc
|
||||
JOIN sys.columns rc ON fkc.referenced_object_id = rc.object_id AND fkc.referenced_column_id = rc.column_id
|
||||
WHERE fkc.constraint_object_id = fk.object_id
|
||||
ORDER BY fkc.constraint_column_id
|
||||
FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 2, '') AS foreign_key_referenced_columns,
|
||||
OBJECT_DEFINITION(fk.object_id) AS constraint_definition
|
||||
FROM sys.foreign_keys fk
|
||||
JOIN sys.tables rt ON fk.referenced_object_id = rt.object_id
|
||||
JOIN table_info ti ON fk.parent_object_id = ti.table_oid
|
||||
UNION ALL
|
||||
-- Check Constraints
|
||||
SELECT
|
||||
cc.parent_object_id AS table_oid,
|
||||
cc.name AS constraint_name,
|
||||
'CHECK' AS constraint_type,
|
||||
NULL AS constraint_columns, -- Definition includes column context
|
||||
NULL AS foreign_key_referenced_table,
|
||||
NULL AS foreign_key_referenced_columns,
|
||||
cc.definition AS constraint_definition
|
||||
FROM sys.check_constraints cc
|
||||
JOIN table_info ti ON cc.parent_object_id = ti.table_oid
|
||||
),
|
||||
indexes_info AS (
|
||||
SELECT
|
||||
i.object_id AS table_oid,
|
||||
i.name AS index_name,
|
||||
i.type_desc AS index_method, -- CLUSTERED, NONCLUSTERED, XML, etc.
|
||||
i.is_unique,
|
||||
i.is_primary_key AS is_primary,
|
||||
STUFF((SELECT ', ' + c.name
|
||||
FROM sys.index_columns ic
|
||||
JOIN sys.columns c ON i.object_id = c.object_id AND ic.column_id = c.column_id
|
||||
WHERE ic.object_id = i.object_id AND ic.index_id = i.index_id AND ic.is_included_column = 0
|
||||
ORDER BY ic.key_ordinal
|
||||
FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 2, '') AS index_columns,
|
||||
(
|
||||
'COLUMNS: (' + ISNULL(STUFF((SELECT ', ' + c.name + CASE WHEN ic.is_descending_key = 1 THEN ' DESC' ELSE '' END
|
||||
FROM sys.index_columns ic
|
||||
JOIN sys.columns c ON i.object_id = c.object_id AND ic.column_id = c.column_id
|
||||
WHERE ic.object_id = i.object_id AND ic.index_id = i.index_id AND ic.is_included_column = 0
|
||||
ORDER BY ic.key_ordinal FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 2, ''), 'N/A') + ')' +
|
||||
ISNULL(CHAR(13)+CHAR(10) + 'INCLUDE: (' + STUFF((SELECT ', ' + c.name
|
||||
FROM sys.index_columns ic
|
||||
JOIN sys.columns c ON i.object_id = c.object_id AND ic.column_id = c.column_id
|
||||
WHERE ic.object_id = i.object_id AND ic.index_id = i.index_id AND ic.is_included_column = 1
|
||||
ORDER BY ic.index_column_id FOR XML PATH(''), TYPE).value('.', 'NVARCHAR(MAX)'), 1, 2, '') + ')', '') +
|
||||
ISNULL(CHAR(13)+CHAR(10) + 'FILTER: (' + i.filter_definition + ')', '')
|
||||
) AS index_definition_details
|
||||
FROM
|
||||
sys.indexes i
|
||||
JOIN
|
||||
table_info ti ON i.object_id = ti.table_oid
|
||||
WHERE i.type <> 0 -- Exclude Heaps
|
||||
AND i.name IS NOT NULL -- Exclude unnamed heap indexes; named indexes (PKs are often named) are preferred.
|
||||
),
|
||||
triggers_info AS (
|
||||
SELECT
|
||||
tr.parent_id AS table_oid,
|
||||
tr.name AS trigger_name,
|
||||
OBJECT_DEFINITION(tr.object_id) AS trigger_definition,
|
||||
CASE tr.is_disabled WHEN 0 THEN 'ENABLED' ELSE 'DISABLED' END AS trigger_enabled_state
|
||||
FROM
|
||||
sys.triggers tr
|
||||
JOIN
|
||||
table_info ti ON tr.parent_id = ti.table_oid
|
||||
WHERE
|
||||
tr.is_ms_shipped = 0
|
||||
AND tr.parent_class_desc = 'OBJECT_OR_COLUMN' -- DML Triggers on tables/views
|
||||
)
|
||||
SELECT
|
||||
ti.schema_name,
|
||||
ti.table_name AS object_name,
|
||||
(
|
||||
SELECT
|
||||
ti.schema_name AS schema_name,
|
||||
ti.table_name AS object_name,
|
||||
ti.object_type_detail AS object_type,
|
||||
ti.table_owner AS owner,
|
||||
ti.table_comment AS comment,
|
||||
JSON_QUERY(ISNULL((
|
||||
SELECT
|
||||
ci.column_name,
|
||||
ci.data_type,
|
||||
ci.column_ordinal_position,
|
||||
ci.is_not_nullable,
|
||||
ci.column_default,
|
||||
ci.column_comment
|
||||
FROM columns_info ci
|
||||
WHERE ci.table_oid = ti.table_oid
|
||||
ORDER BY ci.column_ordinal_position
|
||||
FOR JSON PATH
|
||||
), '[]')) AS columns,
|
||||
JSON_QUERY(ISNULL((
|
||||
SELECT
|
||||
cons.constraint_name,
|
||||
cons.constraint_type,
|
||||
cons.constraint_definition,
|
||||
JSON_QUERY(
|
||||
CASE
|
||||
WHEN cons.constraint_columns IS NOT NULL AND LTRIM(RTRIM(cons.constraint_columns)) <> ''
|
||||
THEN '[' + (SELECT STRING_AGG('"' + LTRIM(RTRIM(value)) + '"', ',') FROM STRING_SPLIT(cons.constraint_columns, ',')) + ']'
|
||||
ELSE '[]'
|
||||
END
|
||||
) AS constraint_columns,
|
||||
cons.foreign_key_referenced_table,
|
||||
JSON_QUERY(
|
||||
CASE
|
||||
WHEN cons.foreign_key_referenced_columns IS NOT NULL AND LTRIM(RTRIM(cons.foreign_key_referenced_columns)) <> ''
|
||||
THEN '[' + (SELECT STRING_AGG('"' + LTRIM(RTRIM(value)) + '"', ',') FROM STRING_SPLIT(cons.foreign_key_referenced_columns, ',')) + ']'
|
||||
ELSE '[]'
|
||||
END
|
||||
) AS foreign_key_referenced_columns
|
||||
FROM constraints_info cons
|
||||
WHERE cons.table_oid = ti.table_oid
|
||||
FOR JSON PATH
|
||||
), '[]')) AS constraints,
|
||||
JSON_QUERY(ISNULL((
|
||||
SELECT
|
||||
ii.index_name,
|
||||
ii.index_definition_details AS index_definition,
|
||||
ii.is_unique,
|
||||
ii.is_primary,
|
||||
ii.index_method,
|
||||
JSON_QUERY(
|
||||
CASE
|
||||
WHEN ii.index_columns IS NOT NULL AND LTRIM(RTRIM(ii.index_columns)) <> ''
|
||||
THEN '[' + (SELECT STRING_AGG('"' + LTRIM(RTRIM(value)) + '"', ',') FROM STRING_SPLIT(ii.index_columns, ',')) + ']'
|
||||
ELSE '[]'
|
||||
END
|
||||
) AS index_columns
|
||||
FROM indexes_info ii
|
||||
WHERE ii.table_oid = ti.table_oid
|
||||
FOR JSON PATH
|
||||
), '[]')) AS indexes,
|
||||
JSON_QUERY(ISNULL((
|
||||
SELECT
|
||||
tri.trigger_name,
|
||||
tri.trigger_definition,
|
||||
tri.trigger_enabled_state
|
||||
FROM triggers_info tri
|
||||
WHERE tri.table_oid = ti.table_oid
|
||||
FOR JSON PATH
|
||||
), '[]')) AS triggers
|
||||
FOR JSON PATH, WITHOUT_ARRAY_WRAPPER -- Creates a single JSON object for this table's details
|
||||
) AS object_details
|
||||
FROM
|
||||
table_info ti
|
||||
ORDER BY
|
||||
ti.schema_name, ti.table_name;
|
||||
parameters:
|
||||
- name: table_names
|
||||
type: string
|
||||
description: "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed."
|
||||
|
||||
toolsets:
|
||||
cloud-sql-mssql-database-tools:
|
||||
- execute_sql
|
||||
- list_tables
|
||||
170
internal/prebuiltconfigs/tools/cloud-sql-mysql.yaml
Normal file
170
internal/prebuiltconfigs/tools/cloud-sql-mysql.yaml
Normal file
@@ -0,0 +1,170 @@
|
||||
sources:
|
||||
cloud-sql-mysql-source:
|
||||
kind: cloud-sql-mysql
|
||||
project: ${CLOUD_SQL_MYSQL_PROJECT}
|
||||
region: ${CLOUD_SQL_MYSQL_REGION}
|
||||
instance: ${CLOUD_SQL_MYSQL_INSTANCE}
|
||||
database: ${CLOUD_SQL_MYSQL_DATABASE}
|
||||
user: ${CLOUD_SQL_MYSQL_USER}
|
||||
password: ${CLOUD_SQL_MYSQL_PASSWORD}
|
||||
tools:
|
||||
execute_sql:
|
||||
kind: mysql-execute-sql
|
||||
source: cloud-sql-mysql-source
|
||||
description: Use this tool to execute SQL.
|
||||
list_tables:
|
||||
kind: mysql-sql
|
||||
source: cloud-sql-mysql-source
|
||||
description: "Lists detailed schema information (object type, columns, constraints, indexes, triggers, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
|
||||
statement: |
|
||||
SELECT
|
||||
T.TABLE_SCHEMA AS schema_name,
|
||||
T.TABLE_NAME AS object_name,
|
||||
CONVERT( JSON_OBJECT(
|
||||
'schema_name', T.TABLE_SCHEMA,
|
||||
'object_name', T.TABLE_NAME,
|
||||
'object_type', 'TABLE',
|
||||
'owner', (
|
||||
SELECT
|
||||
IFNULL(U.GRANTEE, 'N/A')
|
||||
FROM
|
||||
INFORMATION_SCHEMA.SCHEMA_PRIVILEGES U
|
||||
WHERE
|
||||
U.TABLE_SCHEMA = T.TABLE_SCHEMA
|
||||
LIMIT 1
|
||||
),
|
||||
'comment', IFNULL(T.TABLE_COMMENT, ''),
|
||||
'columns', (
|
||||
SELECT
|
||||
IFNULL(
|
||||
JSON_ARRAYAGG(
|
||||
JSON_OBJECT(
|
||||
'column_name', C.COLUMN_NAME,
|
||||
'data_type', C.COLUMN_TYPE,
|
||||
'ordinal_position', C.ORDINAL_POSITION,
|
||||
'is_not_nullable', IF(C.IS_NULLABLE = 'NO', TRUE, FALSE),
|
||||
'column_default', C.COLUMN_DEFAULT,
|
||||
'column_comment', IFNULL(C.COLUMN_COMMENT, '')
|
||||
)
|
||||
),
|
||||
JSON_ARRAY()
|
||||
)
|
||||
FROM
|
||||
INFORMATION_SCHEMA.COLUMNS C
|
||||
WHERE
|
||||
C.TABLE_SCHEMA = T.TABLE_SCHEMA AND C.TABLE_NAME = T.TABLE_NAME
|
||||
ORDER BY C.ORDINAL_POSITION
|
||||
),
|
||||
'constraints', (
|
||||
SELECT
|
||||
IFNULL(
|
||||
JSON_ARRAYAGG(
|
||||
JSON_OBJECT(
|
||||
'constraint_name', TC.CONSTRAINT_NAME,
|
||||
'constraint_type',
|
||||
CASE TC.CONSTRAINT_TYPE
|
||||
WHEN 'PRIMARY KEY' THEN 'PRIMARY KEY'
|
||||
WHEN 'FOREIGN KEY' THEN 'FOREIGN KEY'
|
||||
WHEN 'UNIQUE' THEN 'UNIQUE'
|
||||
ELSE TC.CONSTRAINT_TYPE
|
||||
END,
|
||||
'constraint_definition', '',
|
||||
'constraint_columns', (
|
||||
SELECT
|
||||
IFNULL(JSON_ARRAYAGG(KCU.COLUMN_NAME), JSON_ARRAY())
|
||||
FROM
|
||||
INFORMATION_SCHEMA.KEY_COLUMN_USAGE KCU
|
||||
WHERE
|
||||
KCU.CONSTRAINT_SCHEMA = TC.CONSTRAINT_SCHEMA
|
||||
AND KCU.CONSTRAINT_NAME = TC.CONSTRAINT_NAME
|
||||
AND KCU.TABLE_NAME = TC.TABLE_NAME
|
||||
ORDER BY KCU.ORDINAL_POSITION
|
||||
),
|
||||
'foreign_key_referenced_table', IF(TC.CONSTRAINT_TYPE = 'FOREIGN KEY', RC.REFERENCED_TABLE_NAME, NULL),
|
||||
'foreign_key_referenced_columns', IF(TC.CONSTRAINT_TYPE = 'FOREIGN KEY',
|
||||
(SELECT IFNULL(JSON_ARRAYAGG(FKCU.REFERENCED_COLUMN_NAME), JSON_ARRAY())
|
||||
FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE FKCU
|
||||
WHERE FKCU.CONSTRAINT_SCHEMA = TC.CONSTRAINT_SCHEMA
|
||||
AND FKCU.CONSTRAINT_NAME = TC.CONSTRAINT_NAME
|
||||
AND FKCU.TABLE_NAME = TC.TABLE_NAME
|
||||
AND FKCU.REFERENCED_TABLE_NAME IS NOT NULL
|
||||
ORDER BY FKCU.ORDINAL_POSITION),
|
||||
NULL
|
||||
)
|
||||
)
|
||||
),
|
||||
JSON_ARRAY()
|
||||
)
|
||||
FROM
|
||||
INFORMATION_SCHEMA.TABLE_CONSTRAINTS TC
|
||||
LEFT JOIN
|
||||
INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS RC
|
||||
ON TC.CONSTRAINT_SCHEMA = RC.CONSTRAINT_SCHEMA
|
||||
AND TC.CONSTRAINT_NAME = RC.CONSTRAINT_NAME
|
||||
AND TC.TABLE_NAME = RC.TABLE_NAME
|
||||
WHERE
|
||||
TC.TABLE_SCHEMA = T.TABLE_SCHEMA AND TC.TABLE_NAME = T.TABLE_NAME
|
||||
),
|
||||
'indexes', (
|
||||
SELECT
|
||||
IFNULL(
|
||||
JSON_ARRAYAGG(
|
||||
JSON_OBJECT(
|
||||
'index_name', IndexData.INDEX_NAME,
|
||||
'is_unique', IF(IndexData.NON_UNIQUE = 0, TRUE, FALSE),
|
||||
'is_primary', IF(IndexData.INDEX_NAME = 'PRIMARY', TRUE, FALSE),
|
||||
'index_columns', IFNULL(IndexData.INDEX_COLUMNS_ARRAY, JSON_ARRAY())
|
||||
)
|
||||
),
|
||||
JSON_ARRAY()
|
||||
)
|
||||
FROM (
|
||||
SELECT
|
||||
S.TABLE_SCHEMA,
|
||||
S.TABLE_NAME,
|
||||
S.INDEX_NAME,
|
||||
MIN(S.NON_UNIQUE) AS NON_UNIQUE, -- Aggregate NON_UNIQUE here to get unique status for the index
|
||||
JSON_ARRAYAGG(S.COLUMN_NAME) AS INDEX_COLUMNS_ARRAY -- Aggregate columns into an array for this index
|
||||
FROM
|
||||
INFORMATION_SCHEMA.STATISTICS S
|
||||
WHERE
|
||||
S.TABLE_SCHEMA = T.TABLE_SCHEMA AND S.TABLE_NAME = T.TABLE_NAME
|
||||
GROUP BY
|
||||
S.TABLE_SCHEMA, S.TABLE_NAME, S.INDEX_NAME
|
||||
) AS IndexData
|
||||
ORDER BY IndexData.INDEX_NAME
|
||||
),
|
||||
'triggers', (
|
||||
SELECT
|
||||
IFNULL(
|
||||
JSON_ARRAYAGG(
|
||||
JSON_OBJECT(
|
||||
'trigger_name', TR.TRIGGER_NAME,
|
||||
'trigger_definition', TR.ACTION_STATEMENT
|
||||
)
|
||||
),
|
||||
JSON_ARRAY()
|
||||
)
|
||||
FROM
|
||||
INFORMATION_SCHEMA.TRIGGERS TR
|
||||
WHERE
|
||||
TR.EVENT_OBJECT_SCHEMA = T.TABLE_SCHEMA AND TR.EVENT_OBJECT_TABLE = T.TABLE_NAME
|
||||
ORDER BY TR.TRIGGER_NAME
|
||||
)
|
||||
) USING utf8mb4) AS object_details
|
||||
FROM
|
||||
INFORMATION_SCHEMA.TABLES T
|
||||
WHERE
|
||||
T.TABLE_SCHEMA NOT IN ('mysql', 'information_schema', 'performance_schema', 'sys')
|
||||
AND (NULLIF(TRIM(?), '') IS NULL OR FIND_IN_SET(T.TABLE_NAME, ?))
|
||||
AND T.TABLE_TYPE = 'BASE TABLE'
|
||||
ORDER BY
|
||||
T.TABLE_SCHEMA, T.TABLE_NAME;
|
||||
parameters:
|
||||
- name: table_names
|
||||
type: string
|
||||
description: "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed."
|
||||
toolsets:
|
||||
cloud-sql-mysql-database-tools:
|
||||
- execute_sql
|
||||
- list_tables
|
||||
99
internal/prebuiltconfigs/tools/cloud-sql-postgres.yaml
Normal file
99
internal/prebuiltconfigs/tools/cloud-sql-postgres.yaml
Normal file
@@ -0,0 +1,99 @@
|
||||
sources:
|
||||
cloudsql-pg-source:
|
||||
kind: cloud-sql-postgres
|
||||
project: ${CLOUD_SQL_POSTGRES_PROJECT}
|
||||
region: ${CLOUD_SQL_POSTGRES_REGION}
|
||||
instance: ${CLOUD_SQL_POSTGRES_INSTANCE}
|
||||
database: ${CLOUD_SQL_POSTGRES_DATABASE}
|
||||
user: ${CLOUD_SQL_POSTGRES_USER}
|
||||
password: ${CLOUD_SQL_POSTGRES_PASSWORD}
|
||||
|
||||
tools:
|
||||
execute_sql:
|
||||
kind: postgres-execute-sql
|
||||
source: cloudsql-pg-source
|
||||
description: Use this tool to execute sql.
|
||||
|
||||
list_tables:
|
||||
kind: postgres-sql
|
||||
source: cloudsql-pg-source
|
||||
description: "Lists detailed schema information (object type, columns, constraints, indexes, triggers, owner, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
|
||||
statement: |
|
||||
WITH desired_relkinds AS (
|
||||
SELECT ARRAY['r', 'p']::char[] AS kinds -- Always consider both 'TABLE' and 'PARTITIONED TABLE'
|
||||
),
|
||||
table_info AS (
|
||||
SELECT
|
||||
t.oid AS table_oid,
|
||||
ns.nspname AS schema_name,
|
||||
t.relname AS table_name,
|
||||
pg_get_userbyid(t.relowner) AS table_owner,
|
||||
obj_description(t.oid, 'pg_class') AS table_comment,
|
||||
t.relkind AS object_kind
|
||||
FROM
|
||||
pg_class t
|
||||
JOIN
|
||||
pg_namespace ns ON ns.oid = t.relnamespace
|
||||
CROSS JOIN desired_relkinds dk
|
||||
WHERE
|
||||
t.relkind = ANY(dk.kinds) -- Filter by selected table relkinds ('r', 'p')
|
||||
AND (NULLIF(TRIM($1), '') IS NULL OR t.relname = ANY(string_to_array($1,','))) -- $1 is object_names
|
||||
AND ns.nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
|
||||
AND ns.nspname NOT LIKE 'pg_temp_%' AND ns.nspname NOT LIKE 'pg_toast_temp_%'
|
||||
),
|
||||
columns_info AS (
|
||||
SELECT
|
||||
att.attrelid AS table_oid, att.attname AS column_name, format_type(att.atttypid, att.atttypmod) AS data_type,
|
||||
att.attnum AS column_ordinal_position, att.attnotnull AS is_not_nullable,
|
||||
pg_get_expr(ad.adbin, ad.adrelid) AS column_default, col_description(att.attrelid, att.attnum) AS column_comment
|
||||
FROM pg_attribute att LEFT JOIN pg_attrdef ad ON att.attrelid = ad.adrelid AND att.attnum = ad.adnum
|
||||
JOIN table_info ti ON att.attrelid = ti.table_oid WHERE att.attnum > 0 AND NOT att.attisdropped
|
||||
),
|
||||
constraints_info AS (
|
||||
SELECT
|
||||
con.conrelid AS table_oid, con.conname AS constraint_name, pg_get_constraintdef(con.oid) AS constraint_definition,
|
||||
CASE con.contype WHEN 'p' THEN 'PRIMARY KEY' WHEN 'f' THEN 'FOREIGN KEY' WHEN 'u' THEN 'UNIQUE' WHEN 'c' THEN 'CHECK' ELSE con.contype::text END AS constraint_type,
|
||||
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.conkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.conrelid AND att.attnum = u.attnum) AS constraint_columns,
|
||||
NULLIF(con.confrelid, 0)::regclass AS foreign_key_referenced_table,
|
||||
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.confkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.confrelid AND att.attnum = u.attnum WHERE con.contype = 'f') AS foreign_key_referenced_columns
|
||||
FROM pg_constraint con JOIN table_info ti ON con.conrelid = ti.table_oid
|
||||
),
|
||||
indexes_info AS (
|
||||
SELECT
|
||||
idx.indrelid AS table_oid, ic.relname AS index_name, pg_get_indexdef(idx.indexrelid) AS index_definition,
|
||||
idx.indisunique AS is_unique, idx.indisprimary AS is_primary, am.amname AS index_method,
|
||||
(SELECT array_agg(att.attname ORDER BY u.ord) FROM unnest(idx.indkey::int[]) WITH ORDINALITY AS u(colidx, ord) LEFT JOIN pg_attribute att ON att.attrelid = idx.indrelid AND att.attnum = u.colidx WHERE u.colidx <> 0) AS index_columns
|
||||
FROM pg_index idx JOIN pg_class ic ON ic.oid = idx.indexrelid JOIN pg_am am ON am.oid = ic.relam JOIN table_info ti ON idx.indrelid = ti.table_oid
|
||||
),
|
||||
triggers_info AS (
|
||||
SELECT tg.tgrelid AS table_oid, tg.tgname AS trigger_name, pg_get_triggerdef(tg.oid) AS trigger_definition, tg.tgenabled AS trigger_enabled_state
|
||||
FROM pg_trigger tg JOIN table_info ti ON tg.tgrelid = ti.table_oid WHERE NOT tg.tgisinternal
|
||||
)
|
||||
SELECT
|
||||
ti.schema_name,
|
||||
ti.table_name AS object_name,
|
||||
json_build_object(
|
||||
'schema_name', ti.schema_name,
|
||||
'object_name', ti.table_name,
|
||||
'object_type', CASE ti.object_kind
|
||||
WHEN 'r' THEN 'TABLE'
|
||||
WHEN 'p' THEN 'PARTITIONED TABLE'
|
||||
ELSE ti.object_kind::text -- Should not happen due to WHERE clause
|
||||
END,
|
||||
'owner', ti.table_owner,
|
||||
'comment', ti.table_comment,
|
||||
'columns', COALESCE((SELECT json_agg(json_build_object('column_name',ci.column_name,'data_type',ci.data_type,'ordinal_position',ci.column_ordinal_position,'is_not_nullable',ci.is_not_nullable,'column_default',ci.column_default,'column_comment',ci.column_comment) ORDER BY ci.column_ordinal_position) FROM columns_info ci WHERE ci.table_oid = ti.table_oid), '[]'::json),
|
||||
'constraints', COALESCE((SELECT json_agg(json_build_object('constraint_name',cons.constraint_name,'constraint_type',cons.constraint_type,'constraint_definition',cons.constraint_definition,'constraint_columns',cons.constraint_columns,'foreign_key_referenced_table',cons.foreign_key_referenced_table,'foreign_key_referenced_columns',cons.foreign_key_referenced_columns)) FROM constraints_info cons WHERE cons.table_oid = ti.table_oid), '[]'::json),
|
||||
'indexes', COALESCE((SELECT json_agg(json_build_object('index_name',ii.index_name,'index_definition',ii.index_definition,'is_unique',ii.is_unique,'is_primary',ii.is_primary,'index_method',ii.index_method,'index_columns',ii.index_columns)) FROM indexes_info ii WHERE ii.table_oid = ti.table_oid), '[]'::json),
|
||||
'triggers', COALESCE((SELECT json_agg(json_build_object('trigger_name',tri.trigger_name,'trigger_definition',tri.trigger_definition,'trigger_enabled_state',tri.trigger_enabled_state)) FROM triggers_info tri WHERE tri.table_oid = ti.table_oid), '[]'::json)
|
||||
) AS object_details
|
||||
FROM table_info ti ORDER BY ti.schema_name, ti.table_name;
|
||||
parameters:
|
||||
- name: table_names
|
||||
type: string
|
||||
description: "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed."
|
||||
|
||||
toolsets:
|
||||
cloud-sql-postgres-database-tools:
|
||||
- execute_sql
|
||||
- list_tables
|
||||
98
internal/prebuiltconfigs/tools/postgres.yaml
Normal file
98
internal/prebuiltconfigs/tools/postgres.yaml
Normal file
@@ -0,0 +1,98 @@
|
||||
sources:
|
||||
postgresql-source:
|
||||
kind: postgres
|
||||
host: ${POSTGRES_HOST}
|
||||
port: ${POSTGRES_PORT}
|
||||
database: ${POSTGRES_DATABASE}
|
||||
user: ${POSTGRES_USER}
|
||||
password: ${POSTGRES_PASSWORD}
|
||||
|
||||
tools:
|
||||
execute_sql:
|
||||
kind: postgres-execute-sql
|
||||
source: postgresql-source
|
||||
description: Use this tool to execute SQL.
|
||||
|
||||
list_tables:
|
||||
kind: postgres-sql
|
||||
source: postgresql-source
|
||||
description: "Lists detailed schema information (object type, columns, constraints, indexes, triggers, owner, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
|
||||
statement: |
|
||||
WITH desired_relkinds AS (
|
||||
SELECT ARRAY['r', 'p']::char[] AS kinds -- Always consider both 'TABLE' and 'PARTITIONED TABLE'
|
||||
),
|
||||
table_info AS (
|
||||
SELECT
|
||||
t.oid AS table_oid,
|
||||
ns.nspname AS schema_name,
|
||||
t.relname AS table_name,
|
||||
pg_get_userbyid(t.relowner) AS table_owner,
|
||||
obj_description(t.oid, 'pg_class') AS table_comment,
|
||||
t.relkind AS object_kind
|
||||
FROM
|
||||
pg_class t
|
||||
JOIN
|
||||
pg_namespace ns ON ns.oid = t.relnamespace
|
||||
CROSS JOIN desired_relkinds dk
|
||||
WHERE
|
||||
t.relkind = ANY(dk.kinds) -- Filter by selected table relkinds ('r', 'p')
|
||||
AND (NULLIF(TRIM($1), '') IS NULL OR t.relname = ANY(string_to_array($1,','))) -- $1 is object_names
|
||||
AND ns.nspname NOT IN ('pg_catalog', 'information_schema', 'pg_toast')
|
||||
AND ns.nspname NOT LIKE 'pg_temp_%' AND ns.nspname NOT LIKE 'pg_toast_temp_%'
|
||||
),
|
||||
columns_info AS (
|
||||
SELECT
|
||||
att.attrelid AS table_oid, att.attname AS column_name, format_type(att.atttypid, att.atttypmod) AS data_type,
|
||||
att.attnum AS column_ordinal_position, att.attnotnull AS is_not_nullable,
pg_get_expr(ad.adbin, ad.adrelid) AS column_default, col_description(att.attrelid, att.attnum) AS column_comment
FROM pg_attribute att LEFT JOIN pg_attrdef ad ON att.attrelid = ad.adrelid AND att.attnum = ad.adnum
JOIN table_info ti ON att.attrelid = ti.table_oid WHERE att.attnum > 0 AND NOT att.attisdropped
),
constraints_info AS (
SELECT
con.conrelid AS table_oid, con.conname AS constraint_name, pg_get_constraintdef(con.oid) AS constraint_definition,
CASE con.contype WHEN 'p' THEN 'PRIMARY KEY' WHEN 'f' THEN 'FOREIGN KEY' WHEN 'u' THEN 'UNIQUE' WHEN 'c' THEN 'CHECK' ELSE con.contype::text END AS constraint_type,
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.conkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.conrelid AND att.attnum = u.attnum) AS constraint_columns,
NULLIF(con.confrelid, 0)::regclass AS foreign_key_referenced_table,
(SELECT array_agg(att.attname ORDER BY u.attposition) FROM unnest(con.confkey) WITH ORDINALITY AS u(attnum, attposition) JOIN pg_attribute att ON att.attrelid = con.confrelid AND att.attnum = u.attnum WHERE con.contype = 'f') AS foreign_key_referenced_columns
FROM pg_constraint con JOIN table_info ti ON con.conrelid = ti.table_oid
),
indexes_info AS (
SELECT
idx.indrelid AS table_oid, ic.relname AS index_name, pg_get_indexdef(idx.indexrelid) AS index_definition,
idx.indisunique AS is_unique, idx.indisprimary AS is_primary, am.amname AS index_method,
(SELECT array_agg(att.attname ORDER BY u.ord) FROM unnest(idx.indkey::int[]) WITH ORDINALITY AS u(colidx, ord) LEFT JOIN pg_attribute att ON att.attrelid = idx.indrelid AND att.attnum = u.colidx WHERE u.colidx <> 0) AS index_columns
FROM pg_index idx JOIN pg_class ic ON ic.oid = idx.indexrelid JOIN pg_am am ON am.oid = ic.relam JOIN table_info ti ON idx.indrelid = ti.table_oid
),
triggers_info AS (
SELECT tg.tgrelid AS table_oid, tg.tgname AS trigger_name, pg_get_triggerdef(tg.oid) AS trigger_definition, tg.tgenabled AS trigger_enabled_state
FROM pg_trigger tg JOIN table_info ti ON tg.tgrelid = ti.table_oid WHERE NOT tg.tgisinternal
)
SELECT
ti.schema_name,
ti.table_name AS object_name,
json_build_object(
'schema_name', ti.schema_name,
'object_name', ti.table_name,
'object_type', CASE ti.object_kind
WHEN 'r' THEN 'TABLE'
WHEN 'p' THEN 'PARTITIONED TABLE'
ELSE ti.object_kind::text -- Should not happen due to WHERE clause
END,
'owner', ti.table_owner,
'comment', ti.table_comment,
'columns', COALESCE((SELECT json_agg(json_build_object('column_name',ci.column_name,'data_type',ci.data_type,'ordinal_position',ci.column_ordinal_position,'is_not_nullable',ci.is_not_nullable,'column_default',ci.column_default,'column_comment',ci.column_comment) ORDER BY ci.column_ordinal_position) FROM columns_info ci WHERE ci.table_oid = ti.table_oid), '[]'::json),
'constraints', COALESCE((SELECT json_agg(json_build_object('constraint_name',cons.constraint_name,'constraint_type',cons.constraint_type,'constraint_definition',cons.constraint_definition,'constraint_columns',cons.constraint_columns,'foreign_key_referenced_table',cons.foreign_key_referenced_table,'foreign_key_referenced_columns',cons.foreign_key_referenced_columns)) FROM constraints_info cons WHERE cons.table_oid = ti.table_oid), '[]'::json),
'indexes', COALESCE((SELECT json_agg(json_build_object('index_name',ii.index_name,'index_definition',ii.index_definition,'is_unique',ii.is_unique,'is_primary',ii.is_primary,'index_method',ii.index_method,'index_columns',ii.index_columns)) FROM indexes_info ii WHERE ii.table_oid = ti.table_oid), '[]'::json),
'triggers', COALESCE((SELECT json_agg(json_build_object('trigger_name',tri.trigger_name,'trigger_definition',tri.trigger_definition,'trigger_enabled_state',tri.trigger_enabled_state)) FROM triggers_info tri WHERE tri.table_oid = ti.table_oid), '[]'::json)
) AS object_details
FROM table_info ti ORDER BY ti.schema_name, ti.table_name;
parameters:
- name: table_names
type: string
description: "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed."

toolsets:
postgres-database-tools:
- execute_sql
- list_tables
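Note: the prebuilt file above ships inside the toolbox binary rather than being read from disk at runtime. The sketch below illustrates one way an embedded config can be looked up by its source-type name; the package layout, the Get function, and the error text are assumptions for illustration only, not the repository's actual implementation.

```go
// Illustrative sketch of a package that embeds the prebuilt tools.yaml
// files and returns them by source-type name.
package prebuiltconfigs

import (
	"embed"
	"fmt"
)

//go:embed tools/*.yaml
var configs embed.FS

// Get returns the raw YAML for a prebuilt source type such as
// "postgres" or "spanner-postgres". The function name and error
// message here are assumptions, not the actual API.
func Get(sourceType string) ([]byte, error) {
	data, err := configs.ReadFile(fmt.Sprintf("tools/%s.yaml", sourceType))
	if err != nil {
		return nil, fmt.Errorf("%q is not a valid prebuilt configuration", sourceType)
	}
	return data, nil
}
```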
210
internal/prebuiltconfigs/tools/spanner-postgres.yaml
Normal file
@@ -0,0 +1,210 @@
sources:
spanner-source:
kind: "spanner"
project: ${SPANNER_PROJECT}
instance: ${SPANNER_INSTANCE}
database: ${SPANNER_DATABASE}
dialect: "postgresql"

tools:
execute_sql:
kind: spanner-execute-sql
source: spanner-source
description: Use this tool to execute DML SQL. Please use the PostgreSQL interface for Spanner.

execute_sql_dql:
kind: spanner-execute-sql
source: spanner-source
description: Use this tool to execute DQL SQL. Please use the PostgreSQL interface for Spanner.
readOnly: true

list_tables:
kind: spanner-sql
source: spanner-source
readOnly: true
description: "Lists detailed schema information (object type, columns, constraints, indexes, triggers, owner, comment) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
statement: |
WITH table_info_cte AS (
SELECT
T.TABLE_SCHEMA,
T.TABLE_NAME,
T.TABLE_TYPE,
T.PARENT_TABLE_NAME,
T.ON_DELETE_ACTION
FROM INFORMATION_SCHEMA.TABLES AS T
WHERE
T.TABLE_SCHEMA = 'public'
AND T.TABLE_TYPE = 'BASE TABLE'
AND (
NULLIF(TRIM($1), '') IS NULL OR
T.TABLE_NAME IN (
SELECT table_name
FROM UNNEST(regexp_split_to_array($1, '\s*,\s*')) AS table_name)
)
),

columns_info_cte AS (
SELECT
C.TABLE_SCHEMA,
C.TABLE_NAME,
ARRAY_AGG(
CONCAT(
'{',
'"column_name":"', COALESCE(REPLACE(C.COLUMN_NAME, '"', '\"'), ''), '",',
'"data_type":"', COALESCE(REPLACE(C.SPANNER_TYPE, '"', '\"'), ''), '",',
'"ordinal_position":', C.ORDINAL_POSITION::TEXT, ',',
'"is_not_nullable":', CASE WHEN C.IS_NULLABLE = 'NO' THEN 'true' ELSE 'false' END, ',',
'"column_default":', CASE WHEN C.COLUMN_DEFAULT IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(C.COLUMN_DEFAULT::text, '"', '\"'), '"') END,
'}'
) ORDER BY C.ORDINAL_POSITION
) AS columns_json_array_elements
FROM INFORMATION_SCHEMA.COLUMNS AS C
WHERE C.TABLE_SCHEMA = 'public'
AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE C.TABLE_SCHEMA = TI.TABLE_SCHEMA AND C.TABLE_NAME = TI.TABLE_NAME)
GROUP BY C.TABLE_SCHEMA, C.TABLE_NAME
),

constraint_columns_agg_cte AS (
SELECT
CONSTRAINT_CATALOG,
CONSTRAINT_SCHEMA,
CONSTRAINT_NAME,
ARRAY_AGG('"' || REPLACE(COLUMN_NAME, '"', '\"') || '"' ORDER BY ORDINAL_POSITION) AS column_names_json_list
FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
WHERE CONSTRAINT_SCHEMA = 'public'
GROUP BY CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME
),

constraints_info_cte AS (
SELECT
TC.TABLE_SCHEMA,
TC.TABLE_NAME,
ARRAY_AGG(
CONCAT(
'{',
'"constraint_name":"', COALESCE(REPLACE(TC.CONSTRAINT_NAME, '"', '\"'), ''), '",',
'"constraint_type":"', COALESCE(REPLACE(TC.CONSTRAINT_TYPE, '"', '\"'), ''), '",',
'"constraint_definition":',
CASE TC.CONSTRAINT_TYPE
WHEN 'CHECK' THEN CASE WHEN CC.CHECK_CLAUSE IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(CC.CHECK_CLAUSE, '"', '\"'), '"') END
WHEN 'PRIMARY KEY' THEN CONCAT('"', 'PRIMARY KEY (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
WHEN 'UNIQUE' THEN CONCAT('"', 'UNIQUE (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
WHEN 'FOREIGN KEY' THEN CONCAT('"', 'FOREIGN KEY (', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ') REFERENCES ',
COALESCE(REPLACE(RefKeyTable.TABLE_NAME, '"', '\"'), ''),
' (', array_to_string(COALESCE(RefKeyCols.column_names_json_list, ARRAY[]::text[]), ', '), ')', '"')
ELSE 'null'
END, ',',
'"constraint_columns":[', array_to_string(COALESCE(KeyCols.column_names_json_list, ARRAY[]::text[]), ','), '],',
'"foreign_key_referenced_table":', CASE WHEN RefKeyTable.TABLE_NAME IS NULL THEN 'null' ELSE CONCAT('"', REPLACE(RefKeyTable.TABLE_NAME, '"', '\"'), '"') END, ',',
'"foreign_key_referenced_columns":[', array_to_string(COALESCE(RefKeyCols.column_names_json_list, ARRAY[]::text[]), ','), ']',
'}'
) ORDER BY TC.CONSTRAINT_NAME
) AS constraints_json_array_elements
FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC
LEFT JOIN INFORMATION_SCHEMA.CHECK_CONSTRAINTS AS CC
ON TC.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS RC
ON TC.CONSTRAINT_CATALOG = RC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = RC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = RC.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS RefConstraint
ON RC.UNIQUE_CONSTRAINT_CATALOG = RefConstraint.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefConstraint.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefConstraint.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.TABLES AS RefKeyTable
ON RefConstraint.TABLE_CATALOG = RefKeyTable.TABLE_CATALOG AND RefConstraint.TABLE_SCHEMA = RefKeyTable.TABLE_SCHEMA AND RefConstraint.TABLE_NAME = RefKeyTable.TABLE_NAME
LEFT JOIN constraint_columns_agg_cte AS KeyCols
ON TC.CONSTRAINT_CATALOG = KeyCols.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = KeyCols.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = KeyCols.CONSTRAINT_NAME
LEFT JOIN constraint_columns_agg_cte AS RefKeyCols
ON RC.UNIQUE_CONSTRAINT_CATALOG = RefKeyCols.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefKeyCols.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefKeyCols.CONSTRAINT_NAME AND TC.CONSTRAINT_TYPE = 'FOREIGN KEY'
WHERE TC.TABLE_SCHEMA = 'public'
AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE TC.TABLE_SCHEMA = TI.TABLE_SCHEMA AND TC.TABLE_NAME = TI.TABLE_NAME)
GROUP BY TC.TABLE_SCHEMA, TC.TABLE_NAME
),

index_key_columns_agg_cte AS (
SELECT
TABLE_CATALOG,
TABLE_SCHEMA,
TABLE_NAME,
INDEX_NAME,
ARRAY_AGG(
CONCAT(
'{"column_name":"', COALESCE(REPLACE(COLUMN_NAME, '"', '\"'), ''), '",',
'"ordering":"', COALESCE(REPLACE(COLUMN_ORDERING, '"', '\"'), ''), '"}'
) ORDER BY ORDINAL_POSITION
) AS key_column_json_details
FROM INFORMATION_SCHEMA.INDEX_COLUMNS
WHERE ORDINAL_POSITION IS NOT NULL
AND TABLE_SCHEMA = 'public'
GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
),

index_storing_columns_agg_cte AS (
SELECT
TABLE_CATALOG,
TABLE_SCHEMA,
TABLE_NAME,
INDEX_NAME,
ARRAY_AGG(CONCAT('"', REPLACE(COLUMN_NAME, '"', '\"'), '"') ORDER BY COLUMN_NAME) AS storing_column_json_names
FROM INFORMATION_SCHEMA.INDEX_COLUMNS
WHERE ORDINAL_POSITION IS NULL
AND TABLE_SCHEMA = 'public'
GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
),

indexes_info_cte AS (
SELECT
I.TABLE_SCHEMA,
I.TABLE_NAME,
ARRAY_AGG(
CONCAT(
'{',
'"index_name":"', COALESCE(REPLACE(I.INDEX_NAME, '"', '\"'), ''), '",',
'"index_type":"', COALESCE(REPLACE(I.INDEX_TYPE, '"', '\"'), ''), '",',
'"is_unique":', CASE WHEN I.IS_UNIQUE = 'YES' THEN 'true' ELSE 'false' END, ',',
'"is_null_filtered":', CASE WHEN I.IS_NULL_FILTERED = 'YES' THEN 'true' ELSE 'false' END, ',',
'"interleaved_in_table":', CASE WHEN I.PARENT_TABLE_NAME IS NULL OR I.PARENT_TABLE_NAME = '' THEN 'null' ELSE CONCAT('"', REPLACE(I.PARENT_TABLE_NAME, '"', '\"'), '"') END, ',',
'"index_key_columns":[', COALESCE(array_to_string(KeyIndexCols.key_column_json_details, ','), ''), '],',
'"storing_columns":[', COALESCE(array_to_string(StoringIndexCols.storing_column_json_names, ','), ''), ']',
'}'
) ORDER BY I.INDEX_NAME
) AS indexes_json_array_elements
FROM INFORMATION_SCHEMA.INDEXES AS I
LEFT JOIN index_key_columns_agg_cte AS KeyIndexCols
ON I.TABLE_CATALOG = KeyIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = KeyIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = KeyIndexCols.TABLE_NAME AND I.INDEX_NAME = KeyIndexCols.INDEX_NAME
LEFT JOIN index_storing_columns_agg_cte AS StoringIndexCols
ON I.TABLE_CATALOG = StoringIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = StoringIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = StoringIndexCols.TABLE_NAME AND I.INDEX_NAME = StoringIndexCols.INDEX_NAME
AND I.INDEX_TYPE IN ('LOCAL', 'GLOBAL')
WHERE I.TABLE_SCHEMA = 'public'
AND EXISTS (SELECT 1 FROM table_info_cte TI WHERE I.TABLE_SCHEMA = TI.TABLE_SCHEMA AND I.TABLE_NAME = TI.TABLE_NAME)
GROUP BY I.TABLE_SCHEMA, I.TABLE_NAME
)

SELECT
TI.TABLE_SCHEMA AS schema_name,
TI.TABLE_NAME AS object_name,
CONCAT(
'{',
'"schema_name":"', COALESCE(REPLACE(TI.TABLE_SCHEMA, '"', '\"'), ''), '",',
'"object_name":"', COALESCE(REPLACE(TI.TABLE_NAME, '"', '\"'), ''), '",',
'"object_type":"', COALESCE(REPLACE(TI.TABLE_TYPE, '"', '\"'), ''), '",',
'"columns":[', COALESCE(array_to_string(CI.columns_json_array_elements, ','), ''), '],',
'"constraints":[', COALESCE(array_to_string(CONSI.constraints_json_array_elements, ','), ''), '],',
'"indexes":[', COALESCE(array_to_string(II.indexes_json_array_elements, ','), ''), ']',
'}'
) AS object_details
FROM table_info_cte AS TI
LEFT JOIN columns_info_cte AS CI
ON TI.TABLE_SCHEMA = CI.TABLE_SCHEMA AND TI.TABLE_NAME = CI.TABLE_NAME
LEFT JOIN constraints_info_cte AS CONSI
ON TI.TABLE_SCHEMA = CONSI.TABLE_SCHEMA AND TI.TABLE_NAME = CONSI.TABLE_NAME
LEFT JOIN indexes_info_cte AS II
ON TI.TABLE_SCHEMA = II.TABLE_SCHEMA AND TI.TABLE_NAME = II.TABLE_NAME
ORDER BY TI.TABLE_SCHEMA, TI.TABLE_NAME;

parameters:
- name: table_names
type: string
description: "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed."
toolsets:
spanner-postgres-database-tools:
- execute_sql
- execute_sql_dql
- list_tables
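The ${SPANNER_PROJECT}, ${SPANNER_INSTANCE} and ${SPANNER_DATABASE} placeholders above suggest that the source settings are filled in from environment variables when the config is loaded. The self-contained sketch below shows that substitution step using standard ${VAR} expansion; it is an assumption about the mechanism for illustration, not the server's actual loading code.

```go
package main

import (
	"fmt"
	"os"
)

// expandPlaceholders replaces ${VAR} references in a raw tools.yaml
// document with values from the process environment. Unset variables
// expand to the empty string, matching os.ExpandEnv semantics.
func expandPlaceholders(rawYAML string) string {
	return os.ExpandEnv(rawYAML)
}

func main() {
	// Example value; in practice SPANNER_PROJECT, SPANNER_INSTANCE and
	// SPANNER_DATABASE would already be exported in the environment.
	os.Setenv("SPANNER_PROJECT", "my-project")

	cfg := "sources:\n  spanner-source:\n    kind: spanner\n    project: ${SPANNER_PROJECT}\n"
	fmt.Print(expandPlaceholders(cfg))
}
```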
211
internal/prebuiltconfigs/tools/spanner.yaml
Normal file
@@ -0,0 +1,211 @@
sources:
spanner-source:
kind: spanner
project: ${SPANNER_PROJECT}
instance: ${SPANNER_INSTANCE}
database: ${SPANNER_DATABASE}

tools:
execute_sql:
kind: spanner-execute-sql
source: spanner-source
description: Use this tool to execute DML SQL

execute_sql_dql:
kind: spanner-execute-sql
source: spanner-source
description: Use this tool to execute DQL SQL
readOnly: true

list_tables:
kind: spanner-sql
source: spanner-source
readOnly: true
description: "Lists detailed schema information (object type, columns, constraints, indexes) as JSON for user-created tables (ordinary or partitioned). Filters by a comma-separated list of names. If names are omitted, lists all tables in user schemas."
statement: |
WITH FilterTableNames AS (
SELECT DISTINCT TRIM(name) AS TABLE_NAME
FROM UNNEST(IF(@table_names = '' OR @table_names IS NULL, ['%'], SPLIT(@table_names, ','))) AS name
),

-- 1. Table Information
table_info_cte AS (
SELECT
T.TABLE_SCHEMA,
T.TABLE_NAME,
T.TABLE_TYPE,
T.PARENT_TABLE_NAME, -- For interleaved tables
T.ON_DELETE_ACTION -- For interleaved tables
FROM INFORMATION_SCHEMA.TABLES AS T
WHERE
T.TABLE_SCHEMA = ''
AND T.TABLE_TYPE = 'BASE TABLE'
AND (EXISTS (SELECT 1 FROM FilterTableNames WHERE FilterTableNames.TABLE_NAME = '%') OR T.TABLE_NAME IN (SELECT TABLE_NAME FROM FilterTableNames))
),

-- 2. Column Information (with JSON string for each column)
columns_info_cte AS (
SELECT
C.TABLE_SCHEMA,
C.TABLE_NAME,
ARRAY_AGG(
CONCAT(
'{',
'"column_name":"', IFNULL(C.COLUMN_NAME, ''), '",',
'"data_type":"', IFNULL(C.SPANNER_TYPE, ''), '",',
'"ordinal_position":', CAST(C.ORDINAL_POSITION AS STRING), ',',
'"is_not_nullable":', IF(C.IS_NULLABLE = 'NO', 'true', 'false'), ',',
'"column_default":', IF(C.COLUMN_DEFAULT IS NULL, 'null', CONCAT('"', C.COLUMN_DEFAULT, '"')),
'}'
) ORDER BY C.ORDINAL_POSITION
) AS columns_json_array_elements
FROM INFORMATION_SCHEMA.COLUMNS AS C
WHERE EXISTS (SELECT 1 FROM table_info_cte TI WHERE C.TABLE_SCHEMA = TI.TABLE_SCHEMA AND C.TABLE_NAME = TI.TABLE_NAME)
GROUP BY C.TABLE_SCHEMA, C.TABLE_NAME
),

-- Helper CTE for aggregating constraint columns
constraint_columns_agg_cte AS (
SELECT
CONSTRAINT_CATALOG,
CONSTRAINT_SCHEMA,
CONSTRAINT_NAME,
ARRAY_AGG(CONCAT('"', COLUMN_NAME, '"') ORDER BY ORDINAL_POSITION) AS column_names_json_list
FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE
GROUP BY CONSTRAINT_CATALOG, CONSTRAINT_SCHEMA, CONSTRAINT_NAME
),

-- 3. Constraint Information (with JSON string for each constraint)
constraints_info_cte AS (
SELECT
TC.TABLE_SCHEMA,
TC.TABLE_NAME,
ARRAY_AGG(
CONCAT(
'{',
'"constraint_name":"', IFNULL(TC.CONSTRAINT_NAME, ''), '",',
'"constraint_type":"', IFNULL(TC.CONSTRAINT_TYPE, ''), '",',
'"constraint_definition":',
CASE TC.CONSTRAINT_TYPE
WHEN 'CHECK' THEN IF(CC.CHECK_CLAUSE IS NULL, 'null', CONCAT('"', CC.CHECK_CLAUSE, '"'))
WHEN 'PRIMARY KEY' THEN CONCAT('"', 'PRIMARY KEY (', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ', '), ')', '"')
WHEN 'UNIQUE' THEN CONCAT('"', 'UNIQUE (', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ', '), ')', '"')
WHEN 'FOREIGN KEY' THEN CONCAT('"', 'FOREIGN KEY (', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ', '), ') REFERENCES ',
IFNULL(RefKeyTable.TABLE_NAME, ''),
' (', ARRAY_TO_STRING(COALESCE(RefKeyCols.column_names_json_list, []), ', '), ')', '"')
ELSE 'null'
END, ',',
'"constraint_columns":[', ARRAY_TO_STRING(COALESCE(KeyCols.column_names_json_list, []), ','), '],',
'"foreign_key_referenced_table":', IF(RefKeyTable.TABLE_NAME IS NULL, 'null', CONCAT('"', RefKeyTable.TABLE_NAME, '"')), ',',
'"foreign_key_referenced_columns":[', ARRAY_TO_STRING(COALESCE(RefKeyCols.column_names_json_list, []), ','), ']',
'}'
) ORDER BY TC.CONSTRAINT_NAME
) AS constraints_json_array_elements
FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS TC
LEFT JOIN INFORMATION_SCHEMA.CHECK_CONSTRAINTS AS CC
ON TC.CONSTRAINT_CATALOG = CC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = CC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = CC.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS RC
ON TC.CONSTRAINT_CATALOG = RC.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = RC.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = RC.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS RefConstraint
ON RC.UNIQUE_CONSTRAINT_CATALOG = RefConstraint.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefConstraint.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefConstraint.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.TABLES AS RefKeyTable
ON RefConstraint.TABLE_CATALOG = RefKeyTable.TABLE_CATALOG AND RefConstraint.TABLE_SCHEMA = RefKeyTable.TABLE_SCHEMA AND RefConstraint.TABLE_NAME = RefKeyTable.TABLE_NAME
LEFT JOIN constraint_columns_agg_cte AS KeyCols
ON TC.CONSTRAINT_CATALOG = KeyCols.CONSTRAINT_CATALOG AND TC.CONSTRAINT_SCHEMA = KeyCols.CONSTRAINT_SCHEMA AND TC.CONSTRAINT_NAME = KeyCols.CONSTRAINT_NAME
LEFT JOIN constraint_columns_agg_cte AS RefKeyCols
ON RC.UNIQUE_CONSTRAINT_CATALOG = RefKeyCols.CONSTRAINT_CATALOG AND RC.UNIQUE_CONSTRAINT_SCHEMA = RefKeyCols.CONSTRAINT_SCHEMA AND RC.UNIQUE_CONSTRAINT_NAME = RefKeyCols.CONSTRAINT_NAME AND TC.CONSTRAINT_TYPE = 'FOREIGN KEY'
WHERE EXISTS (SELECT 1 FROM table_info_cte TI WHERE TC.TABLE_SCHEMA = TI.TABLE_SCHEMA AND TC.TABLE_NAME = TI.TABLE_NAME)
GROUP BY TC.TABLE_SCHEMA, TC.TABLE_NAME
),

-- Helper CTE for aggregating index key columns (as JSON strings)
index_key_columns_agg_cte AS (
SELECT
TABLE_CATALOG,
TABLE_SCHEMA,
TABLE_NAME,
INDEX_NAME,
ARRAY_AGG(
CONCAT(
'{"column_name":"', IFNULL(COLUMN_NAME, ''), '",',
'"ordering":"', IFNULL(COLUMN_ORDERING, ''), '"}'
) ORDER BY ORDINAL_POSITION
) AS key_column_json_details
FROM INFORMATION_SCHEMA.INDEX_COLUMNS
WHERE ORDINAL_POSITION IS NOT NULL -- Key columns
GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
),

-- Helper CTE for aggregating index storing columns (as JSON strings)
index_storing_columns_agg_cte AS (
SELECT
TABLE_CATALOG,
TABLE_SCHEMA,
TABLE_NAME,
INDEX_NAME,
ARRAY_AGG(CONCAT('"', COLUMN_NAME, '"') ORDER BY COLUMN_NAME) AS storing_column_json_names
FROM INFORMATION_SCHEMA.INDEX_COLUMNS
WHERE ORDINAL_POSITION IS NULL -- Storing columns
GROUP BY TABLE_CATALOG, TABLE_SCHEMA, TABLE_NAME, INDEX_NAME
),

-- 4. Index Information (with JSON string for each index)
indexes_info_cte AS (
SELECT
I.TABLE_SCHEMA,
I.TABLE_NAME,
ARRAY_AGG(
CONCAT(
'{',
'"index_name":"', IFNULL(I.INDEX_NAME, ''), '",',
'"index_type":"', IFNULL(I.INDEX_TYPE, ''), '",',
'"is_unique":', IF(I.IS_UNIQUE, 'true', 'false'), ',',
'"is_null_filtered":', IF(I.IS_NULL_FILTERED, 'true', 'false'), ',',
'"interleaved_in_table":', IF(I.PARENT_TABLE_NAME IS NULL, 'null', CONCAT('"', I.PARENT_TABLE_NAME, '"')), ',',
'"index_key_columns":[', ARRAY_TO_STRING(COALESCE(KeyIndexCols.key_column_json_details, []), ','), '],',
'"storing_columns":[', ARRAY_TO_STRING(COALESCE(StoringIndexCols.storing_column_json_names, []), ','), ']',
'}'
) ORDER BY I.INDEX_NAME
) AS indexes_json_array_elements
FROM INFORMATION_SCHEMA.INDEXES AS I
LEFT JOIN index_key_columns_agg_cte AS KeyIndexCols
ON I.TABLE_CATALOG = KeyIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = KeyIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = KeyIndexCols.TABLE_NAME AND I.INDEX_NAME = KeyIndexCols.INDEX_NAME
LEFT JOIN index_storing_columns_agg_cte AS StoringIndexCols
ON I.TABLE_CATALOG = StoringIndexCols.TABLE_CATALOG AND I.TABLE_SCHEMA = StoringIndexCols.TABLE_SCHEMA AND I.TABLE_NAME = StoringIndexCols.TABLE_NAME AND I.INDEX_NAME = StoringIndexCols.INDEX_NAME AND I.INDEX_TYPE = 'INDEX'
WHERE EXISTS (SELECT 1 FROM table_info_cte TI WHERE I.TABLE_SCHEMA = TI.TABLE_SCHEMA AND I.TABLE_NAME = TI.TABLE_NAME)
GROUP BY I.TABLE_SCHEMA, I.TABLE_NAME
)

-- Final SELECT to build the JSON output
SELECT
TI.TABLE_SCHEMA AS schema_name,
TI.TABLE_NAME AS object_name,
CONCAT(
'{',
'"schema_name":"', IFNULL(TI.TABLE_SCHEMA, ''), '",',
'"object_name":"', IFNULL(TI.TABLE_NAME, ''), '",',
'"object_type":"', IFNULL(TI.TABLE_TYPE, ''), '",',
'"columns":[', ARRAY_TO_STRING(COALESCE(CI.columns_json_array_elements, []), ','), '],',
'"constraints":[', ARRAY_TO_STRING(COALESCE(CONSI.constraints_json_array_elements, []), ','), '],',
'"indexes":[', ARRAY_TO_STRING(COALESCE(II.indexes_json_array_elements, []), ','), '],',
|
||||
'}'
|
||||
) AS object_details
|
||||
FROM table_info_cte AS TI
|
||||
LEFT JOIN columns_info_cte AS CI
|
||||
ON TI.TABLE_SCHEMA = CI.TABLE_SCHEMA AND TI.TABLE_NAME = CI.TABLE_NAME
|
||||
LEFT JOIN constraints_info_cte AS CONSI
|
||||
ON TI.TABLE_SCHEMA = CONSI.TABLE_SCHEMA AND TI.TABLE_NAME = CONSI.TABLE_NAME
|
||||
LEFT JOIN indexes_info_cte AS II
|
||||
ON TI.TABLE_SCHEMA = II.TABLE_SCHEMA AND TI.TABLE_NAME = II.TABLE_NAME
|
||||
ORDER BY TI.TABLE_SCHEMA, TI.TABLE_NAME;
|
||||
|
||||
parameters:
|
||||
- name: table_names
|
||||
type: string
|
||||
description: "Optional: A comma-separated list of table names. If empty, details for all tables in user-accessible schemas will be listed."
|
||||
|
||||
toolsets:
|
||||
spanner-database-tools:
|
||||
- execute_sql
|
||||
- execute_sql_dql
|
||||
- list_tables
|
||||
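Since these configs are compiled into the binary, a syntax error in any of them would only surface at runtime when the corresponding --prebuilt value is used. A cheap safeguard is a test that parses every prebuilt file as YAML; the sketch below assumes the files sit under a tools/ directory next to the test and uses github.com/goccy/go-yaml purely for illustration, without asserting anything about the repository's actual test suite.

```go
package prebuiltconfigs

import (
	"os"
	"path/filepath"
	"testing"

	yaml "github.com/goccy/go-yaml"
)

// TestPrebuiltConfigsParse checks that every prebuilt tools.yaml file is
// syntactically valid YAML. It does not validate sources or tools semantically.
func TestPrebuiltConfigsParse(t *testing.T) {
	paths, err := filepath.Glob(filepath.Join("tools", "*.yaml"))
	if err != nil {
		t.Fatalf("globbing prebuilt configs: %v", err)
	}
	if len(paths) == 0 {
		t.Fatal("no prebuilt configs found under tools/")
	}
	for _, p := range paths {
		data, err := os.ReadFile(p)
		if err != nil {
			t.Fatalf("reading %s: %v", p, err)
		}
		var doc map[string]any
		if err := yaml.Unmarshal(data, &doc); err != nil {
			t.Errorf("%s is not valid YAML: %v", p, err)
		}
	}
}
```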