feat: add optional projectID parameter to bigquery tools (#799)

An optional projectID parameter enables dynamic, cross-project resource
access in the BigQuery tools.

This allows a single tool configuration to target different projects at
runtime, rather than being fixed to the project in its source
configuration.
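
A rough sketch of what this looks like from the caller's side, assuming a toolbox server running locally with the `my-get-dataset-info-tool` endpoint that the integration tests below exercise (the dataset and project IDs here are placeholders): the same tool is invoked twice, once falling back to the source's project and once overriding it.

```go
package main

import (
	"bytes"
	"fmt"
	"net/http"
)

// invoke posts a JSON body to the tool's invoke endpoint and prints the status.
func invoke(body string) error {
	resp, err := http.Post(
		"http://127.0.0.1:5000/api/tool/my-get-dataset-info-tool/invoke",
		"application/json",
		bytes.NewBufferString(body),
	)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
	return nil
}

func main() {
	// Without "project": the tool uses the project from its source configuration.
	_ = invoke(`{"dataset": "my_dataset"}`)
	// With "project": the same tool configuration reads a dataset in another project at runtime.
	_ = invoke(`{"project": "other-project", "dataset": "my_dataset"}`)
}
```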

---------

Co-authored-by: Yuan Teoh <45984206+Yuan325@users.noreply.github.com>
Co-authored-by: Wenxin Du <117315983+duwenxin99@users.noreply.github.com>
Huan Chen authored on 2025-07-08 15:02:42 -07:00, committed by GitHub
parent 04e2529ba9
commit c6ab74c5da
9 changed files with 135 additions and 35 deletions

@@ -15,8 +15,10 @@ It's compatible with the following sources:
- [bigquery](../sources/bigquery.md)
bigquery-get-dataset-info takes a dataset parameter to specify the dataset
on the given source.
`bigquery-get-dataset-info` takes a `dataset` parameter to specify the dataset
on the given source. It also optionally accepts a `project` parameter to
define the Google Cloud project ID. If the `project` parameter is not provided,
the tool defaults to using the project defined in the source configuration.
## Example

@@ -15,8 +15,10 @@ It's compatible with the following sources:
- [bigquery](../sources/bigquery.md)
bigquery-get-table-info takes dataset and table parameters to specify
the target table.
`bigquery-get-table-info` takes `dataset` and `table` parameters to specify
the target table. It also optionally accepts a `project` parameter to define
the Google Cloud project ID. If the `project` parameter is not provided, the
tool defaults to using the project defined in the source configuration.
## Example

@@ -15,8 +15,9 @@ It's compatible with the following sources:
- [bigquery](../sources/bigquery.md)
bigquery-list-dataset-ids requires no input parameters beyond the configured
source.
`bigquery-list-dataset-ids` optionally accepts a `project` parameter to define
the Google Cloud project ID. If the `project` parameter is not provided, the
tool defaults to using the project defined in the source configuration.
## Example

@@ -15,8 +15,10 @@ It's compatible with the following sources:
- [bigquery](../sources/bigquery.md)
bigquery-get-dataset-info takes a dataset parameter to specify the dataset
from which to list table IDs.
`bigquery-list-table-ids` takes a required `dataset` parameter to specify the dataset
from which to list table IDs. It also optionally accepts a `project` parameter to
define the Google Cloud project ID. If the `project` parameter is not provided, the
tool defaults to using the project defined in the source configuration.
## Example

@@ -26,6 +26,8 @@ import (
)
const kind string = "bigquery-get-dataset-info"
const projectKey string = "project"
const datasetKey string = "dataset"
func init() {
if !tools.Register(kind, newConfig) {
@@ -78,8 +80,9 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
}
datasetParameter := tools.NewStringParameter("dataset", "The dataset to get metadata information.")
parameters := tools.Parameters{datasetParameter}
projectParameter := tools.NewStringParameterWithDefault(projectKey, s.BigQueryClient().Project(), "The Google Cloud project ID containing the dataset.")
datasetParameter := tools.NewStringParameter(datasetKey, "The dataset to get metadata information.")
parameters := tools.Parameters{projectParameter, datasetParameter}
mcpManifest := tools.McpManifest{
Name: cfg.Name,
@@ -116,14 +119,18 @@ type Tool struct {
}
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, error) {
sliceParams := params.AsSlice()
datasetId, ok := sliceParams[0].(string)
mapParams := params.AsMap()
projectId, ok := mapParams[projectKey].(string)
if !ok {
return nil, fmt.Errorf("unable to get cast %s", sliceParams[0])
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", projectKey)
}
dsHandle := t.Client.Dataset(datasetId)
datasetId, ok := mapParams[datasetKey].(string)
if !ok {
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", datasetKey)
}
dsHandle := t.Client.DatasetInProject(projectId, datasetId)
metadata, err := dsHandle.Metadata(ctx)
if err != nil {
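
For context, a minimal standalone sketch of the client call this hunk switches to, assuming the `cloud.google.com/go/bigquery` package; "my-project", "other-project", and "my_dataset" are placeholders.

```go
package main

import (
	"context"
	"fmt"
	"log"

	"cloud.google.com/go/bigquery"
)

func main() {
	ctx := context.Background()
	// The client is still created against a single (default) project...
	client, err := bigquery.NewClient(ctx, "my-project")
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()

	// ...but DatasetInProject resolves the dataset in whichever project the
	// caller names, which is what the optional `project` parameter feeds into.
	md, err := client.DatasetInProject("other-project", "my_dataset").Metadata(ctx)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(md.FullID)
}
```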

@@ -26,6 +26,9 @@ import (
)
const kind string = "bigquery-get-table-info"
const projectKey string = "project"
const datasetKey string = "dataset"
const tableKey string = "table"
func init() {
if !tools.Register(kind, newConfig) {
@@ -78,9 +81,10 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
}
datasetParameter := tools.NewStringParameter("dataset", "The table's parent dataset.")
tableParameter := tools.NewStringParameter("table", "The table to get metadata information.")
parameters := tools.Parameters{datasetParameter, tableParameter}
projectParameter := tools.NewStringParameterWithDefault(projectKey, s.BigQueryClient().Project(), "The Google Cloud project ID containing the dataset and table.")
datasetParameter := tools.NewStringParameter(datasetKey, "The table's parent dataset.")
tableParameter := tools.NewStringParameter(tableKey, "The table to get metadata information.")
parameters := tools.Parameters{projectParameter, datasetParameter, tableParameter}
mcpManifest := tools.McpManifest{
Name: cfg.Name,
@@ -117,22 +121,28 @@ type Tool struct {
}
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, error) {
sliceParams := params.AsSlice()
datasetId, ok := sliceParams[0].(string)
mapParams := params.AsMap()
projectId, ok := mapParams[projectKey].(string)
if !ok {
return nil, fmt.Errorf("unable to get cast %s", sliceParams[0])
}
tableId, ok := sliceParams[1].(string)
if !ok {
return nil, fmt.Errorf("unable to get cast %s", sliceParams[1])
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", projectKey)
}
dsHandle := t.Client.Dataset(datasetId)
datasetId, ok := mapParams[datasetKey].(string)
if !ok {
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", datasetKey)
}
tableId, ok := mapParams[tableKey].(string)
if !ok {
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", tableKey)
}
dsHandle := t.Client.DatasetInProject(projectId, datasetId)
tableHandle := dsHandle.Table(tableId)
metadata, err := tableHandle.Metadata(ctx)
if err != nil {
return nil, fmt.Errorf("failed to get metadata for table %s.%s.%s: %w", t.Client.Project(), datasetId, tableId, err)
return nil, fmt.Errorf("failed to get metadata for table %s.%s.%s: %w", projectId, datasetId, tableId, err)
}
return []any{metadata}, nil
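
The table variant follows the same pattern. A compilable sketch under the same assumptions (placeholder identifiers, `cloud.google.com/go/bigquery`) shows why the error string can now name the requested project rather than `t.Client.Project()`:

```go
package example

import (
	"context"
	"fmt"

	"cloud.google.com/go/bigquery"
)

// tableInfo resolves the table in the requested project (which may differ from
// the client's default project) and returns its metadata.
func tableInfo(ctx context.Context, client *bigquery.Client, projectID, datasetID, tableID string) (*bigquery.TableMetadata, error) {
	meta, err := client.DatasetInProject(projectID, datasetID).Table(tableID).Metadata(ctx)
	if err != nil {
		// The error names the project that was actually queried.
		return nil, fmt.Errorf("failed to get metadata for table %s.%s.%s: %w", projectID, datasetID, tableID, err)
	}
	return meta, nil
}
```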

@@ -27,6 +27,7 @@ import (
)
const kind string = "bigquery-list-dataset-ids"
const projectKey string = "project"
func init() {
if !tools.Register(kind, newConfig) {
@@ -79,7 +80,9 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
}
parameters := tools.Parameters{}
projectParameter := tools.NewStringParameterWithDefault(projectKey, s.BigQueryClient().Project(), "The Google Cloud project to list dataset ids.")
parameters := tools.Parameters{projectParameter}
mcpManifest := tools.McpManifest{
Name: cfg.Name,
@@ -116,7 +119,13 @@ type Tool struct {
}
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, error) {
mapParams := params.AsMap()
projectId, ok := mapParams[projectKey].(string)
if !ok {
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", projectKey)
}
datasetIterator := t.Client.Datasets(ctx)
datasetIterator.ProjectID = projectId
var datasetIds []any
for {
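
The hunk ends before the loop body, so here is a hedged sketch of the iterator behavior the change relies on: `DatasetIterator.ProjectID` must be set before the first `Next` call, after which the listing targets the requested project (names are placeholders):

```go
package example

import (
	"context"

	"cloud.google.com/go/bigquery"
	"google.golang.org/api/iterator"
)

// listDatasetIDs lists dataset IDs in the requested project rather than the
// client's default project.
func listDatasetIDs(ctx context.Context, client *bigquery.Client, projectID string) ([]string, error) {
	it := client.Datasets(ctx)
	it.ProjectID = projectID // must be set before the first call to Next

	var ids []string
	for {
		ds, err := it.Next()
		if err == iterator.Done {
			break
		}
		if err != nil {
			return nil, err
		}
		ids = append(ids, ds.DatasetID)
	}
	return ids, nil
}
```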

@@ -27,6 +27,8 @@ import (
)
const kind string = "bigquery-list-table-ids"
const projectKey string = "project"
const datasetKey string = "dataset"
func init() {
if !tools.Register(kind, newConfig) {
@@ -79,8 +81,9 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
}
datasetParameter := tools.NewStringParameter("dataset", "The dataset to list table ids.")
parameters := tools.Parameters{datasetParameter}
projectParameter := tools.NewStringParameterWithDefault(projectKey, s.BigQueryClient().Project(), "The Google Cloud project ID containing the dataset.")
datasetParameter := tools.NewStringParameter(datasetKey, "The dataset to list table ids.")
parameters := tools.Parameters{projectParameter, datasetParameter}
mcpManifest := tools.McpManifest{
Name: cfg.Name,
@@ -117,14 +120,18 @@ type Tool struct {
}
func (t Tool) Invoke(ctx context.Context, params tools.ParamValues) ([]any, error) {
sliceParams := params.AsSlice()
datasetId, ok := sliceParams[0].(string)
mapParams := params.AsMap()
projectId, ok := mapParams[projectKey].(string)
if !ok {
return nil, fmt.Errorf("unable to get cast %s", sliceParams[0])
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", projectKey)
}
dsHandle := t.Client.Dataset(datasetId)
datasetId, ok := mapParams[datasetKey].(string)
if !ok {
return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", datasetKey)
}
dsHandle := t.Client.DatasetInProject(projectId, datasetId)
var tableIds []any
tableIterator := dsHandle.Tables(ctx)
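
The hunk is truncated before the iteration loop; a plausible continuation under the same assumptions (the loop itself is unchanged by this diff and may differ in detail) would drain the table iterator like this:

```go
package example

import (
	"context"

	"cloud.google.com/go/bigquery"
	"google.golang.org/api/iterator"
)

// listTableIDs lists table IDs from a dataset resolved in the requested project.
func listTableIDs(ctx context.Context, client *bigquery.Client, projectID, datasetID string) ([]string, error) {
	it := client.DatasetInProject(projectID, datasetID).Tables(ctx)

	var ids []string
	for {
		tbl, err := it.Next()
		if err == iterator.Done {
			break
		}
		if err != nil {
			return nil, err
		}
		ids = append(ids, tbl.TableID)
	}
	return ids, nil
}
```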

@@ -501,6 +501,21 @@ func runBigQueryListDatasetToolInvokeTest(t *testing.T, datasetWant string) {
isErr: false,
want: datasetWant,
},
{
name: "invoke my-list-dataset-ids-tool with project",
api: "http://127.0.0.1:5000/api/tool/my-auth-list-dataset-ids-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": idToken},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s\"}", BigqueryProject))),
isErr: false,
want: datasetWant,
},
{
name: "invoke my-list-dataset-ids-tool with non-existent project",
api: "http://127.0.0.1:5000/api/tool/my-auth-list-dataset-ids-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": idToken},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s-%s\"}", BigqueryProject, uuid.NewString()))),
isErr: true,
},
{
name: "invoke my-auth-list-dataset-ids-tool",
api: "http://127.0.0.1:5000/api/tool/my-auth-list-dataset-ids-tool/invoke",
@@ -585,6 +600,21 @@ func runBigQueryGetDatasetInfoToolInvokeTest(t *testing.T, datasetName, datasetI
want: datasetInfoWant,
isErr: false,
},
{
name: "Invoke my-auth-get-dataset-info-tool with correct project",
api: "http://127.0.0.1:5000/api/tool/my-auth-get-dataset-info-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": idToken},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s\", \"dataset\":\"%s\"}", BigqueryProject, datasetName))),
want: datasetInfoWant,
isErr: false,
},
{
name: "Invoke my-auth-get-dataset-info-tool with non-existent project",
api: "http://127.0.0.1:5000/api/tool/my-auth-get-dataset-info-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": idToken},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s-%s\", \"dataset\":\"%s\"}", BigqueryProject, uuid.NewString(), datasetName))),
isErr: true,
},
{
name: "invoke my-auth-get-dataset-info-tool without body",
api: "http://127.0.0.1:5000/api/tool/my-get-dataset-info-tool/invoke",
@@ -705,6 +735,21 @@ func runBigQueryListTableIdsToolInvokeTest(t *testing.T, datasetName, tablename_
want: tablename_want,
isErr: false,
},
{
name: "Invoke my-auth-list-table-ids-tool with correct project",
api: "http://127.0.0.1:5000/api/tool/my-auth-list-table-ids-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": idToken},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s\", \"dataset\":\"%s\"}", BigqueryProject, datasetName))),
want: tablename_want,
isErr: false,
},
{
name: "Invoke my-auth-list-table-ids-tool with non-existent project",
api: "http://127.0.0.1:5000/api/tool/my-auth-list-table-ids-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": idToken},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s-%s\", \"dataset\":\"%s\"}", BigqueryProject, uuid.NewString(), datasetName))),
isErr: true,
},
{
name: "Invoke my-auth-list-table-ids-tool with invalid auth token",
api: "http://127.0.0.1:5000/api/tool/my-auth-list-table-ids-tool/invoke",
@@ -810,6 +855,21 @@ func runBigQueryGetTableInfoToolInvokeTest(t *testing.T, datasetName, tableName,
want: tableInfoWant,
isErr: false,
},
{
name: "Invoke my-auth-get-table-info-tool with correct project",
api: "http://127.0.0.1:5000/api/tool/my-auth-get-table-info-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": idToken},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s\", \"dataset\":\"%s\", \"table\":\"%s\"}", BigqueryProject, datasetName, tableName))),
want: tableInfoWant,
isErr: false,
},
{
name: "Invoke my-auth-get-table-info-tool with non-existent project",
api: "http://127.0.0.1:5000/api/tool/my-auth-get-table-info-tool/invoke",
requestHeader: map[string]string{"my-google-auth_token": idToken},
requestBody: bytes.NewBuffer([]byte(fmt.Sprintf("{\"project\":\"%s-%s\", \"dataset\":\"%s\", \"table\":\"%s\"}", BigqueryProject, uuid.NewString(), datasetName, tableName))),
isErr: true,
},
{
name: "Invoke my-auth-get-table-info-tool with invalid auth token",
api: "http://127.0.0.1:5000/api/tool/my-auth-get-table-info-tool/invoke",