Mirror of https://github.com/googleapis/genai-toolbox.git, synced 2026-01-30 17:58:24 -05:00

Compare commits: skillgen...integratio

8 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | a18ab29f4a |  |
|  | feee91a18d |  |
|  | 468470098e |  |
|  | 394c3b78bc |  |
|  | 4874c12d3f |  |
|  | d0d9b78b3b |  |
|  | f6587cfaf8 |  |
|  | e0245946ea |  |
@@ -37,6 +37,7 @@ https://dev.mysql.com/doc/refman/8.4/en/sql-prepared-statements.html
https://dev.mysql.com/doc/refman/8.4/en/user-names.html

# npmjs links can occasionally trigger rate limiting during high-frequency CI builds
https://www.npmjs.com/package/@toolbox-sdk/server
https://www.npmjs.com/package/@toolbox-sdk/core
https://www.npmjs.com/package/@toolbox-sdk/adk
https://www.oceanbase.com/
@@ -23,6 +23,14 @@ To connect to the database to explore and query data, search the MCP store for t

In the Antigravity MCP Store, click the "Install" button.

> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt alloydb-postgres-admin```.

You'll now be able to see all enabled tools in the "Tools" tab.

> [!NOTE]
@@ -27,6 +27,13 @@ For AlloyDB infrastructure management, search the MCP store for the AlloyDB for
## Install & Configuration

1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt alloydb-postgres```.

2. Add the required inputs for your [cluster](https://docs.cloud.google.com/alloydb/docs/cluster-list) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
@@ -21,6 +21,13 @@ An editor configured to use the BigQuery MCP server can use its AI capabilities
## Install & Configuration

1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt bigquery```.

2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
@@ -23,6 +23,14 @@ To connect to the database to explore and query data, search the MCP store for t

In the Antigravity MCP Store, click the "Install" button.

> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mssql-admin```.

You'll now be able to see all enabled tools in the "Tools" tab.

> [!NOTE]
@@ -24,6 +24,13 @@ For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL
## Install & Configuration

1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mssql```.

2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/sqlserver/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
@@ -23,6 +23,14 @@ To connect to the database to explore and query data, search the MCP store for t

In the Antigravity MCP Store, click the "Install" button.

> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mysql-admin```.

You'll now be able to see all enabled tools in the "Tools" tab.

> [!NOTE]
@@ -26,6 +26,13 @@ For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL
## Install & Configuration

1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mysql```.

2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/mysql/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
@@ -23,6 +23,14 @@ To connect to the database to explore and query data, search the MCP store for t

In the Antigravity MCP Store, click the "Install" button.

> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-postgres-admin```.

You'll now be able to see all enabled tools in the "Tools" tab.

> [!NOTE]
@@ -26,6 +26,13 @@ For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL
## Install & Configuration

1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-postgres```.

2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/postgres/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
@@ -20,6 +20,13 @@ An editor configured to use the Dataplex MCP server can use its AI capabilities
## Install & Configuration

1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt dataplex```.

2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
@@ -21,6 +21,13 @@ An editor configured to use the Looker MCP server can use its AI capabilities to
## Install & Configuration

1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt looker```.

2. Add the required inputs for your [instance](https://docs.cloud.google.com/looker/docs/set-up-and-administer-looker) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
@@ -21,6 +21,13 @@ An editor configured to use the Cloud Spanner MCP server can use its AI capabili
## Install & Configuration

1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt spanner```.

2. Add the required inputs for your [instance](https://docs.cloud.google.com/spanner/docs/instances) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.
@@ -12,10 +12,17 @@ The MCP Toolbox for Databases Server gives AI-powered development tools the abil
## Install & Configuration

1. In the Antigravity MCP Store, click the **Install** button. A configuration window will appear.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest```.

2. Create your [`tools.yaml` configuration file](https://googleapis.github.io/genai-toolbox/getting-started/configure/).
3. Create your [`tools.yaml` configuration file](https://googleapis.github.io/genai-toolbox/getting-started/configure/).

3. In the configuration window, enter the full absolute path to your `tools.yaml` file and click **Save**.
4. In the configuration window, enter the full absolute path to your `tools.yaml` file and click **Save**.

> [!NOTE]
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.
@@ -139,13 +139,24 @@ func TestAlloyDBPgToolEndpoints(t *testing.T) {

	// set up data for param tool
	createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := tests.GetPostgresSQLParamToolInfo(tableNameParam)
	teardownTable1 := tests.SetupPostgresSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
	defer teardownTable1(t)
	teardownTable1, err := tests.SetupPostgresSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
	if teardownTable1 != nil {
		defer teardownTable1(t)
	}
	if err != nil {
		t.Fatalf("Setup failed: %v", err)
	}

	// set up data for auth tool
	createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := tests.GetPostgresSQLAuthToolInfo(tableNameAuth)
	teardownTable2 := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
	defer teardownTable2(t)

	teardownTable2, err := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
	if teardownTable2 != nil {
		defer teardownTable2(t)
	}
	if err != nil {
		t.Fatalf("Setup failed: %v", err)
	}

	// Set up table for semantic search
	vectorTableName, tearDownVectorTable := tests.SetupPostgresVectorTable(t, ctx, pool)
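Across these call sites the pattern is the same: the setup helpers now report failure through a returned error instead of calling `t.Fatalf` themselves, and the caller registers the teardown (when it is non-nil) before failing the test, so a partially created table is still dropped. A minimal sketch of that call-site shape, with hypothetical helper and type names rather than identifiers from this repository:

```go
package dbtestutil

import (
	"context"
	"testing"
)

// setupFunc stands in for helpers like tests.SetupPostgresSQLTable after the
// refactor: they return a teardown callback (possibly nil) together with an
// error instead of failing the test themselves.
type setupFunc func(t *testing.T, ctx context.Context) (func(*testing.T), error)

// runWithSetup shows the call-site ordering used above: register the teardown
// first, then fail, so partially created fixtures are still cleaned up.
func runWithSetup(t *testing.T, ctx context.Context, setup setupFunc) {
	teardown, err := setup(t, ctx)
	if teardown != nil {
		defer teardown(t)
	}
	if err != nil {
		t.Fatalf("setup failed: %v", err)
	}

	// ... exercise the tool endpoints here ...
}
```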
@@ -122,27 +122,42 @@ func TestBigQueryToolEndpoints(t *testing.T) {

	// set up data for param tool
	createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := getBigQueryParamToolInfo(tableNameParam)
	teardownTable1 := setupBigQueryTable(t, ctx, client, createParamTableStmt, insertParamTableStmt, datasetName, tableNameParam, paramTestParams)
	teardownTable1, err := setupBigQueryTable(t, ctx, client, createParamTableStmt, insertParamTableStmt, datasetName, tableNameParam, paramTestParams)
	if err != nil {
		t.Fatalf("failed to setup param table: %s", err)
	}
	defer teardownTable1(t)

	// set up data for auth tool
	createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := getBigQueryAuthToolInfo(tableNameAuth)
	teardownTable2 := setupBigQueryTable(t, ctx, client, createAuthTableStmt, insertAuthTableStmt, datasetName, tableNameAuth, authTestParams)
	teardownTable2, err := setupBigQueryTable(t, ctx, client, createAuthTableStmt, insertAuthTableStmt, datasetName, tableNameAuth, authTestParams)
	if err != nil {
		t.Fatalf("failed to setup auth table: %s", err)
	}
	defer teardownTable2(t)

	// set up data for data type test tool
	createDataTypeTableStmt, insertDataTypeTableStmt, dataTypeToolStmt, arrayDataTypeToolStmt, dataTypeTestParams := getBigQueryDataTypeTestInfo(tableNameDataType)
	teardownTable3 := setupBigQueryTable(t, ctx, client, createDataTypeTableStmt, insertDataTypeTableStmt, datasetName, tableNameDataType, dataTypeTestParams)
	teardownTable3, err := setupBigQueryTable(t, ctx, client, createDataTypeTableStmt, insertDataTypeTableStmt, datasetName, tableNameDataType, dataTypeTestParams)
	if err != nil {
		t.Fatalf("failed to setup data type table: %s", err)
	}
	defer teardownTable3(t)

	// set up data for forecast tool
	createForecastTableStmt, insertForecastTableStmt, forecastTestParams := getBigQueryForecastToolInfo(tableNameForecast)
	teardownTable4 := setupBigQueryTable(t, ctx, client, createForecastTableStmt, insertForecastTableStmt, datasetName, tableNameForecast, forecastTestParams)
	teardownTable4, err := setupBigQueryTable(t, ctx, client, createForecastTableStmt, insertForecastTableStmt, datasetName, tableNameForecast, forecastTestParams)
	if err != nil {
		t.Fatalf("failed to setup forecast table: %s", err)
	}
	defer teardownTable4(t)

	// set up data for analyze contribution tool
	createAnalyzeContributionTableStmt, insertAnalyzeContributionTableStmt, analyzeContributionTestParams := getBigQueryAnalyzeContributionToolInfo(tableNameAnalyzeContribution)
	teardownTable5 := setupBigQueryTable(t, ctx, client, createAnalyzeContributionTableStmt, insertAnalyzeContributionTableStmt, datasetName, tableNameAnalyzeContribution, analyzeContributionTestParams)
	teardownTable5, err := setupBigQueryTable(t, ctx, client, createAnalyzeContributionTableStmt, insertAnalyzeContributionTableStmt, datasetName, tableNameAnalyzeContribution, analyzeContributionTestParams)
	if err != nil {
		t.Fatalf("failed to setup analyze contribution table: %s", err)
	}
	defer teardownTable5(t)

	// Write config into a file and pass it to command
@@ -231,52 +246,79 @@ func TestBigQueryToolWithDatasetRestriction(t *testing.T) {
	// Setup allowed table
	allowedTableNameParam1 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName1, allowedTableName1)
	createAllowedTableStmt1 := fmt.Sprintf("CREATE TABLE %s (id INT64)", allowedTableNameParam1)
	teardownAllowed1 := setupBigQueryTable(t, ctx, client, createAllowedTableStmt1, "", allowedDatasetName1, allowedTableNameParam1, nil)
	teardownAllowed1, err := setupBigQueryTable(t, ctx, client, createAllowedTableStmt1, "", allowedDatasetName1, allowedTableNameParam1, nil)
	if err != nil {
		t.Fatalf("failed to setup allowed table 1: %s", err)
	}
	defer teardownAllowed1(t)

	allowedTableNameParam2 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName2, allowedTableName2)
	createAllowedTableStmt2 := fmt.Sprintf("CREATE TABLE %s (id INT64)", allowedTableNameParam2)
	teardownAllowed2 := setupBigQueryTable(t, ctx, client, createAllowedTableStmt2, "", allowedDatasetName2, allowedTableNameParam2, nil)
	teardownAllowed2, err := setupBigQueryTable(t, ctx, client, createAllowedTableStmt2, "", allowedDatasetName2, allowedTableNameParam2, nil)
	if err != nil {
		t.Fatalf("failed to setup allowed table 2: %s", err)
	}
	defer teardownAllowed2(t)

	// Setup allowed forecast table
	allowedForecastTableFullName1 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName1, allowedForecastTableName1)
	createForecastStmt1, insertForecastStmt1, forecastParams1 := getBigQueryForecastToolInfo(allowedForecastTableFullName1)
	teardownAllowedForecast1 := setupBigQueryTable(t, ctx, client, createForecastStmt1, insertForecastStmt1, allowedDatasetName1, allowedForecastTableFullName1, forecastParams1)
	teardownAllowedForecast1, err := setupBigQueryTable(t, ctx, client, createForecastStmt1, insertForecastStmt1, allowedDatasetName1, allowedForecastTableFullName1, forecastParams1)
	if err != nil {
		t.Fatalf("failed to setup allowed forecast table 1: %s", err)
	}
	defer teardownAllowedForecast1(t)

	allowedForecastTableFullName2 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName2, allowedForecastTableName2)
	createForecastStmt2, insertForecastStmt2, forecastParams2 := getBigQueryForecastToolInfo(allowedForecastTableFullName2)
	teardownAllowedForecast2 := setupBigQueryTable(t, ctx, client, createForecastStmt2, insertForecastStmt2, allowedDatasetName2, allowedForecastTableFullName2, forecastParams2)
	teardownAllowedForecast2, err := setupBigQueryTable(t, ctx, client, createForecastStmt2, insertForecastStmt2, allowedDatasetName2, allowedForecastTableFullName2, forecastParams2)
	if err != nil {
		t.Fatalf("failed to setup allowed forecast table 2: %s", err)
	}
	defer teardownAllowedForecast2(t)

	// Setup disallowed table
	disallowedTableNameParam := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, disallowedDatasetName, disallowedTableName)
	createDisallowedTableStmt := fmt.Sprintf("CREATE TABLE %s (id INT64)", disallowedTableNameParam)
	teardownDisallowed := setupBigQueryTable(t, ctx, client, createDisallowedTableStmt, "", disallowedDatasetName, disallowedTableNameParam, nil)
	teardownDisallowed, err := setupBigQueryTable(t, ctx, client, createDisallowedTableStmt, "", disallowedDatasetName, disallowedTableNameParam, nil)
	if err != nil {
		t.Fatalf("failed to setup disallowed table: %s", err)
	}
	defer teardownDisallowed(t)

	// Setup disallowed forecast table
	disallowedForecastTableFullName := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, disallowedDatasetName, disallowedForecastTableName)
	createDisallowedForecastStmt, insertDisallowedForecastStmt, disallowedForecastParams := getBigQueryForecastToolInfo(disallowedForecastTableFullName)
	teardownDisallowedForecast := setupBigQueryTable(t, ctx, client, createDisallowedForecastStmt, insertDisallowedForecastStmt, disallowedDatasetName, disallowedForecastTableFullName, disallowedForecastParams)
	teardownDisallowedForecast, err := setupBigQueryTable(t, ctx, client, createDisallowedForecastStmt, insertDisallowedForecastStmt, disallowedDatasetName, disallowedForecastTableFullName, disallowedForecastParams)
	if err != nil {
		t.Fatalf("failed to setup disallowed forecast table: %s", err)
	}
	defer teardownDisallowedForecast(t)

	// Setup allowed analyze contribution table
	allowedAnalyzeContributionTableFullName1 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName1, allowedAnalyzeContributionTableName1)
	createAnalyzeContributionStmt1, insertAnalyzeContributionStmt1, analyzeContributionParams1 := getBigQueryAnalyzeContributionToolInfo(allowedAnalyzeContributionTableFullName1)
	teardownAllowedAnalyzeContribution1 := setupBigQueryTable(t, ctx, client, createAnalyzeContributionStmt1, insertAnalyzeContributionStmt1, allowedDatasetName1, allowedAnalyzeContributionTableFullName1, analyzeContributionParams1)
	teardownAllowedAnalyzeContribution1, err := setupBigQueryTable(t, ctx, client, createAnalyzeContributionStmt1, insertAnalyzeContributionStmt1, allowedDatasetName1, allowedAnalyzeContributionTableFullName1, analyzeContributionParams1)
	if err != nil {
		t.Fatalf("failed to setup allowed analyze contribution table 1: %s", err)
	}
	defer teardownAllowedAnalyzeContribution1(t)

	allowedAnalyzeContributionTableFullName2 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName2, allowedAnalyzeContributionTableName2)
	createAnalyzeContributionStmt2, insertAnalyzeContributionStmt2, analyzeContributionParams2 := getBigQueryAnalyzeContributionToolInfo(allowedAnalyzeContributionTableFullName2)
	teardownAllowedAnalyzeContribution2 := setupBigQueryTable(t, ctx, client, createAnalyzeContributionStmt2, insertAnalyzeContributionStmt2, allowedDatasetName2, allowedAnalyzeContributionTableFullName2, analyzeContributionParams2)
	teardownAllowedAnalyzeContribution2, err := setupBigQueryTable(t, ctx, client, createAnalyzeContributionStmt2, insertAnalyzeContributionStmt2, allowedDatasetName2, allowedAnalyzeContributionTableFullName2, analyzeContributionParams2)
	if err != nil {
		t.Fatalf("failed to setup allowed analyze contribution table 2: %s", err)
	}
	defer teardownAllowedAnalyzeContribution2(t)

	// Setup disallowed analyze contribution table
	disallowedAnalyzeContributionTableFullName := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, disallowedDatasetName, disallowedAnalyzeContributionTableName)
	createDisallowedAnalyzeContributionStmt, insertDisallowedAnalyzeContributionStmt, disallowedAnalyzeContributionParams := getBigQueryAnalyzeContributionToolInfo(disallowedAnalyzeContributionTableFullName)
	teardownDisallowedAnalyzeContribution := setupBigQueryTable(t, ctx, client, createDisallowedAnalyzeContributionStmt, insertDisallowedAnalyzeContributionStmt, disallowedDatasetName, disallowedAnalyzeContributionTableFullName, disallowedAnalyzeContributionParams)
	teardownDisallowedAnalyzeContribution, err := setupBigQueryTable(t, ctx, client, createDisallowedAnalyzeContributionStmt, insertDisallowedAnalyzeContributionStmt, disallowedDatasetName, disallowedAnalyzeContributionTableFullName, disallowedAnalyzeContributionParams)
	if err != nil {
		t.Fatalf("failed to setup disallowed analyze contribution table: %s", err)
	}
	defer teardownDisallowedAnalyzeContribution(t)

	// Configure source with dataset restriction.
@@ -438,7 +480,10 @@ func TestBigQueryWriteModeBlocked(t *testing.T) {
		t.Fatalf("unable to create BigQuery connection: %s", err)
	}
	createParamTableStmt, insertParamTableStmt, _, _, _, _, paramTestParams := getBigQueryParamToolInfo(tableNameParam)
	teardownTable := setupBigQueryTable(t, ctx, client, createParamTableStmt, insertParamTableStmt, datasetName, tableNameParam, paramTestParams)
	teardownTable, err := setupBigQueryTable(t, ctx, client, createParamTableStmt, insertParamTableStmt, datasetName, tableNameParam, paramTestParams)
	if err != nil {
		t.Fatalf("failed to setup BigQuery table: %s", err)
	}
	defer teardownTable(t)

	toolsFile := map[string]any{
@@ -623,7 +668,7 @@ func getBigQueryTmplToolStatement() (string, string) {
	return tmplSelectCombined, tmplSelectFilterCombined
}

func setupBigQueryTable(t *testing.T, ctx context.Context, client *bigqueryapi.Client, createStatement, insertStatement, datasetName string, tableName string, params []bigqueryapi.QueryParameter) func(*testing.T) {
func setupBigQueryTable(t *testing.T, ctx context.Context, client *bigqueryapi.Client, createStatement, insertStatement, datasetName string, tableName string, params []bigqueryapi.QueryParameter) (func(*testing.T), error) {
	// Create dataset
	dataset := client.Dataset(datasetName)
	_, err := dataset.Metadata(ctx)

@@ -699,7 +744,7 @@ func setupBigQueryTable(t *testing.T, ctx context.Context, client *bigqueryapi.C
		} else if err != nil {
			t.Errorf("Failed to list tables in dataset %s to check emptiness: %v.", datasetName, err)
		}
	}
	}, nil
}

func addBigQueryPrebuiltToolsConfig(t *testing.T, config map[string]any) map[string]any {
@@ -124,13 +124,23 @@ func TestCloudSQLPgSimpleToolEndpoints(t *testing.T) {

	// set up data for param tool
	createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := tests.GetPostgresSQLParamToolInfo(tableNameParam)
	teardownTable1 := tests.SetupPostgresSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
	defer teardownTable1(t)
	teardownTable1, err := tests.SetupPostgresSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
	if teardownTable1 != nil {
		defer teardownTable1(t)
	}
	if err != nil {
		t.Fatalf("Setup failed: %v", err)
	}

	// set up data for auth tool
	createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := tests.GetPostgresSQLAuthToolInfo(tableNameAuth)
	teardownTable2 := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
	defer teardownTable2(t)
	teardownTable2, err := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
	if teardownTable2 != nil {
		defer teardownTable2(t)
	}
	if err != nil {
		t.Fatalf("Setup failed: %v", err)
	}

	// Set up table for semantic search
	vectorTableName, tearDownVectorTable := tests.SetupPostgresVectorTable(t, ctx, pool)
@@ -613,31 +613,36 @@ func GetMySQLWants() (string, string, string, string) {

// SetupPostgresSQLTable creates and inserts data into a table of tool
// compatible with postgres-sql tool
func SetupPostgresSQLTable(t *testing.T, ctx context.Context, pool *pgxpool.Pool, createStatement, insertStatement, tableName string, params []any) func(*testing.T) {
func SetupPostgresSQLTable(t *testing.T, ctx context.Context, pool *pgxpool.Pool, createStatement, insertStatement, tableName string, params []any) (func(*testing.T), error) {
	err := pool.Ping(ctx)
	if err != nil {
		t.Fatalf("unable to connect to test database: %s", err)
		// Return nil for the function and the error itself
		return nil, fmt.Errorf("unable to connect to test database: %w", err)
	}

	// Create table
	_, err = pool.Query(ctx, createStatement)
	_, err = pool.Exec(ctx, createStatement)
	if err != nil {
		t.Fatalf("unable to create test table %s: %s", tableName, err)
		return nil, fmt.Errorf("unable to create test table %s: %w", tableName, err)
	}

	// Insert test data
	_, err = pool.Query(ctx, insertStatement, params...)
	_, err = pool.Exec(ctx, insertStatement, params...)
	if err != nil {
		t.Fatalf("unable to insert test data: %s", err)
		// partial cleanup if insert fails
		teardown := func(t *testing.T) {
			_, _ = pool.Exec(ctx, fmt.Sprintf("DROP TABLE IF EXISTS %s;", tableName))
		}
		return teardown, fmt.Errorf("unable to insert test data: %w", err)
	}

	// Return the cleanup function and nil for error
	return func(t *testing.T) {
		// tear down test
		_, err = pool.Exec(ctx, fmt.Sprintf("DROP TABLE %s;", tableName))
		_, err = pool.Exec(ctx, fmt.Sprintf("DROP TABLE IF EXISTS %s;", tableName))
		if err != nil {
			t.Errorf("Teardown failed: %s", err)
		}
	}
	}, nil
}

// SetupMsSQLTable creates and inserts data into a table of tool
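Two details of the refactor above are worth noting: the helper now returns `(func(*testing.T), error)` so the caller decides how to fail, and the CREATE/INSERT statements go through `Exec` rather than `Query`. With pgx, `Query` is meant for statements that produce rows and hands back a `pgx.Rows` that must be iterated and closed before the pooled connection is released; `Exec` avoids that obligation for statements with no result set. A minimal sketch of the distinction, assuming pgx v5 (the repository may pin a different major version) and a hypothetical table name:

```go
package dbtestutil

import (
	"context"
	"fmt"

	"github.com/jackc/pgx/v5/pgxpool"
)

// createAndSeed mirrors the Exec-based pattern used by the refactored helper.
// The table name "example_items" is a placeholder, not a name from the repository.
func createAndSeed(ctx context.Context, pool *pgxpool.Pool) error {
	// Exec is the idiomatic call for statements that return no rows (DDL, INSERT).
	if _, err := pool.Exec(ctx, "CREATE TABLE IF NOT EXISTS example_items (id INT PRIMARY KEY, name TEXT)"); err != nil {
		return fmt.Errorf("create table: %w", err)
	}
	if _, err := pool.Exec(ctx, "INSERT INTO example_items (id, name) VALUES ($1, $2)", 1, "alpha"); err != nil {
		return fmt.Errorf("insert row: %w", err)
	}

	// Query is for statements that produce rows; the returned pgx.Rows must be
	// iterated and closed, otherwise the pooled connection is not released.
	rows, err := pool.Query(ctx, "SELECT id, name FROM example_items")
	if err != nil {
		return fmt.Errorf("select rows: %w", err)
	}
	defer rows.Close()
	for rows.Next() {
		var id int
		var name string
		if err := rows.Scan(&id, &name); err != nil {
			return fmt.Errorf("scan row: %w", err)
		}
		fmt.Println(id, name)
	}
	return rows.Err()
}
```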
@@ -89,12 +89,18 @@ func TestOracleSimpleToolEndpoints(t *testing.T) {

	// set up data for param tool
	createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := getOracleParamToolInfo(tableNameParam)
	teardownTable1 := setupOracleTable(t, ctx, db, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
	teardownTable1, err := setupOracleTable(t, ctx, db, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
	if err != nil {
		t.Fatalf("failed to setup Oracle table %s: %v", tableNameParam, err)
	}
	defer teardownTable1(t)

	// set up data for auth tool
	createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := getOracleAuthToolInfo(tableNameAuth)
	teardownTable2 := setupOracleTable(t, ctx, db, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
	teardownTable2, err := setupOracleTable(t, ctx, db, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
	if err != nil {
		t.Fatalf("failed to setup Oracle table %s: %v", tableNameAuth, err)
	}
	defer teardownTable2(t)

	// Write config into a file and pass it to command

@@ -135,31 +141,31 @@ func TestOracleSimpleToolEndpoints(t *testing.T) {
	tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam)
}

func setupOracleTable(t *testing.T, ctx context.Context, pool *sql.DB, createStatement, insertStatement, tableName string, params []any) func(*testing.T) {
func setupOracleTable(t *testing.T, ctx context.Context, pool *sql.DB, createStatement, insertStatement, tableName string, params []any) (func(*testing.T), error) {
	err := pool.PingContext(ctx)
	if err != nil {
		t.Fatalf("unable to connect to test database: %s", err)
		return nil, fmt.Errorf("unable to connect to test database: %w", err)
	}

	// Create table
	_, err = pool.QueryContext(ctx, createStatement)
	if err != nil {
		t.Fatalf("unable to create test table %s: %s", tableName, err)
		return nil, fmt.Errorf("unable to create test table %s: %w", tableName, err)
	}

	// Insert test data
	_, err = pool.QueryContext(ctx, insertStatement, params...)
	if err != nil {
		t.Fatalf("unable to insert test data: %s", err)
		return nil, fmt.Errorf("unable to insert test data: %w", err)
	}

	return func(t *testing.T) {
		// tear down test
		_, err = pool.ExecContext(ctx, fmt.Sprintf("DROP TABLE %s", tableName))
		_, err = pool.ExecContext(ctx, fmt.Sprintf("DROP TABLE %s CASCADE CONSTRAINTS", tableName))
		if err != nil {
			t.Errorf("Teardown failed: %s", err)
		}
	}
	}, nil
}

func getOracleParamToolInfo(tableName string) (string, string, string, string, string, string, []any) {
@@ -103,13 +103,24 @@ func TestPostgres(t *testing.T) {

	// set up data for param tool
	createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := tests.GetPostgresSQLParamToolInfo(tableNameParam)
	teardownTable1 := tests.SetupPostgresSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
	defer teardownTable1(t)
	// teardownTable1 := tests.SetupPostgresSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
	teardownTable1, err := tests.SetupPostgresSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
	if teardownTable1 != nil {
		defer teardownTable1(t)
	}
	if err != nil {
		t.Fatalf("Setup failed: %v", err)
	}

	// set up data for auth tool
	createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := tests.GetPostgresSQLAuthToolInfo(tableNameAuth)
	teardownTable2 := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
	defer teardownTable2(t)
	teardownTable2, err := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
	if teardownTable2 != nil {
		defer teardownTable2(t)
	}
	if err != nil {
		t.Fatalf("Setup failed: %v", err)
	}

	// Set up table for semantic search
	vectorTableName, tearDownVectorTable := tests.SetupPostgresVectorTable(t, ctx, pool)
@@ -115,23 +115,35 @@ func TestSpannerToolEndpoints(t *testing.T) {
		SpannerInstance,
		SpannerDatabase,
	)
	teardownTable1 := setupSpannerTable(t, ctx, adminClient, dataClient, createParamTableStmt, insertParamTableStmt, tableNameParam, dbString, paramTestParams)
	teardownTable1, err := setupSpannerTable(t, ctx, adminClient, dataClient, createParamTableStmt, insertParamTableStmt, tableNameParam, dbString, paramTestParams)
	if err != nil {
		t.Fatalf("failed to setup Spanner table %s: %v", tableNameParam, err)
	}
	defer teardownTable1(t)

	// set up data for auth tool
	createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := getSpannerAuthToolInfo(tableNameAuth)
	teardownTable2 := setupSpannerTable(t, ctx, adminClient, dataClient, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, dbString, authTestParams)
	teardownTable2, err := setupSpannerTable(t, ctx, adminClient, dataClient, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, dbString, authTestParams)
	if err != nil {
		t.Fatalf("failed to setup Spanner table %s: %v", tableNameAuth, err)
	}
	defer teardownTable2(t)

	// set up data for template param tool
	createStatementTmpl := fmt.Sprintf("CREATE TABLE %s (id INT64, name STRING(MAX), age INT64) PRIMARY KEY (id)", tableNameTemplateParam)
	teardownTableTmpl := setupSpannerTable(t, ctx, adminClient, dataClient, createStatementTmpl, "", tableNameTemplateParam, dbString, nil)
	teardownTableTmpl, err := setupSpannerTable(t, ctx, adminClient, dataClient, createStatementTmpl, "", tableNameTemplateParam, dbString, nil)
	if err != nil {
		t.Fatalf("failed to setup Spanner table %s: %v", tableNameTemplateParam, err)
	}
	defer teardownTableTmpl(t)

	// set up for graph tool
	nodeTableName := "node_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
	createNodeStatementTmpl := fmt.Sprintf("CREATE TABLE %s (id INT64 NOT NULL) PRIMARY KEY (id)", nodeTableName)
	teardownNodeTableTmpl := setupSpannerTable(t, ctx, adminClient, dataClient, createNodeStatementTmpl, "", nodeTableName, dbString, nil)
	teardownNodeTableTmpl, err := setupSpannerTable(t, ctx, adminClient, dataClient, createNodeStatementTmpl, "", nodeTableName, dbString, nil)
	if err != nil {
		t.Fatalf("failed to setup Spanner table %s: %v", nodeTableName, err)
	}
	defer teardownNodeTableTmpl(t)

	edgeTableName := "edge_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")

@@ -143,7 +155,10 @@ func TestSpannerToolEndpoints(t *testing.T) {
	) PRIMARY KEY (id, target_id),
	INTERLEAVE IN PARENT %[2]s ON DELETE CASCADE
	`, edgeTableName, nodeTableName)
	teardownEdgeTableTmpl := setupSpannerTable(t, ctx, adminClient, dataClient, createEdgeStatementTmpl, "", edgeTableName, dbString, nil)
	teardownEdgeTableTmpl, err := setupSpannerTable(t, ctx, adminClient, dataClient, createEdgeStatementTmpl, "", edgeTableName, dbString, nil)
	if err != nil {
		t.Fatalf("failed to setup Spanner table %s: %v", edgeTableName, err)
	}
	defer teardownEdgeTableTmpl(t)

	graphName := "graph_" + strings.ReplaceAll(uuid.New().String(), "-", "")

@@ -243,7 +258,7 @@ func getSpannerAuthToolInfo(tableName string) (string, string, string, map[strin

// setupSpannerTable creates and inserts data into a table of tool
// compatible with spanner-sql tool
func setupSpannerTable(t *testing.T, ctx context.Context, adminClient *database.DatabaseAdminClient, dataClient *spanner.Client, createStatement, insertStatement, tableName, dbString string, params map[string]any) func(*testing.T) {
func setupSpannerTable(t *testing.T, ctx context.Context, adminClient *database.DatabaseAdminClient, dataClient *spanner.Client, createStatement, insertStatement, tableName, dbString string, params map[string]any) (func(*testing.T), error) {

	// Create table
	op, err := adminClient.UpdateDatabaseDdl(ctx, &databasepb.UpdateDatabaseDdlRequest{

@@ -251,11 +266,11 @@ func setupSpannerTable(t *testing.T, ctx context.Context, adminClient *database.
		Statements: []string{createStatement},
	})
	if err != nil {
		t.Fatalf("unable to start create table operation %s: %s", tableName, err)
		return nil, fmt.Errorf("unable to start create table operation %s: %w", tableName, err)
	}
	err = op.Wait(ctx)
	if err != nil {
		t.Fatalf("unable to create test table %s: %s", tableName, err)
		return nil, fmt.Errorf("unable to create test table %s: %w", tableName, err)
	}

	// Insert test data

@@ -269,7 +284,7 @@ func setupSpannerTable(t *testing.T, ctx context.Context, adminClient *database.
			return err
		})
		if err != nil {
			t.Fatalf("unable to insert test data: %s", err)
			return nil, fmt.Errorf("unable to insert test data: %w", err)
		}
	}

@@ -288,7 +303,7 @@ func setupSpannerTable(t *testing.T, ctx context.Context, adminClient *database.
		if opErr != nil {
			t.Errorf("Teardown failed: %s", opErr)
		}
	}
	}, nil
}

// setupSpannerGraph creates a graph and inserts data into it.