Mirror of https://github.com/googleapis/genai-toolbox.git (synced 2026-02-03 11:45:10 -05:00)

Compare commits: integratio... -> refactor-q... (5 commits)
Commits: 57b8de818b, 9b4d59aed8, 3f1908a822, eef7a94977, 4c96bb5c81
Deleted file (Cloud Build config for the Go quickstart test):
@@ -1,47 +0,0 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

steps:
  - name: 'golang:1.25.1'
    id: 'go-quickstart-test'
    entrypoint: 'bash'
    args:
      # The '-c' flag tells bash to execute the following string as a command.
      # The 'set -ex' enables debug output and exits on error for easier troubleshooting.
      - -c
      - |
        set -ex
        export VERSION=$(cat ./cmd/version.txt)
        chmod +x .ci/quickstart_test/run_go_tests.sh
        .ci/quickstart_test/run_go_tests.sh
    env:
      - 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}'
      - 'GCP_PROJECT=${_GCP_PROJECT}'
      - 'DATABASE_NAME=${_DATABASE_NAME}'
      - 'DB_USER=${_DB_USER}'
    secretEnv: ['TOOLS_YAML_CONTENT', 'GOOGLE_API_KEY', 'DB_PASSWORD']

availableSecrets:
  secretManager:
    - versionName: projects/${_GCP_PROJECT}/secrets/${_TOOLS_YAML_SECRET}/versions/7
      env: 'TOOLS_YAML_CONTENT'
    - versionName: projects/${_GCP_PROJECT_NUMBER}/secrets/${_API_KEY_SECRET}/versions/latest
      env: 'GOOGLE_API_KEY'
    - versionName: projects/${_GCP_PROJECT}/secrets/${_DB_PASS_SECRET}/versions/latest
      env: 'DB_PASSWORD'

timeout: 1000s

options:
  logging: CLOUD_LOGGING_ONLY
Deleted file (Cloud Build config for the JavaScript quickstart test):
@@ -1,47 +0,0 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

steps:
  - name: 'node:22'
    id: 'js-quickstart-test'
    entrypoint: 'bash'
    args:
      # The '-c' flag tells bash to execute the following string as a command.
      # The 'set -ex' enables debug output and exits on error for easier troubleshooting.
      - -c
      - |
        set -ex
        export VERSION=$(cat ./cmd/version.txt)
        chmod +x .ci/quickstart_test/run_js_tests.sh
        .ci/quickstart_test/run_js_tests.sh
    env:
      - 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}'
      - 'GCP_PROJECT=${_GCP_PROJECT}'
      - 'DATABASE_NAME=${_DATABASE_NAME}'
      - 'DB_USER=${_DB_USER}'
    secretEnv: ['TOOLS_YAML_CONTENT', 'GOOGLE_API_KEY', 'DB_PASSWORD']

availableSecrets:
  secretManager:
    - versionName: projects/${_GCP_PROJECT}/secrets/${_TOOLS_YAML_SECRET}/versions/6
      env: 'TOOLS_YAML_CONTENT'
    - versionName: projects/${_GCP_PROJECT_NUMBER}/secrets/${_API_KEY_SECRET}/versions/latest
      env: 'GOOGLE_API_KEY'
    - versionName: projects/${_GCP_PROJECT}/secrets/${_DB_PASS_SECRET}/versions/latest
      env: 'DB_PASSWORD'

timeout: 1000s

options:
  logging: CLOUD_LOGGING_ONLY
Deleted file (Cloud Build config for the Python quickstart test):
@@ -1,47 +0,0 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

steps:
  - name: 'gcr.io/google.com/cloudsdktool/cloud-sdk:537.0.0'
    id: 'python-quickstart-test'
    entrypoint: 'bash'
    args:
      # The '-c' flag tells bash to execute the following string as a command.
      # The 'set -ex' enables debug output and exits on error for easier troubleshooting.
      - -c
      - |
        set -ex
        export VERSION=$(cat ./cmd/version.txt)
        chmod +x .ci/quickstart_test/run_py_tests.sh
        .ci/quickstart_test/run_py_tests.sh
    env:
      - 'CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}'
      - 'GCP_PROJECT=${_GCP_PROJECT}'
      - 'DATABASE_NAME=${_DATABASE_NAME}'
      - 'DB_USER=${_DB_USER}'
    secretEnv: ['TOOLS_YAML_CONTENT', 'GOOGLE_API_KEY', 'DB_PASSWORD']

availableSecrets:
  secretManager:
    - versionName: projects/${_GCP_PROJECT}/secrets/${_TOOLS_YAML_SECRET}/versions/5
      env: 'TOOLS_YAML_CONTENT'
    - versionName: projects/${_GCP_PROJECT_NUMBER}/secrets/${_API_KEY_SECRET}/versions/latest
      env: 'GOOGLE_API_KEY'
    - versionName: projects/${_GCP_PROJECT}/secrets/${_DB_PASS_SECRET}/versions/latest
      env: 'DB_PASSWORD'

timeout: 1000s

options:
  logging: CLOUD_LOGGING_ONLY
Deleted file (Go quickstart test runner script, .ci/quickstart_test/run_go_tests.sh):
@@ -1,125 +0,0 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

#!/bin/bash

set -e

TABLE_NAME="hotels_go"
QUICKSTART_GO_DIR="docs/en/getting-started/quickstart/go"
SQL_FILE=".ci/quickstart_test/setup_hotels_sample.sql"

PROXY_PID=""
TOOLBOX_PID=""

install_system_packages() {
    apt-get update && apt-get install -y \
        postgresql-client \
        wget \
        gettext-base \
        netcat-openbsd
}

start_cloud_sql_proxy() {
    wget "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy
    chmod +x /usr/local/bin/cloud-sql-proxy
    cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" &
    PROXY_PID=$!

    for i in {1..30}; do
        if nc -z 127.0.0.1 5432; then
            echo "Cloud SQL Proxy is up and running."
            return
        fi
        sleep 1
    done

    echo "Cloud SQL Proxy failed to start within the timeout period."
    exit 1
}

setup_toolbox() {
    TOOLBOX_YAML="/tools.yaml"
    echo "${TOOLS_YAML_CONTENT}" > "$TOOLBOX_YAML"
    if [ ! -f "$TOOLBOX_YAML" ]; then echo "Failed to create tools.yaml"; exit 1; fi
    wget "https://storage.googleapis.com/genai-toolbox/v${VERSION}/linux/amd64/toolbox" -O "/toolbox"
    chmod +x "/toolbox"
    /toolbox --tools-file "$TOOLBOX_YAML" &
    TOOLBOX_PID=$!
    sleep 2
}

setup_orch_table() {
    export TABLE_NAME
    envsubst < "$SQL_FILE" | psql -h "$PGHOST" -p "$PGPORT" -U "$DB_USER" -d "$DATABASE_NAME"
}

run_orch_test() {
    local orch_dir="$1"
    local orch_name
    orch_name=$(basename "$orch_dir")

    if [ "$orch_name" == "openAI" ]; then
        echo -e "\nSkipping framework '${orch_name}': Temporarily excluded."
        return
    fi

    (
        set -e
        setup_orch_table

        echo "--- Preparing module for $orch_name ---"
        cd "$orch_dir"

        if [ -f "go.mod" ]; then
            go mod tidy
        fi

        cd ..

        export ORCH_NAME="$orch_name"

        echo "--- Running tests for $orch_name ---"
        go test -v ./...
    )
}

cleanup_all() {
    echo "--- Final cleanup: Shutting down processes and dropping table ---"
    if [ -n "$TOOLBOX_PID" ]; then
        kill $TOOLBOX_PID || true
    fi
    if [ -n "$PROXY_PID" ]; then
        kill $PROXY_PID || true
    fi
}
trap cleanup_all EXIT

# Main script execution
install_system_packages
start_cloud_sql_proxy

export PGHOST=127.0.0.1
export PGPORT=5432
export PGPASSWORD="$DB_PASSWORD"
export GOOGLE_API_KEY="$GOOGLE_API_KEY"

setup_toolbox

for ORCH_DIR in "$QUICKSTART_GO_DIR"/*/; do
    if [ ! -d "$ORCH_DIR" ]; then
        continue
    fi
    run_orch_test "$ORCH_DIR"
done
Deleted file (JavaScript quickstart test runner script, .ci/quickstart_test/run_js_tests.sh):
@@ -1,125 +0,0 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

#!/bin/bash

set -e

TABLE_NAME="hotels_js"
QUICKSTART_JS_DIR="docs/en/getting-started/quickstart/js"
SQL_FILE=".ci/quickstart_test/setup_hotels_sample.sql"

# Initialize process IDs to empty at the top of the script
PROXY_PID=""
TOOLBOX_PID=""

install_system_packages() {
    apt-get update && apt-get install -y \
        postgresql-client \
        wget \
        gettext-base \
        netcat-openbsd
}

start_cloud_sql_proxy() {
    wget "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy
    chmod +x /usr/local/bin/cloud-sql-proxy
    cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" &
    PROXY_PID=$!

    for i in {1..30}; do
        if nc -z 127.0.0.1 5432; then
            echo "Cloud SQL Proxy is up and running."
            return
        fi
        sleep 1
    done

    echo "Cloud SQL Proxy failed to start within the timeout period."
    exit 1
}

setup_toolbox() {
    TOOLBOX_YAML="/tools.yaml"
    echo "${TOOLS_YAML_CONTENT}" > "$TOOLBOX_YAML"
    if [ ! -f "$TOOLBOX_YAML" ]; then echo "Failed to create tools.yaml"; exit 1; fi
    wget "https://storage.googleapis.com/genai-toolbox/v${VERSION}/linux/amd64/toolbox" -O "/toolbox"
    chmod +x "/toolbox"
    /toolbox --tools-file "$TOOLBOX_YAML" &
    TOOLBOX_PID=$!
    sleep 2
}

setup_orch_table() {
    export TABLE_NAME
    envsubst < "$SQL_FILE" | psql -h "$PGHOST" -p "$PGPORT" -U "$DB_USER" -d "$DATABASE_NAME"
}

run_orch_test() {
    local orch_dir="$1"
    local orch_name
    orch_name=$(basename "$orch_dir")

    (
        set -e
        echo "--- Preparing environment for $orch_name ---"
        setup_orch_table

        cd "$orch_dir"
        echo "Installing dependencies for $orch_name..."
        if [ -f "package-lock.json" ]; then
            npm ci
        else
            npm install
        fi

        cd ..

        echo "--- Running tests for $orch_name ---"
        export ORCH_NAME="$orch_name"
        node --test quickstart.test.js

        echo "--- Cleaning environment for $orch_name ---"
        rm -rf "${orch_name}/node_modules"
    )
}

cleanup_all() {
    echo "--- Final cleanup: Shutting down processes and dropping table ---"
    if [ -n "$TOOLBOX_PID" ]; then
        kill $TOOLBOX_PID || true
    fi
    if [ -n "$PROXY_PID" ]; then
        kill $PROXY_PID || true
    fi
}
trap cleanup_all EXIT

# Main script execution
install_system_packages
start_cloud_sql_proxy

export PGHOST=127.0.0.1
export PGPORT=5432
export PGPASSWORD="$DB_PASSWORD"
export GOOGLE_API_KEY="$GOOGLE_API_KEY"

setup_toolbox

for ORCH_DIR in "$QUICKSTART_JS_DIR"/*/; do
    if [ ! -d "$ORCH_DIR" ]; then
        continue
    fi
    run_orch_test "$ORCH_DIR"
done
Deleted file (Python quickstart test runner script, .ci/quickstart_test/run_py_tests.sh):
@@ -1,115 +0,0 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

#!/bin/bash

set -e

TABLE_NAME="hotels_python"
QUICKSTART_PYTHON_DIR="docs/en/getting-started/quickstart/python"
SQL_FILE=".ci/quickstart_test/setup_hotels_sample.sql"

PROXY_PID=""
TOOLBOX_PID=""

install_system_packages() {
    apt-get update && apt-get install -y \
        postgresql-client \
        python3-venv \
        wget \
        gettext-base \
        netcat-openbsd
}

start_cloud_sql_proxy() {
    wget "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy
    chmod +x /usr/local/bin/cloud-sql-proxy
    cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" &
    PROXY_PID=$!

    for i in {1..30}; do
        if nc -z 127.0.0.1 5432; then
            echo "Cloud SQL Proxy is up and running."
            return
        fi
        sleep 1
    done

    echo "Cloud SQL Proxy failed to start within the timeout period."
    exit 1
}

setup_toolbox() {
    TOOLBOX_YAML="/tools.yaml"
    echo "${TOOLS_YAML_CONTENT}" > "$TOOLBOX_YAML"
    if [ ! -f "$TOOLBOX_YAML" ]; then echo "Failed to create tools.yaml"; exit 1; fi
    wget "https://storage.googleapis.com/genai-toolbox/v${VERSION}/linux/amd64/toolbox" -O "/toolbox"
    chmod +x "/toolbox"
    /toolbox --tools-file "$TOOLBOX_YAML" &
    TOOLBOX_PID=$!
    sleep 2
}

setup_orch_table() {
    export TABLE_NAME
    envsubst < "$SQL_FILE" | psql -h "$PGHOST" -p "$PGPORT" -U "$DB_USER" -d "$DATABASE_NAME"
}

run_orch_test() {
    local orch_dir="$1"
    local orch_name
    orch_name=$(basename "$orch_dir")
    (
        set -e
        setup_orch_table
        cd "$orch_dir"
        local VENV_DIR=".venv"
        python3 -m venv "$VENV_DIR"
        source "$VENV_DIR/bin/activate"
        pip install -r requirements.txt
        echo "--- Running tests for $orch_name ---"
        cd ..
        ORCH_NAME="$orch_name" pytest
        rm -rf "$VENV_DIR"
    )
}

cleanup_all() {
    echo "--- Final cleanup: Shutting down processes and dropping table ---"
    if [ -n "$TOOLBOX_PID" ]; then
        kill $TOOLBOX_PID || true
    fi
    if [ -n "$PROXY_PID" ]; then
        kill $PROXY_PID || true
    fi
}
trap cleanup_all EXIT

# Main script execution
install_system_packages
start_cloud_sql_proxy

export PGHOST=127.0.0.1
export PGPORT=5432
export PGPASSWORD="$DB_PASSWORD"
export GOOGLE_API_KEY="$GOOGLE_API_KEY"

setup_toolbox

for ORCH_DIR in "$QUICKSTART_PYTHON_DIR"/*/; do
    if [ ! -d "$ORCH_DIR" ]; then
        continue
    fi
    run_orch_test "$ORCH_DIR"
done
.ci/universal/integration.cloudbuild.yaml (new file, 59 lines)
@@ -0,0 +1,59 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

steps:
  - name: "${_IMAGE}"
    id: "universal-test"
    entrypoint: "bash"
    args:
      - -c
      - |
        set -ex
        chmod +x .ci/universal/run_tests.sh
        .ci/universal/run_tests.sh
    env:
      - "CLOUD_SQL_INSTANCE=${_CLOUD_SQL_INSTANCE}"
      - "GCP_PROJECT=${_GCP_PROJECT}"
      - "DATABASE_NAME=${_DATABASE_NAME}"
      - "DB_USER=${_DB_USER}"
      - "TARGET_ROOT=${_TARGET_ROOT}"
      - "TARGET_LANG=${_TARGET_LANG}"
      - "TABLE_NAME=${_TABLE_NAME}"
      - "SQL_FILE=${_SQL_FILE}"
      - "AGENT_FILE_PATTERN=${_AGENT_FILE_PATTERN}"
    secretEnv: ["TOOLS_YAML_CONTENT", "GOOGLE_API_KEY", "DB_PASSWORD"]

availableSecrets:
  secretManager:
    - versionName: projects/${_GCP_PROJECT}/secrets/${_TOOLS_YAML_SECRET}/versions/5
      env: "TOOLS_YAML_CONTENT"
    - versionName: projects/${_GCP_PROJECT_NUMBER}/secrets/${_API_KEY_SECRET}/versions/latest
      env: "GOOGLE_API_KEY"
    - versionName: projects/${_GCP_PROJECT}/secrets/${_DB_PASS_SECRET}/versions/latest
      env: "DB_PASSWORD"

timeout: 1200s

substitutions:
  _TARGET_LANG: "python"
  _IMAGE: "python:3.11"
  _TARGET_ROOT: "docs/en/getting-started/quickstart/python"
  _TABLE_NAME: "hotels_python"
  _SQL_FILE: ".ci/universal/setup_hotels.sql"
  _AGENT_FILE_PATTERN: "quickstart.py"
  _LOG_BUCKET: "toolbox-test-logs"

options:
  logging: CLOUD_LOGGING_ONLY
.ci/universal/run_tests.sh (new file, 173 lines)
@@ -0,0 +1,173 @@
#!/bin/bash
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -e

# --- Configuration (from Environment Variables) ---
# TARGET_ROOT: The directory to search for tests (e.g., docs/en/getting-started/quickstart/js)
# TARGET_LANG: python, js, go
# TABLE_NAME: Database table name to use
# SQL_FILE: Path to the SQL setup file
# AGENT_FILE_PATTERN: Filename to look for (e.g., quickstart.js or agent.py)

VERSION=$(cat ./cmd/version.txt)

# Process IDs & Logs
PROXY_PID=""
TOOLBOX_PID=""
PROXY_LOG="cloud_sql_proxy.log"
TOOLBOX_LOG="toolbox_server.log"

install_system_packages() {
    echo "Installing system packages..."
    apt-get update && apt-get install -y \
        postgresql-client \
        wget \
        gettext-base \
        netcat-openbsd

    if [[ "$TARGET_LANG" == "python" ]]; then
        apt-get install -y python3-venv
    fi
}

start_cloud_sql_proxy() {
    echo "Starting Cloud SQL Proxy..."
    wget -q "https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.10.0/cloud-sql-proxy.linux.amd64" -O /usr/local/bin/cloud-sql-proxy
    chmod +x /usr/local/bin/cloud-sql-proxy
    cloud-sql-proxy "${CLOUD_SQL_INSTANCE}" > "$PROXY_LOG" 2>&1 &
    PROXY_PID=$!

    # Health Check
    for i in {1..30}; do
        if nc -z 127.0.0.1 5432; then
            echo "Cloud SQL Proxy is up and running."
            return
        fi
        sleep 1
    done
    echo "ERROR: Cloud SQL Proxy failed to start. Logs:"
    cat "$PROXY_LOG"
    exit 1
}

setup_toolbox() {
    echo "Setting up Toolbox server..."
    TOOLBOX_YAML="/tools.yaml"
    echo "${TOOLS_YAML_CONTENT}" > "$TOOLBOX_YAML"
    wget -q "https://storage.googleapis.com/genai-toolbox/v${VERSION}/linux/amd64/toolbox" -O "/toolbox"
    chmod +x "/toolbox"
    /toolbox --tools-file "$TOOLBOX_YAML" > "$TOOLBOX_LOG" 2>&1 &
    TOOLBOX_PID=$!

    # Health Check
    for i in {1..15}; do
        if nc -z 127.0.0.1 5000; then
            echo "Toolbox server is up and running."
            return
        fi
        sleep 1
    done
    echo "ERROR: Toolbox server failed to start. Logs:"
    cat "$TOOLBOX_LOG"
    exit 1
}

setup_db_table() {
    echo "Setting up database table $TABLE_NAME using $SQL_FILE..."
    export TABLE_NAME
    envsubst < "$SQL_FILE" | psql -h 127.0.0.1 -p 5432 -U "$DB_USER" -d "$DATABASE_NAME"
}

run_python_test() {
    local dir=$1
    local name=$(basename "$dir")
    echo "--- Running Python Test: $name ---"
    (
        cd "$dir"
        python3 -m venv .venv
        source .venv/bin/activate
        pip install -q -r requirements.txt pytest

        cd ..
        # If there is a pytest file in the parent directory (like agent_test.py or quickstart_test.py)
        # we use it. Otherwise we just run the agent.
        local test_file=$(find . -maxdepth 1 -name "*test.py" | head -n 1)
        if [ -n "$test_file" ]; then
            echo "Found native test: $test_file. Running pytest..."
            export ORCH_NAME="$name"
            export PYTHONPATH="../"
            pytest "$test_file"
        else
            echo "No native test found. running agent directly..."
            export PYTHONPATH="../"
            python3 "${name}/${AGENT_FILE_PATTERN}"
        fi
        rm -rf "${name}/.venv"
    )
}

run_js_test() {
    local dir=$1
    local name=$(basename "$dir")
    echo "--- Running JS Test: $name ---"
    (
        cd "$dir"
        if [ -f "package-lock.json" ]; then npm ci -q; else npm install -q; fi

        cd ..
        # Looking for a JS test file in the parent directory
        local test_file=$(find . -maxdepth 1 -name "*test.js" | head -n 1)
        if [ -n "$test_file" ]; then
            echo "Found native test: $test_file. Running node --test..."
            export ORCH_NAME="$name"
            node --test "$test_file"
        else
            echo "No native test found. running agent directly..."
            node "${name}/${AGENT_FILE_PATTERN}"
        fi
        rm -rf "${name}/node_modules"
    )
}

cleanup() {
    echo "Cleaning up background processes..."
    [ -n "$TOOLBOX_PID" ] && kill "$TOOLBOX_PID" || true
    [ -n "$PROXY_PID" ] && kill "$PROXY_PID" || true
}
trap cleanup EXIT

# --- Execution ---
install_system_packages
start_cloud_sql_proxy

export PGHOST=127.0.0.1
export PGPORT=5432
export PGPASSWORD="$DB_PASSWORD"
export GOOGLE_API_KEY="$GOOGLE_API_KEY"

setup_toolbox
setup_db_table

echo "Scanning $TARGET_ROOT for tests with pattern $AGENT_FILE_PATTERN..."

find "$TARGET_ROOT" -name "$AGENT_FILE_PATTERN" | while read -r agent_file; do
    sample_dir=$(dirname "$agent_file")
    if [[ "$TARGET_LANG" == "python" ]]; then
        run_python_test "$sample_dir"
    elif [[ "$TARGET_LANG" == "js" ]]; then
        run_js_test "$sample_dir"
    fi
done
@@ -37,7 +37,6 @@ https://dev.mysql.com/doc/refman/8.4/en/sql-prepared-statements.html
 https://dev.mysql.com/doc/refman/8.4/en/user-names.html
 
 # npmjs links can occasionally trigger rate limiting during high-frequency CI builds
-https://www.npmjs.com/package/@toolbox-sdk/server
 https://www.npmjs.com/package/@toolbox-sdk/core
 https://www.npmjs.com/package/@toolbox-sdk/adk
 https://www.oceanbase.com/
@@ -53,7 +53,7 @@ export async function main() {
 
   for (const query of queries) {
     conversationHistory.push({ role: "user", content: [{ text: query }] });
-    const response = await ai.generate({
+    let response = await ai.generate({
       messages: conversationHistory,
       tools: tools,
     });
@@ -13,12 +13,12 @@ The `invoke` command allows you to invoke tools defined in your configuration di
 
 {{< notice tip >}}
 **Keep configurations minimal:** The `invoke` command initializes *all* resources (sources, tools, etc.) defined in your configuration files during execution. To ensure fast response times, consider using a minimal configuration file containing only the tools you need for the specific invocation.
-{{< notice tip >}}
+{{< /notice >}}
 
-## Prerequisites
+## Before you begin
 
-- You have the `toolbox` binary installed or built.
-- You have a valid tool configuration file (e.g., `tools.yaml`).
+1. Make sure you have the `toolbox` binary installed or built.
+2. Make sure you have a valid tool configuration file (e.g., `tools.yaml`).
 
 ## Basic Usage
@@ -414,10 +414,10 @@ See [Usage Examples](../reference/cli.md#examples).
   entries.
 * **Dataplex Editor** (`roles/dataplex.editor`) to modify entries.
 * **Tools:**
-  * `dataplex_search_entries`: Searches for entries in Dataplex Catalog.
-  * `dataplex_lookup_entry`: Retrieves a specific entry from Dataplex
+  * `search_entries`: Searches for entries in Dataplex Catalog.
+  * `lookup_entry`: Retrieves a specific entry from Dataplex
     Catalog.
-  * `dataplex_search_aspect_types`: Finds aspect types relevant to the
+  * `search_aspect_types`: Finds aspect types relevant to the
     query.
 
 ## Firestore
@@ -139,24 +139,13 @@ func TestAlloyDBPgToolEndpoints(t *testing.T) {
 
 	// set up data for param tool
 	createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := tests.GetPostgresSQLParamToolInfo(tableNameParam)
-	teardownTable1, err := tests.SetupPostgresSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
-	if teardownTable1 != nil {
-		defer teardownTable1(t)
-	}
-	if err != nil {
-		t.Fatalf("Setup failed: %v", err)
-	}
+	teardownTable1 := tests.SetupPostgresSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
+	defer teardownTable1(t)
 
 	// set up data for auth tool
 	createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := tests.GetPostgresSQLAuthToolInfo(tableNameAuth)
-	teardownTable2, err := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
-	if teardownTable2 != nil {
-		defer teardownTable2(t)
-	}
-	if err != nil {
-		t.Fatalf("Setup failed: %v", err)
-	}
+	teardownTable2 := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
+	defer teardownTable2(t)
 
 	// Set up table for semanti search
 	vectorTableName, tearDownVectorTable := tests.SetupPostgresVectorTable(t, ctx, pool)
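This hunk, and the BigQuery, Cloud SQL, and Oracle hunks that follow, make the same call-site change: the table-setup helpers now abort the test themselves on failure, so each caller shrinks to an assignment plus a deferred teardown. A minimal self-contained sketch of that pattern, using database/sql and placeholder statements and names rather than the repository's actual helpers or fixtures:

package quickstart_test

import (
	"context"
	"database/sql"
	"fmt"
	"testing"

	_ "github.com/jackc/pgx/v5/stdlib" // registers the "pgx" database/sql driver
)

// setupTable mirrors the refactored helper shape: it fails the test directly
// via t.Fatalf on any setup error and returns only a cleanup function.
// The name and signature are illustrative, not the repository's API.
func setupTable(t *testing.T, ctx context.Context, db *sql.DB, createStmt, insertStmt, tableName string, params ...any) func(*testing.T) {
	t.Helper()
	if err := db.PingContext(ctx); err != nil {
		t.Fatalf("unable to connect to test database: %s", err)
	}
	if _, err := db.ExecContext(ctx, createStmt); err != nil {
		t.Fatalf("unable to create test table %s: %s", tableName, err)
	}
	if _, err := db.ExecContext(ctx, insertStmt, params...); err != nil {
		t.Fatalf("unable to insert test data: %s", err)
	}
	return func(t *testing.T) {
		if _, err := db.ExecContext(ctx, fmt.Sprintf("DROP TABLE %s", tableName)); err != nil {
			t.Errorf("Teardown failed: %s", err)
		}
	}
}

func TestToolEndpointsPattern(t *testing.T) {
	t.Skip("sketch only: requires a reachable test database")
	ctx := context.Background()

	db, err := sql.Open("pgx", "postgres://user:password@127.0.0.1:5432/testdb") // placeholder DSN
	if err != nil {
		t.Fatalf("unable to open connection: %s", err)
	}
	defer db.Close()

	// Setup either succeeds or stops the test inside the helper, so the
	// call site no longer checks an error or guards against a nil teardown.
	teardown := setupTable(t, ctx, db, "CREATE TABLE hotels (id INT)", "INSERT INTO hotels VALUES ($1)", "hotels", 1)
	defer teardown(t)

	// ... exercise the tool endpoints against the seeded table ...
}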
@@ -84,6 +84,7 @@ func TestBigQueryToolEndpoints(t *testing.T) {
 	if err != nil {
 		t.Fatalf("unable to create Cloud SQL connection pool: %s", err)
 	}
 
 	// create table name with UUID
 	datasetName := fmt.Sprintf("temp_toolbox_test_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
 	tableName := fmt.Sprintf("param_table_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
@@ -121,42 +122,27 @@ func TestBigQueryToolEndpoints(t *testing.T) {
 
 	// set up data for param tool
 	createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := getBigQueryParamToolInfo(tableNameParam)
-	teardownTable1, err := setupBigQueryTable(t, ctx, client, createParamTableStmt, insertParamTableStmt, datasetName, tableNameParam, paramTestParams)
-	if err != nil {
-		t.Fatalf("failed to setup param table: %s", err)
-	}
+	teardownTable1 := setupBigQueryTable(t, ctx, client, createParamTableStmt, insertParamTableStmt, datasetName, tableNameParam, paramTestParams)
 	defer teardownTable1(t)
 
 	// set up data for auth tool
 	createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := getBigQueryAuthToolInfo(tableNameAuth)
-	teardownTable2, err := setupBigQueryTable(t, ctx, client, createAuthTableStmt, insertAuthTableStmt, datasetName, tableNameAuth, authTestParams)
-	if err != nil {
-		t.Fatalf("failed to setup auth table: %s", err)
-	}
+	teardownTable2 := setupBigQueryTable(t, ctx, client, createAuthTableStmt, insertAuthTableStmt, datasetName, tableNameAuth, authTestParams)
 	defer teardownTable2(t)
 
 	// set up data for data type test tool
 	createDataTypeTableStmt, insertDataTypeTableStmt, dataTypeToolStmt, arrayDataTypeToolStmt, dataTypeTestParams := getBigQueryDataTypeTestInfo(tableNameDataType)
-	teardownTable3, err := setupBigQueryTable(t, ctx, client, createDataTypeTableStmt, insertDataTypeTableStmt, datasetName, tableNameDataType, dataTypeTestParams)
-	if err != nil {
-		t.Fatalf("failed to setup data type table: %s", err)
-	}
+	teardownTable3 := setupBigQueryTable(t, ctx, client, createDataTypeTableStmt, insertDataTypeTableStmt, datasetName, tableNameDataType, dataTypeTestParams)
 	defer teardownTable3(t)
 
 	// set up data for forecast tool
 	createForecastTableStmt, insertForecastTableStmt, forecastTestParams := getBigQueryForecastToolInfo(tableNameForecast)
-	teardownTable4, err := setupBigQueryTable(t, ctx, client, createForecastTableStmt, insertForecastTableStmt, datasetName, tableNameForecast, forecastTestParams)
-	if err != nil {
-		t.Fatalf("failed to setup forecast table: %s", err)
-	}
+	teardownTable4 := setupBigQueryTable(t, ctx, client, createForecastTableStmt, insertForecastTableStmt, datasetName, tableNameForecast, forecastTestParams)
 	defer teardownTable4(t)
 
 	// set up data for analyze contribution tool
 	createAnalyzeContributionTableStmt, insertAnalyzeContributionTableStmt, analyzeContributionTestParams := getBigQueryAnalyzeContributionToolInfo(tableNameAnalyzeContribution)
-	teardownTable5, err := setupBigQueryTable(t, ctx, client, createAnalyzeContributionTableStmt, insertAnalyzeContributionTableStmt, datasetName, tableNameAnalyzeContribution, analyzeContributionTestParams)
-	if err != nil {
-		t.Fatalf("failed to setup analyze contribution table: %s", err)
-	}
+	teardownTable5 := setupBigQueryTable(t, ctx, client, createAnalyzeContributionTableStmt, insertAnalyzeContributionTableStmt, datasetName, tableNameAnalyzeContribution, analyzeContributionTestParams)
 	defer teardownTable5(t)
 
 	// Write config into a file and pass it to command
@@ -245,79 +231,52 @@ func TestBigQueryToolWithDatasetRestriction(t *testing.T) {
 	// Setup allowed table
 	allowedTableNameParam1 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName1, allowedTableName1)
 	createAllowedTableStmt1 := fmt.Sprintf("CREATE TABLE %s (id INT64)", allowedTableNameParam1)
-	teardownAllowed1, err:= setupBigQueryTable(t, ctx, client, createAllowedTableStmt1, "", allowedDatasetName1, allowedTableNameParam1, nil)
-	if err != nil {
-		t.Fatalf("failed to setup allowed table 1: %s", err)
-	}
+	teardownAllowed1 := setupBigQueryTable(t, ctx, client, createAllowedTableStmt1, "", allowedDatasetName1, allowedTableNameParam1, nil)
 	defer teardownAllowed1(t)
 
 	allowedTableNameParam2 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName2, allowedTableName2)
 	createAllowedTableStmt2 := fmt.Sprintf("CREATE TABLE %s (id INT64)", allowedTableNameParam2)
-	teardownAllowed2, err:= setupBigQueryTable(t, ctx, client, createAllowedTableStmt2, "", allowedDatasetName2, allowedTableNameParam2, nil)
-	if err != nil {
-		t.Fatalf("failed to setup allowed table 2: %s", err)
-	}
+	teardownAllowed2 := setupBigQueryTable(t, ctx, client, createAllowedTableStmt2, "", allowedDatasetName2, allowedTableNameParam2, nil)
 	defer teardownAllowed2(t)
 
 	// Setup allowed forecast table
 	allowedForecastTableFullName1 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName1, allowedForecastTableName1)
 	createForecastStmt1, insertForecastStmt1, forecastParams1 := getBigQueryForecastToolInfo(allowedForecastTableFullName1)
-	teardownAllowedForecast1, err:= setupBigQueryTable(t, ctx, client, createForecastStmt1, insertForecastStmt1, allowedDatasetName1, allowedForecastTableFullName1, forecastParams1)
-	if err != nil {
-		t.Fatalf("failed to setup allowed forecast table 1: %s", err)
-	}
+	teardownAllowedForecast1 := setupBigQueryTable(t, ctx, client, createForecastStmt1, insertForecastStmt1, allowedDatasetName1, allowedForecastTableFullName1, forecastParams1)
 	defer teardownAllowedForecast1(t)
 
 	allowedForecastTableFullName2 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName2, allowedForecastTableName2)
 	createForecastStmt2, insertForecastStmt2, forecastParams2 := getBigQueryForecastToolInfo(allowedForecastTableFullName2)
-	teardownAllowedForecast2, err:= setupBigQueryTable(t, ctx, client, createForecastStmt2, insertForecastStmt2, allowedDatasetName2, allowedForecastTableFullName2, forecastParams2)
-	if err != nil {
-		t.Fatalf("failed to setup allowed forecast table 2: %s", err)
-	}
+	teardownAllowedForecast2 := setupBigQueryTable(t, ctx, client, createForecastStmt2, insertForecastStmt2, allowedDatasetName2, allowedForecastTableFullName2, forecastParams2)
 	defer teardownAllowedForecast2(t)
 
 	// Setup disallowed table
 	disallowedTableNameParam := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, disallowedDatasetName, disallowedTableName)
 	createDisallowedTableStmt := fmt.Sprintf("CREATE TABLE %s (id INT64)", disallowedTableNameParam)
-	teardownDisallowed, err:= setupBigQueryTable(t, ctx, client, createDisallowedTableStmt, "", disallowedDatasetName, disallowedTableNameParam, nil)
-	if err != nil {
-		t.Fatalf("failed to setup disallowed table: %s", err)
-	}
+	teardownDisallowed := setupBigQueryTable(t, ctx, client, createDisallowedTableStmt, "", disallowedDatasetName, disallowedTableNameParam, nil)
 	defer teardownDisallowed(t)
 
 	// Setup disallowed forecast table
 	disallowedForecastTableFullName := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, disallowedDatasetName, disallowedForecastTableName)
 	createDisallowedForecastStmt, insertDisallowedForecastStmt, disallowedForecastParams := getBigQueryForecastToolInfo(disallowedForecastTableFullName)
-	teardownDisallowedForecast, err:= setupBigQueryTable(t, ctx, client, createDisallowedForecastStmt, insertDisallowedForecastStmt, disallowedDatasetName, disallowedForecastTableFullName, disallowedForecastParams)
-	if err != nil {
-		t.Fatalf("failed to setup disallowed forecast table: %s", err)
-	}
+	teardownDisallowedForecast := setupBigQueryTable(t, ctx, client, createDisallowedForecastStmt, insertDisallowedForecastStmt, disallowedDatasetName, disallowedForecastTableFullName, disallowedForecastParams)
 	defer teardownDisallowedForecast(t)
 
 	// Setup allowed analyze contribution table
 	allowedAnalyzeContributionTableFullName1 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName1, allowedAnalyzeContributionTableName1)
 	createAnalyzeContributionStmt1, insertAnalyzeContributionStmt1, analyzeContributionParams1 := getBigQueryAnalyzeContributionToolInfo(allowedAnalyzeContributionTableFullName1)
-	teardownAllowedAnalyzeContribution1, err:= setupBigQueryTable(t, ctx, client, createAnalyzeContributionStmt1, insertAnalyzeContributionStmt1, allowedDatasetName1, allowedAnalyzeContributionTableFullName1, analyzeContributionParams1)
-	if err != nil {
-		t.Fatalf("failed to setup allowed analyze contribution table 1: %s", err)
-	}
+	teardownAllowedAnalyzeContribution1 := setupBigQueryTable(t, ctx, client, createAnalyzeContributionStmt1, insertAnalyzeContributionStmt1, allowedDatasetName1, allowedAnalyzeContributionTableFullName1, analyzeContributionParams1)
 	defer teardownAllowedAnalyzeContribution1(t)
 
 	allowedAnalyzeContributionTableFullName2 := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, allowedDatasetName2, allowedAnalyzeContributionTableName2)
 	createAnalyzeContributionStmt2, insertAnalyzeContributionStmt2, analyzeContributionParams2 := getBigQueryAnalyzeContributionToolInfo(allowedAnalyzeContributionTableFullName2)
-	teardownAllowedAnalyzeContribution2, err:= setupBigQueryTable(t, ctx, client, createAnalyzeContributionStmt2, insertAnalyzeContributionStmt2, allowedDatasetName2, allowedAnalyzeContributionTableFullName2, analyzeContributionParams2)
-	if err != nil {
-		t.Fatalf("failed to setup allowed analyze contribution table 2: %s", err)
-	}
+	teardownAllowedAnalyzeContribution2 := setupBigQueryTable(t, ctx, client, createAnalyzeContributionStmt2, insertAnalyzeContributionStmt2, allowedDatasetName2, allowedAnalyzeContributionTableFullName2, analyzeContributionParams2)
 	defer teardownAllowedAnalyzeContribution2(t)
 
 	// Setup disallowed analyze contribution table
 	disallowedAnalyzeContributionTableFullName := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, disallowedDatasetName, disallowedAnalyzeContributionTableName)
 	createDisallowedAnalyzeContributionStmt, insertDisallowedAnalyzeContributionStmt, disallowedAnalyzeContributionParams := getBigQueryAnalyzeContributionToolInfo(disallowedAnalyzeContributionTableFullName)
-	teardownDisallowedAnalyzeContribution, err:= setupBigQueryTable(t, ctx, client, createDisallowedAnalyzeContributionStmt, insertDisallowedAnalyzeContributionStmt, disallowedDatasetName, disallowedAnalyzeContributionTableFullName, disallowedAnalyzeContributionParams)
-	if err != nil {
-		t.Fatalf("failed to setup disallowed analyze contribution table: %s", err)
-	}
+	teardownDisallowedAnalyzeContribution := setupBigQueryTable(t, ctx, client, createDisallowedAnalyzeContributionStmt, insertDisallowedAnalyzeContributionStmt, disallowedDatasetName, disallowedAnalyzeContributionTableFullName, disallowedAnalyzeContributionParams)
 	defer teardownDisallowedAnalyzeContribution(t)
 
 	// Configure source with dataset restriction.
@@ -479,10 +438,7 @@ func TestBigQueryWriteModeBlocked(t *testing.T) {
 		t.Fatalf("unable to create BigQuery connection: %s", err)
 	}
 	createParamTableStmt, insertParamTableStmt, _, _, _, _, paramTestParams := getBigQueryParamToolInfo(tableNameParam)
-	teardownTable ,err:= setupBigQueryTable(t, ctx, client, createParamTableStmt, insertParamTableStmt, datasetName, tableNameParam, paramTestParams)
-	if err != nil {
-		t.Fatalf("failed to setup BigQuery table: %s", err)
-	}
+	teardownTable := setupBigQueryTable(t, ctx, client, createParamTableStmt, insertParamTableStmt, datasetName, tableNameParam, paramTestParams)
 	defer teardownTable(t)
 
 	toolsFile := map[string]any{
@@ -667,7 +623,7 @@ func getBigQueryTmplToolStatement() (string, string) {
 	return tmplSelectCombined, tmplSelectFilterCombined
 }
 
-func setupBigQueryTable(t *testing.T, ctx context.Context, client *bigqueryapi.Client, createStatement, insertStatement, datasetName string, tableName string, params []bigqueryapi.QueryParameter) (func(*testing.T), error) {
+func setupBigQueryTable(t *testing.T, ctx context.Context, client *bigqueryapi.Client, createStatement, insertStatement, datasetName string, tableName string, params []bigqueryapi.QueryParameter) func(*testing.T) {
 	// Create dataset
 	dataset := client.Dataset(datasetName)
 	_, err := dataset.Metadata(ctx)
@@ -743,7 +699,7 @@ func setupBigQueryTable(t *testing.T, ctx context.Context, client *bigqueryapi.C
 		} else if err != nil {
 			t.Errorf("Failed to list tables in dataset %s to check emptiness: %v.", datasetName, err)
 		}
-	}, nil
+	}
 }
 
 func addBigQueryPrebuiltToolsConfig(t *testing.T, config map[string]any) map[string]any {
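With setup failures handled inside the helper, the returned value reduces to a cleanup closure that reports problems with t.Errorf rather than failing the whole test. Go's standard library offers t.Cleanup for the same shape; a short sketch of that alternative (an option for comparison, not what this change adopts), again with illustrative names and database/sql types:

package quickstart_test

import (
	"context"
	"database/sql"
	"fmt"
	"testing"
)

// setupTableWithCleanup registers its own teardown via t.Cleanup, so callers
// need neither a returned closure nor a defer at the call site.
func setupTableWithCleanup(t *testing.T, ctx context.Context, db *sql.DB, createStmt, tableName string) {
	t.Helper()
	if _, err := db.ExecContext(ctx, createStmt); err != nil {
		t.Fatalf("unable to create test table %s: %s", tableName, err)
	}
	t.Cleanup(func() {
		// Runs automatically when the test (or subtest) finishes.
		if _, err := db.ExecContext(ctx, fmt.Sprintf("DROP TABLE %s", tableName)); err != nil {
			t.Errorf("Teardown failed: %s", err)
		}
	})
}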
@@ -124,23 +124,13 @@ func TestCloudSQLPgSimpleToolEndpoints(t *testing.T) {
 
 	// set up data for param tool
 	createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := tests.GetPostgresSQLParamToolInfo(tableNameParam)
-	teardownTable1, err := tests.SetupPostgresSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
-	if teardownTable1 != nil {
-		defer teardownTable1(t)
-	}
-	if err != nil {
-		t.Fatalf("Setup failed: %v", err)
-	}
+	teardownTable1 := tests.SetupPostgresSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
+	defer teardownTable1(t)
 
 	// set up data for auth tool
 	createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := tests.GetPostgresSQLAuthToolInfo(tableNameAuth)
-	teardownTable2, err := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
-	if teardownTable2 != nil {
-		defer teardownTable2(t)
-	}
-	if err != nil {
-		t.Fatalf("Setup failed: %v", err)
-	}
+	teardownTable2 := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
+	defer teardownTable2(t)
 
 	// Set up table for semantic search
 	vectorTableName, tearDownVectorTable := tests.SetupPostgresVectorTable(t, ctx, pool)
@@ -613,36 +613,31 @@ func GetMySQLWants() (string, string, string, string) {

 // SetupPostgresSQLTable creates and inserts data into a table of tool
 // compatible with postgres-sql tool
-func SetupPostgresSQLTable(t *testing.T, ctx context.Context, pool *pgxpool.Pool, createStatement, insertStatement, tableName string, params []any) (func(*testing.T), error) {
+func SetupPostgresSQLTable(t *testing.T, ctx context.Context, pool *pgxpool.Pool, createStatement, insertStatement, tableName string, params []any) func(*testing.T) {
     err := pool.Ping(ctx)
     if err != nil {
-        // Return nil for the function and the error itself
-        return nil, fmt.Errorf("unable to connect to test database: %w", err)
+        t.Fatalf("unable to connect to test database: %s", err)
     }

     // Create table
-    _, err = pool.Exec(ctx, createStatement)
+    _, err = pool.Query(ctx, createStatement)
     if err != nil {
-        return nil, fmt.Errorf("unable to create test table %s: %w", tableName, err)
+        t.Fatalf("unable to create test table %s: %s", tableName, err)
     }

     // Insert test data
-    _, err = pool.Exec(ctx, insertStatement, params...)
+    _, err = pool.Query(ctx, insertStatement, params...)
     if err != nil {
-        // partially cleanup if insert fails
-        teardown := func(t *testing.T) {
-            _, _ = pool.Exec(ctx, fmt.Sprintf("DROP TABLE IF EXISTS %s;", tableName))
-        }
-        return teardown, fmt.Errorf("unable to insert test data: %w", err)
+        t.Fatalf("unable to insert test data: %s", err)
     }

-    // Return the cleanup function and nil for error
     return func(t *testing.T) {
-        _, err = pool.Exec(ctx, fmt.Sprintf("DROP TABLE IF EXISTS %s;", tableName))
+        // tear down test
+        _, err = pool.Exec(ctx, fmt.Sprintf("DROP TABLE %s;", tableName))
         if err != nil {
             t.Errorf("Teardown failed: %s", err)
         }
-    }, nil
+    }
 }

 // SetupMsSQLTable creates and inserts data into a table of tool
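The hunks above and below switch the shared setup helpers from returning an error to failing the test directly with t.Fatalf, so callers keep only the teardown function. A minimal illustrative sketch of that calling pattern follows; the setupTable helper, its signature, and the table name are hypothetical stand-ins, not the repository's API:

    // Sketch only: shows the fail-fast setup/teardown shape adopted by this refactor.
    package example

    import (
        "fmt"
        "testing"
    )

    // setupTable aborts the test on any setup problem and returns only a
    // cleanup function, so callers no longer need to check an error value.
    func setupTable(t *testing.T, name string) func(*testing.T) {
        t.Helper()
        if name == "" {
            t.Fatalf("Setup failed: empty table name")
        }
        return func(t *testing.T) {
            // Drop the table (or perform other cleanup); report failures
            // without aborting the remaining teardowns.
            fmt.Printf("dropping %s\n", name)
        }
    }

    func TestExample(t *testing.T) {
        teardown := setupTable(t, "param_table")
        defer teardown(t)
        // ... exercise the tool endpoints ...
    }

Because t.Fatalf stops the test via runtime.Goexit rather than a panic, any teardowns already deferred by the caller still run, which is why the helpers can safely fail fast while returning only a cleanup function.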
@@ -89,18 +89,12 @@ func TestOracleSimpleToolEndpoints(t *testing.T) {

     // set up data for param tool
     createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := getOracleParamToolInfo(tableNameParam)
-    teardownTable1, err := setupOracleTable(t, ctx, db, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
-    if err != nil {
-        t.Fatalf("failed to setup Oracle table %s: %v", tableNameParam, err)
-    }
+    teardownTable1 := setupOracleTable(t, ctx, db, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
     defer teardownTable1(t)

     // set up data for auth tool
     createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := getOracleAuthToolInfo(tableNameAuth)
-    teardownTable2, err := setupOracleTable(t, ctx, db, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
-    if err != nil {
-        t.Fatalf("failed to setup Oracle table %s: %v", tableNameAuth, err)
-    }
+    teardownTable2 := setupOracleTable(t, ctx, db, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
     defer teardownTable2(t)

     // Write config into a file and pass it to command
@@ -141,31 +135,31 @@ func TestOracleSimpleToolEndpoints(t *testing.T) {
     tests.RunToolInvokeWithTemplateParameters(t, tableNameTemplateParam)
 }

-func setupOracleTable(t *testing.T, ctx context.Context, pool *sql.DB, createStatement, insertStatement, tableName string, params []any) (func(*testing.T), error) {
+func setupOracleTable(t *testing.T, ctx context.Context, pool *sql.DB, createStatement, insertStatement, tableName string, params []any) func(*testing.T) {
     err := pool.PingContext(ctx)
     if err != nil {
-        return nil, fmt.Errorf("unable to connect to test database: %w", err)
+        t.Fatalf("unable to connect to test database: %s", err)
     }

     // Create table
     _, err = pool.QueryContext(ctx, createStatement)
     if err != nil {
-        return nil, fmt.Errorf("unable to create test table %s: %w", tableName, err)
+        t.Fatalf("unable to create test table %s: %s", tableName, err)
     }

     // Insert test data
     _, err = pool.QueryContext(ctx, insertStatement, params...)
     if err != nil {
-        return nil, fmt.Errorf("unable to insert test data: %w", err)
+        t.Fatalf("unable to insert test data: %s", err)
     }

     return func(t *testing.T) {
         // tear down test
-        _, err = pool.ExecContext(ctx, fmt.Sprintf("DROP TABLE %s CASCADE CONSTRAINTS", tableName))
+        _, err = pool.ExecContext(ctx, fmt.Sprintf("DROP TABLE %s", tableName))
         if err != nil {
             t.Errorf("Teardown failed: %s", err)
         }
-    }, nil
+    }
 }

 func getOracleParamToolInfo(tableName string) (string, string, string, string, string, string, []any) {
@@ -103,24 +103,13 @@ func TestPostgres(t *testing.T) {

     // set up data for param tool
     createParamTableStmt, insertParamTableStmt, paramToolStmt, idParamToolStmt, nameParamToolStmt, arrayToolStmt, paramTestParams := tests.GetPostgresSQLParamToolInfo(tableNameParam)
-    // teardownTable1 := tests.SetupPostgresSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
-    teardownTable1, err := tests.SetupPostgresSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
-    if teardownTable1 != nil {
-        defer teardownTable1(t)
-    }
-    if err != nil {
-        t.Fatalf("Setup failed: %v", err)
-    }
+    teardownTable1 := tests.SetupPostgresSQLTable(t, ctx, pool, createParamTableStmt, insertParamTableStmt, tableNameParam, paramTestParams)
+    defer teardownTable1(t)

     // set up data for auth tool
     createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := tests.GetPostgresSQLAuthToolInfo(tableNameAuth)
-    teardownTable2, err := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
-    if teardownTable2 != nil {
-        defer teardownTable2(t)
-    }
-    if err != nil {
-        t.Fatalf("Setup failed: %v", err)
-    }
+    teardownTable2 := tests.SetupPostgresSQLTable(t, ctx, pool, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, authTestParams)
+    defer teardownTable2(t)

     // Set up table for semantic search
     vectorTableName, tearDownVectorTable := tests.SetupPostgresVectorTable(t, ctx, pool)
@@ -115,35 +115,23 @@ func TestSpannerToolEndpoints(t *testing.T) {
         SpannerInstance,
         SpannerDatabase,
     )
-    teardownTable1, err := setupSpannerTable(t, ctx, adminClient, dataClient, createParamTableStmt, insertParamTableStmt, tableNameParam, dbString, paramTestParams)
-    if err != nil {
-        t.Fatalf("failed to setup Spanner table %s: %v", tableNameParam, err)
-    }
+    teardownTable1 := setupSpannerTable(t, ctx, adminClient, dataClient, createParamTableStmt, insertParamTableStmt, tableNameParam, dbString, paramTestParams)
     defer teardownTable1(t)

     // set up data for auth tool
     createAuthTableStmt, insertAuthTableStmt, authToolStmt, authTestParams := getSpannerAuthToolInfo(tableNameAuth)
-    teardownTable2, err := setupSpannerTable(t, ctx, adminClient, dataClient, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, dbString, authTestParams)
-    if err != nil {
-        t.Fatalf("failed to setup Spanner table %s: %v", tableNameAuth, err)
-    }
+    teardownTable2 := setupSpannerTable(t, ctx, adminClient, dataClient, createAuthTableStmt, insertAuthTableStmt, tableNameAuth, dbString, authTestParams)
     defer teardownTable2(t)

     // set up data for template param tool
     createStatementTmpl := fmt.Sprintf("CREATE TABLE %s (id INT64, name STRING(MAX), age INT64) PRIMARY KEY (id)", tableNameTemplateParam)
-    teardownTableTmpl, err := setupSpannerTable(t, ctx, adminClient, dataClient, createStatementTmpl, "", tableNameTemplateParam, dbString, nil)
-    if err != nil {
-        t.Fatalf("failed to setup Spanner table %s: %v", tableNameTemplateParam, err)
-    }
+    teardownTableTmpl := setupSpannerTable(t, ctx, adminClient, dataClient, createStatementTmpl, "", tableNameTemplateParam, dbString, nil)
     defer teardownTableTmpl(t)

     // set up for graph tool
     nodeTableName := "node_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
     createNodeStatementTmpl := fmt.Sprintf("CREATE TABLE %s (id INT64 NOT NULL) PRIMARY KEY (id)", nodeTableName)
-    teardownNodeTableTmpl, err := setupSpannerTable(t, ctx, adminClient, dataClient, createNodeStatementTmpl, "", nodeTableName, dbString, nil)
-    if err != nil {
-        t.Fatalf("failed to setup Spanner table %s: %v", nodeTableName, err)
-    }
+    teardownNodeTableTmpl := setupSpannerTable(t, ctx, adminClient, dataClient, createNodeStatementTmpl, "", nodeTableName, dbString, nil)
     defer teardownNodeTableTmpl(t)

     edgeTableName := "edge_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
@@ -155,10 +143,7 @@ func TestSpannerToolEndpoints(t *testing.T) {
     ) PRIMARY KEY (id, target_id),
     INTERLEAVE IN PARENT %[2]s ON DELETE CASCADE
     `, edgeTableName, nodeTableName)
-    teardownEdgeTableTmpl, err := setupSpannerTable(t, ctx, adminClient, dataClient, createEdgeStatementTmpl, "", edgeTableName, dbString, nil)
-    if err != nil {
-        t.Fatalf("failed to setup Spanner table %s: %v", edgeTableName, err)
-    }
+    teardownEdgeTableTmpl := setupSpannerTable(t, ctx, adminClient, dataClient, createEdgeStatementTmpl, "", edgeTableName, dbString, nil)
     defer teardownEdgeTableTmpl(t)

     graphName := "graph_" + strings.ReplaceAll(uuid.New().String(), "-", "")
@@ -258,7 +243,7 @@ func getSpannerAuthToolInfo(tableName string) (string, string, string, map[strin

 // setupSpannerTable creates and inserts data into a table of tool
 // compatible with spanner-sql tool
-func setupSpannerTable(t *testing.T, ctx context.Context, adminClient *database.DatabaseAdminClient, dataClient *spanner.Client, createStatement, insertStatement, tableName, dbString string, params map[string]any) (func(*testing.T), error) {
+func setupSpannerTable(t *testing.T, ctx context.Context, adminClient *database.DatabaseAdminClient, dataClient *spanner.Client, createStatement, insertStatement, tableName, dbString string, params map[string]any) func(*testing.T) {

     // Create table
     op, err := adminClient.UpdateDatabaseDdl(ctx, &databasepb.UpdateDatabaseDdlRequest{
@@ -266,11 +251,11 @@ func setupSpannerTable(t *testing.T, ctx context.Context, adminClient *database.
         Statements: []string{createStatement},
     })
     if err != nil {
-        return nil, fmt.Errorf("unable to start create table operation %s: %w", tableName, err)
+        t.Fatalf("unable to start create table operation %s: %s", tableName, err)
     }
     err = op.Wait(ctx)
     if err != nil {
-        return nil, fmt.Errorf("unable to create test table %s: %w", tableName, err)
+        t.Fatalf("unable to create test table %s: %s", tableName, err)
     }

     // Insert test data
@@ -284,7 +269,7 @@ func setupSpannerTable(t *testing.T, ctx context.Context, adminClient *database.
             return err
         })
         if err != nil {
-            return nil, fmt.Errorf("unable to insert test data: %w", err)
+            t.Fatalf("unable to insert test data: %s", err)
         }
     }

@@ -303,7 +288,7 @@ func setupSpannerTable(t *testing.T, ctx context.Context, adminClient *database.
         if opErr != nil {
             t.Errorf("Teardown failed: %s", opErr)
         }
-    }, nil
+    }
 }

 // setupSpannerGraph creates a graph and inserts data into it.
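A related idiom, sketched here only for comparison and not part of this change: a setup helper that already receives *testing.T and fails with t.Fatalf can register its cleanup with t.Cleanup, so callers do not need to defer the returned function. The helper and table name below are hypothetical:

    // Sketch of an alternative shape using t.Cleanup (Go 1.14+).
    package example

    import (
        "fmt"
        "testing"
    )

    func setupTableWithCleanup(t *testing.T, name string) {
        t.Helper()
        if name == "" {
            t.Fatalf("Setup failed: empty table name")
        }
        // Cleanup functions run in LIFO order after the test and its
        // subtests finish, even if the test fails.
        t.Cleanup(func() {
            fmt.Printf("dropping %s\n", name)
        })
    }

    func TestWithCleanup(t *testing.T) {
        setupTableWithCleanup(t, "auth_table")
        // ... run assertions ...
    }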