Mirror of https://github.com/googleapis/genai-toolbox.git, synced 2026-01-13 01:18:19 -05:00.

Compare commits: `invoke-int` ... `dishapraka` (39 commits).
Commit SHA1s in this range: b6c6644769, e17c40caa6, 8f85d84cd3, 6209e2b00e, 66bdcc03df, f4cf486fa9, 94eba1fd0a, c1acd1a3e0, acb205ca47, 86eecc356d, 63adc78bea, 2c4d73b77b, 0f6d52a225, 43c4262f94, 94e19d87e5, 3efce3d2b4, 21ac98bc06, 5ff0696706, cec88ec8cb, ff8a7fe472, 63c54d0453, 4e7b8a01b5, 1a44c671ec, 1536d1fdab, 36c658472c, fca879ad5b, c4a22b8d3b, af72637009, dcc3dabdea, 564adbef27, 2d5a93e312, 5aed4e136d, 3be9b7b3bd, 4dff01f98a, 0e04381ed7, 1afd9a95da, 95efdc847f, 73a96b1b63, 3553bf0ccf.
CI Cloud Build configuration:

```diff
@@ -194,6 +194,26 @@ steps:
         dataplex \
         dataplex
 
+  - id: "dataform"
+    name: golang:1
+    waitFor: ["compile-test-binary"]
+    entrypoint: /bin/bash
+    env:
+      - "GOPATH=/gopath"
+    secretEnv: ["CLIENT_ID"]
+    volumes:
+      - name: "go"
+        path: "/gopath"
+    args:
+      - -c
+      - |
+        apt-get update && apt-get install -y npm && \
+        npm install -g @dataform/cli && \
+        .ci/test_with_coverage.sh \
+          "Dataform" \
+          dataform \
+          dataform
+
   - id: "postgres"
     name: golang:1
     waitFor: ["compile-test-binary"]
@@ -517,6 +537,8 @@ steps:
       - "FIRESTORE_PROJECT=$PROJECT_ID"
       - "SERVICE_ACCOUNT_EMAIL=$SERVICE_ACCOUNT_EMAIL"
      - "LOOKER_VERIFY_SSL=$_LOOKER_VERIFY_SSL"
+      - "LOOKER_PROJECT=$_LOOKER_PROJECT"
+      - "LOOKER_LOCATION=$_LOOKER_LOCATION"
     secretEnv:
       [
         "CLIENT_ID",
@@ -804,6 +826,8 @@ substitutions:
   _DGRAPHURL: "https://play.dgraph.io"
   _COUCHBASE_BUCKET: "couchbase-bucket"
   _COUCHBASE_SCOPE: "couchbase-scope"
+  _LOOKER_LOCATION: "us"
+  _LOOKER_PROJECT: "149671255749"
   _LOOKER_VERIFY_SSL: "true"
   _TIDB_HOST: 127.0.0.1
   _TIDB_PORT: "4000"
```
`.github/PULL_REQUEST_TEMPLATE.md` (vendored, 4 changes):

```diff
@@ -1,12 +1,10 @@
 ## Description
 
----
 
 > Should include a concise description of the changes (bug or feature), it's
 > impact, along with a summary of the solution
 
 ## PR Checklist
 
----
 
 > Thank you for opening a Pull Request! Before submitting your PR, there are a
 > few things you can do to make sure it goes smoothly:
@@ -14,7 +12,7 @@
   [CONTRIBUTING.md](https://github.com/googleapis/genai-toolbox/blob/main/CONTRIBUTING.md)
 - [ ] Make sure to open an issue as a
   [bug/issue](https://github.com/googleapis/genai-toolbox/issues/new/choose)
-  before writing your code! That way we can discuss the change, evaluate
+  before writing your code! That way we can discuss the change, evaluate
   designs, and agree on the general idea
 - [ ] Ensure the tests and linter pass
 - [ ] Code coverage does not decrease (if any source code was changed)
```
`.github/release-please.yml` (vendored, 2 changes):

```diff
@@ -37,4 +37,4 @@ extraFiles: [
   "docs/en/how-to/connect-ide/postgres_mcp.md",
   "docs/en/how-to/connect-ide/neo4j_mcp.md",
   "docs/en/how-to/connect-ide/sqlite_mcp.md",
-]
+]
```

```diff
@@ -37,7 +37,7 @@ jobs:
     runs-on: 'ubuntu-latest'
 
     steps:
-      - uses: 'actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b' # v7
+      - uses: 'actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd' # v8
        with:
          script: |-
            // parse test names
```
`.github/workflows/deploy_dev_docs.yaml` (vendored, new file, 84 lines):

```yaml
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

name: "Deploy In-development docs"

permissions:
  contents: write

on:
  push:
    branches:
      - main
    paths:
      - 'docs/**'
      - 'github/workflows/docs**'
      - '.hugo/**'

  # Allow triggering manually.
  workflow_dispatch:

jobs:
  deploy:
    runs-on: ubuntu-24.04
    defaults:
      run:
        working-directory: .hugo
    concurrency:
      group: ${{ github.workflow }}-${{ github.ref }}
      cancel-in-progress: true
    steps:
      - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
        with:
          fetch-depth: 0 # Fetch all history for .GitInfo and .Lastmod

      - name: Setup Hugo
        uses: peaceiris/actions-hugo@75d2e84710de30f6ff7268e08f310b60ef14033f # v3
        with:
          hugo-version: "0.145.0"
          extended: true

      - name: Setup Node
        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
        with:
          node-version: "22"

      - name: Cache dependencies
        uses: actions/cache@0400d5f644dc74513175e3cd8d07132dd4860809 # v4
        with:
          path: ~/.npm
          key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
          restore-keys: |
            ${{ runner.os }}-node-

      - run: npm ci
      - run: hugo --minify
        env:
          HUGO_BASEURL: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/dev
          HUGO_RELATIVEURLS: false

      - name: Create Staging Directory
        run: |
          mkdir staging
          mv public staging/dev
          mv staging/dev/releases.releases staging/releases.releases

      - name: Deploy
        uses: peaceiris/actions-gh-pages@4f9cc6602d3f66b9c108549d475ec49e8ef4d45e # v4
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: ./.hugo/staging
          publish_branch: versioned-gh-pages
          keep_files: true
          commit_message: "deploy: ${{ github.event.head_commit.message }}"
```
`.github/workflows/deploy_previous_version_docs.yaml` (vendored, new file, 104 lines):

```yaml
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

name: "Deploy Previous Version Docs"

on:
  workflow_dispatch:
    inputs:
      version_tag:
        description: 'The old version tag to build docs for (e.g., v0.15.0)'
        required: true
        type: string

jobs:
  build_and_deploy:
    runs-on: ubuntu-latest
    permissions:
      contents: write

    steps:
      - name: Checkout main branch (for latest templates and theme)
        uses: actions/checkout@v4
        with:
          ref: 'main'
          submodules: 'recursive'
          fetch-depth: 0

      - name: Checkout old content from tag into a temporary directory
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.inputs.version_tag }}
          path: 'old_version_source' # Checkout into a temp subdir
          # Sparse checkout to only get the content directory
          sparse-checkout: |
            docs

      - name: Replace content with old version
        run: |
          # Remove the current content directory from the main branch checkout
          rm -rf docs/
          # Move the old content directory into place
          mv ./old_version_source/docs docs

      - name: Setup Hugo and Node
        uses: peaceiris/actions-hugo@v3
        with:
          hugo-version: "0.145.0"
          extended: true
      - uses: actions/setup-node@v4
        with:
          node-version: "22"

      - name: Install Dependencies
        run: npm ci
        working-directory: .hugo

      - name: Build Hugo Site for Archived Version
        run: hugo --minify
        working-directory: .hugo
        env:
          HUGO_BASEURL: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/${{ github.event.inputs.version_tag }}/
          HUGO_RELATIVEURLS: false

      - name: Deploy to gh-pages
        uses: peaceiris/actions-gh-pages@v4
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: .hugo/public
          publish_branch: versioned-gh-pages
          destination_dir: ./${{ github.event.inputs.version_tag }}
          keep_files: true
          allow_empty_commit: true
          commit_message: "docs(backport): deploy docs for ${{ github.event.inputs.version_tag }}"

      - name: Clean Build Directory
        run: rm -rf .hugo/public

      - name: Build Hugo Site
        run: hugo --minify
        working-directory: .hugo
        env:
          HUGO_BASEURL: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/
          HUGO_RELATIVEURLS: false

      - name: Deploy to root
        uses: peaceiris/actions-gh-pages@v4
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: .hugo/public
          publish_branch: versioned-gh-pages
          keep_files: true
          allow_empty_commit: true
          commit_message: "deploy: docs to root for ${{ github.event.inputs.version_tag }}"
```
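Since this workflow only has a `workflow_dispatch` trigger, it runs from the Actions UI; as a sketch (assuming an authenticated GitHub CLI against this repository, tag value illustrative), it could also be kicked off from the command line:

```shell
# Pass the required version_tag input; v0.15.0 mirrors the example
# given in the workflow's own input description.
gh workflow run deploy_previous_version_docs.yaml -f version_tag=v0.15.0
```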
`.github/workflows/deploy_versioned_docs.yaml` (vendored, new file, 86 lines):

```yaml
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

name: "Deploy Versioned Docs"

permissions:
  contents: write

on:
  release:
    types: [published]

jobs:
  deploy:
    runs-on: ubuntu-24.04
    steps:
      - name: Checkout Code at Tag
        uses: actions/checkout@v4
        with:
          ref: ${{ github.event.release.tag_name }}

      - name: Get Version from Release Tag
        run: echo "VERSION=${{ github.event.release.tag_name }}" >> $GITHUB_ENV

      - name: Setup Hugo
        uses: peaceiris/actions-hugo@v3
        with:
          hugo-version: "0.145.0"
          extended: true

      - name: Setup Node
        uses: actions/setup-node@v4
        with:
          node-version: "22"

      - name: Install Dependencies
        run: npm ci
        working-directory: .hugo

      - name: Build Hugo Site
        run: hugo --minify
        working-directory: .hugo
        env:
          HUGO_BASEURL: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/${{ env.VERSION }}/
          HUGO_RELATIVEURLS: false

      - name: Deploy
        uses: peaceiris/actions-gh-pages@v4
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: .hugo/public
          publish_branch: versioned-gh-pages
          destination_dir: ./${{ env.VERSION }}
          keep_files: true
          commit_message: "deploy: docs for ${{ env.VERSION }}"

      - name: Clean Build Directory
        run: rm -rf .hugo/public

      - name: Build Hugo Site
        run: hugo --minify
        working-directory: .hugo
        env:
          HUGO_BASEURL: https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/
          HUGO_RELATIVEURLS: false

      - name: Deploy to root
        uses: peaceiris/actions-gh-pages@v4
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_dir: .hugo/public
          publish_branch: versioned-gh-pages
          keep_files: true
          allow_empty_commit: true
          commit_message: "deploy: docs to root for ${{ env.VERSION }}"
```
`.github/workflows/docs_deploy.yaml` (vendored, 2 changes):

```diff
@@ -50,7 +50,7 @@ jobs:
           extended: true
 
       - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
+        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5
         with:
           node-version: "22"
 
```
`.github/workflows/docs_preview_clean.yaml` (vendored, 6 changes):

```diff
@@ -36,7 +36,7 @@ jobs:
     steps:
       - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5
         with:
-          ref: gh-pages
+          ref: versioned-gh-pages
 
       - name: Remove Preview
         run: |
@@ -48,7 +48,7 @@ jobs:
           git push
 
       - name: Comment
-        uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
         with:
          script: |
            github.rest.issues.createComment({
@@ -56,4 +56,4 @@ jobs:
               owner: context.repo.owner,
               repo: context.repo.repo,
               body: "🧨 Preview deployments removed."
-            })
+            })
```
`.github/workflows/docs_preview_deploy.yaml` (vendored, 5 changes):

```diff
@@ -62,7 +62,7 @@ jobs:
           extended: true
 
       - name: Setup Node
-        uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
+        uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5
         with:
           node-version: "22"
 
@@ -86,11 +86,12 @@ jobs:
         with:
           github_token: ${{ secrets.GITHUB_TOKEN }}
           publish_dir: .hugo/public
+          publish_branch: versioned-gh-pages
           destination_dir: ./previews/PR-${{ github.event.number }}
           commit_message: "stage: PR-${{ github.event.number }}: ${{ github.event.head_commit.message }}"
 
       - name: Comment
-        uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8
         with:
           script: |
             github.rest.issues.createComment({
```
`.github/workflows/lint.yaml` (vendored, 6 changes):

```diff
@@ -36,7 +36,7 @@ jobs:
     steps:
       - name: Remove PR Label
         if: "${{ github.event.action == 'labeled' && github.event.label.name == 'tests: run' }}"
-        uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7.1.0
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           script: |
@@ -51,9 +51,9 @@ jobs:
             console.log('Failed to remove label. Another job may have already removed it!');
           }
       - name: Setup Go
-        uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0
+        uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
         with:
-          go-version: "1.22"
+          go-version: "1.25"
       - name: Checkout code
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
```
`.github/workflows/tests.yaml` (vendored, 6 changes):

```diff
@@ -41,7 +41,7 @@ jobs:
     steps:
       - name: Remove PR label
         if: "${{ github.event.action == 'labeled' && github.event.label.name == 'tests: run' }}"
-        uses: actions/github-script@f28e40c7f34bde8b3046d885e986cb6290c5673b # v7.1.0
+        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}
           script: |
@@ -57,9 +57,9 @@ jobs:
           }
 
       - name: Setup Go
-        uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5.5.0
+        uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6.0.0
         with:
-          go-version: "1.22"
+          go-version: "1.24"
 
       - name: Checkout code
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
```
Hugo site configuration:

```diff
@@ -1,5 +1,5 @@
 title = 'MCP Toolbox for Databases'
-relativeURLs = true
+relativeURLs = false
 
 languageCode = 'en-us'
 defaultContentLanguage = "en"
@@ -36,6 +36,7 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick
 github_project_repo = "https://github.com/googleapis/genai-toolbox"
 github_subdir = "docs"
 offlineSearch = true
+version_menu = "Releases"
 [params.ui]
 ul_show = 100
 showLightDarkModeMenu = true
@@ -43,6 +44,50 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick
 sidebar_menu_foldable = true
 sidebar_menu_compact = false
 
+[[params.versions]]
+version = "Dev"
+url = "https://googleapis.github.io/genai-toolbox/dev/"
+
+# Add a new version block here before every release
+# The order of versions in this file is mirrored into the dropdown
+
+[[params.versions]]
+version = "v0.16.0"
+url = "https://googleapis.github.io/genai-toolbox/v0.16.0/"
+
+[[params.versions]]
+version = "v0.15.0"
+url = "https://googleapis.github.io/genai-toolbox/v0.15.0/"
+
+[[params.versions]]
+version = "v0.14.0"
+url = "https://googleapis.github.io/genai-toolbox/v0.14.0/"
+
+[[params.versions]]
+version = "v0.13.0"
+url = "https://googleapis.github.io/genai-toolbox/v0.13.0/"
+
+[[params.versions]]
+version = "v0.12.0"
+url = "https://googleapis.github.io/genai-toolbox/v0.12.0/"
+
+[[params.versions]]
+version = "v0.11.0"
+url = "https://googleapis.github.io/genai-toolbox/v0.11.0/"
+
+[[params.versions]]
+version = "v0.10.0"
+url = "https://googleapis.github.io/genai-toolbox/v0.10.0/"
+
+[[params.versions]]
+version = "v0.9.0"
+url = "https://googleapis.github.io/genai-toolbox/v0.9.0/"
+
+[[params.versions]]
+version = "v0.8.0"
+url = "https://googleapis.github.io/genai-toolbox/v0.8.0/"
+
+
 [[menu.main]]
 name = "GitHub"
 weight = 50
@@ -67,6 +112,13 @@ ignoreFiles = ["quickstart/shared", "quickstart/python", "quickstart/js", "quick
 baseName = "llms-full"
 isPlainText = true
 root = true
+[outputFormats.releases]
+baseName = 'releases'
+isPlainText = true
+mediaType = 'text/releases'
+
+[mediaTypes."text/releases"]
+suffixes = ["releases"]
 
 [outputs]
-home = ["HTML", "RSS", "LLMS", "LLMS-FULL"]
+home = ["HTML", "RSS", "LLMS", "LLMS-FULL", "releases"]
```
`.hugo/layouts/_default/home.releases.releases` (new file, 9 lines):

```html
{{ if .Site.Params.versions -}}
{{ $path := "" -}}
{{ if .Site.Params.version_menu_pagelinks -}}
{{ $path = .Page.RelPermalink -}}
{{ end -}}
{{ range .Site.Params.versions -}}
<a class="dropdown-item" href="{{ .url }}{{ $path }}">{{ .version }}</a>
{{ end -}}
{{ end -}}
```
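Given the `[[params.versions]]` entries added to the Hugo configuration above, the plain-text fragment this template emits would look roughly like the following (an illustrative sketch, not captured build output):

```html
<a class="dropdown-item" href="https://googleapis.github.io/genai-toolbox/dev/">Dev</a>
<a class="dropdown-item" href="https://googleapis.github.io/genai-toolbox/v0.16.0/">v0.16.0</a>
<a class="dropdown-item" href="https://googleapis.github.io/genai-toolbox/v0.15.0/">v0.15.0</a>
```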
`.hugo/layouts/partials/hooks/head-end.html` (new file, 1 line):

```html
<script src='{{ .Site.BaseURL }}js/w3.js' type="application/x-javascript"></script>
```
`.hugo/layouts/partials/navbar-version-selector.html` (new file, 28 lines):

```html
{{ if .Site.Params.versions -}}
<a class="nav-link dropdown-toggle" href="#" id="navbarDropdown" role="button" data-bs-toggle="dropdown" aria-expanded="false">
  {{ .Site.Params.version_menu }}
</a>
<div class="dropdown-menu" aria-labelledby="navbarDropdown">
  <div w3-include-html="/genai-toolbox/releases.releases" w3-include-html-default='<a class="dropdown-item" href="/genai-toolbox/dev/">Dev</a>'></div>
  <script>
    function deepRouteLink() {
      const currentPath = window.location.pathname;
      const versionRegex = /(\/genai-toolbox\/(v\d+\.\d+\.\d+|dev)\/)/;
      const versionMatch = currentPath.match(versionRegex);

      if (!versionMatch) {
        return;
      }

      const deepPath = currentPath.substring(versionMatch[0].length);
      const versionLinks = document.querySelectorAll('.dropdown-menu[aria-labelledby="navbarDropdown"] .dropdown-item');

      versionLinks.forEach(link => {
        link.href = link.href + deepPath;
      });
    }

    w3.includeHTML(deepRouteLink);
  </script>
</div>
{{ end -}}
```
`.hugo/static/js/w3.js` (new file, 405 lines):

```js
/* W3.JS 1.04 April 2019 by w3schools.com */
"use strict";
var w3 = {};
w3.hide = function (sel) {
  w3.hideElements(w3.getElements(sel));
};
w3.hideElements = function (elements) {
  var i, l = elements.length;
  for (i = 0; i < l; i++) {
    w3.hideElement(elements[i]);
  }
};
w3.hideElement = function (element) {
  w3.styleElement(element, "display", "none");
};
w3.show = function (sel, a) {
  var elements = w3.getElements(sel);
  if (a) {w3.hideElements(elements);}
  w3.showElements(elements);
};
w3.showElements = function (elements) {
  var i, l = elements.length;
  for (i = 0; i < l; i++) {
    w3.showElement(elements[i]);
  }
};
w3.showElement = function (element) {
  w3.styleElement(element, "display", "block");
};
w3.addStyle = function (sel, prop, val) {
  w3.styleElements(w3.getElements(sel), prop, val);
};
w3.styleElements = function (elements, prop, val) {
  var i, l = elements.length;
  for (i = 0; i < l; i++) {
    w3.styleElement(elements[i], prop, val);
  }
};
w3.styleElement = function (element, prop, val) {
  element.style.setProperty(prop, val);
};
w3.toggleShow = function (sel) {
  var i, x = w3.getElements(sel), l = x.length;
  for (i = 0; i < l; i++) {
    if (x[i].style.display == "none") {
      w3.styleElement(x[i], "display", "block");
    } else {
      w3.styleElement(x[i], "display", "none");
    }
  }
};
w3.addClass = function (sel, name) {
  w3.addClassElements(w3.getElements(sel), name);
};
w3.addClassElements = function (elements, name) {
  var i, l = elements.length;
  for (i = 0; i < l; i++) {
    w3.addClassElement(elements[i], name);
  }
};
w3.addClassElement = function (element, name) {
  var i, arr1, arr2;
  arr1 = element.className.split(" ");
  arr2 = name.split(" ");
  for (i = 0; i < arr2.length; i++) {
    if (arr1.indexOf(arr2[i]) == -1) {element.className += " " + arr2[i];}
  }
};
w3.removeClass = function (sel, name) {
  w3.removeClassElements(w3.getElements(sel), name);
};
w3.removeClassElements = function (elements, name) {
  var i, l = elements.length, arr1, arr2, j;
  for (i = 0; i < l; i++) {
    w3.removeClassElement(elements[i], name);
  }
};
w3.removeClassElement = function (element, name) {
  var i, arr1, arr2;
  arr1 = element.className.split(" ");
  arr2 = name.split(" ");
  for (i = 0; i < arr2.length; i++) {
    while (arr1.indexOf(arr2[i]) > -1) {
      arr1.splice(arr1.indexOf(arr2[i]), 1);
    }
  }
  element.className = arr1.join(" ");
};
w3.toggleClass = function (sel, c1, c2) {
  w3.toggleClassElements(w3.getElements(sel), c1, c2);
};
w3.toggleClassElements = function (elements, c1, c2) {
  var i, l = elements.length;
  for (i = 0; i < l; i++) {
    w3.toggleClassElement(elements[i], c1, c2);
  }
};
w3.toggleClassElement = function (element, c1, c2) {
  var t1, t2, t1Arr, t2Arr, j, arr, allPresent;
  t1 = (c1 || "");
  t2 = (c2 || "");
  t1Arr = t1.split(" ");
  t2Arr = t2.split(" ");
  arr = element.className.split(" ");
  if (t2Arr.length == 0) {
    allPresent = true;
    for (j = 0; j < t1Arr.length; j++) {
      if (arr.indexOf(t1Arr[j]) == -1) {allPresent = false;}
    }
    if (allPresent) {
      w3.removeClassElement(element, t1);
    } else {
      w3.addClassElement(element, t1);
    }
  } else {
    allPresent = true;
    for (j = 0; j < t1Arr.length; j++) {
      if (arr.indexOf(t1Arr[j]) == -1) {allPresent = false;}
    }
    if (allPresent) {
      w3.removeClassElement(element, t1);
      w3.addClassElement(element, t2);
    } else {
      w3.removeClassElement(element, t2);
      w3.addClassElement(element, t1);
    }
  }
};
w3.getElements = function (id) {
  if (typeof id == "object") {
    return [id];
  } else {
    return document.querySelectorAll(id);
  }
};
w3.filterHTML = function(id, sel, filter) {
  var a, b, c, i, ii, iii, hit;
  a = w3.getElements(id);
  for (i = 0; i < a.length; i++) {
    b = a[i].querySelectorAll(sel);
    for (ii = 0; ii < b.length; ii++) {
      hit = 0;
      if (b[ii].innerText.toUpperCase().indexOf(filter.toUpperCase()) > -1) {
        hit = 1;
      }
      c = b[ii].getElementsByTagName("*");
      for (iii = 0; iii < c.length; iii++) {
        if (c[iii].innerText.toUpperCase().indexOf(filter.toUpperCase()) > -1) {
          hit = 1;
        }
      }
      if (hit == 1) {
        b[ii].style.display = "";
      } else {
        b[ii].style.display = "none";
      }
    }
  }
};
w3.sortHTML = function(id, sel, sortvalue) {
  var a, b, i, ii, y, bytt, v1, v2, cc, j;
  a = w3.getElements(id);
  for (i = 0; i < a.length; i++) {
    for (j = 0; j < 2; j++) {
      cc = 0;
      y = 1;
      while (y == 1) {
        y = 0;
        b = a[i].querySelectorAll(sel);
        for (ii = 0; ii < (b.length - 1); ii++) {
          bytt = 0;
          if (sortvalue) {
            v1 = b[ii].querySelector(sortvalue).innerText;
            v2 = b[ii + 1].querySelector(sortvalue).innerText;
          } else {
            v1 = b[ii].innerText;
            v2 = b[ii + 1].innerText;
          }
          v1 = v1.toLowerCase();
          v2 = v2.toLowerCase();
          if ((j == 0 && (v1 > v2)) || (j == 1 && (v1 < v2))) {
            bytt = 1;
            break;
          }
        }
        if (bytt == 1) {
          b[ii].parentNode.insertBefore(b[ii + 1], b[ii]);
          y = 1;
          cc++;
        }
      }
      if (cc > 0) {break;}
    }
  }
};
w3.slideshow = function (sel, ms, func) {
  var i, ss, x = w3.getElements(sel), l = x.length;
  ss = {};
  ss.current = 1;
  ss.x = x;
  ss.ondisplaychange = func;
  if (!isNaN(ms) || ms == 0) {
    ss.milliseconds = ms;
  } else {
    ss.milliseconds = 1000;
  }
  ss.start = function() {
    ss.display(ss.current)
    if (ss.ondisplaychange) {ss.ondisplaychange();}
    if (ss.milliseconds > 0) {
      window.clearTimeout(ss.timeout);
      ss.timeout = window.setTimeout(ss.next, ss.milliseconds);
    }
  };
  ss.next = function() {
    ss.current += 1;
    if (ss.current > ss.x.length) {ss.current = 1;}
    ss.start();
  };
  ss.previous = function() {
    ss.current -= 1;
    if (ss.current < 1) {ss.current = ss.x.length;}
    ss.start();
  };
  ss.display = function (n) {
    w3.styleElements(ss.x, "display", "none");
    w3.styleElement(ss.x[n - 1], "display", "block");
  }
  ss.start();
  return ss;
};
w3.includeHTML = function(cb) {
  var z, i, elmnt, file, xhttp;
  z = document.getElementsByTagName("*");
  for (i = 0; i < z.length; i++) {
    elmnt = z[i];
    file = elmnt.getAttribute("w3-include-html");
    if (file) {
      xhttp = new XMLHttpRequest();
      xhttp.onreadystatechange = function() {
        if (this.readyState == 4) {
          if (this.status == 200) {elmnt.innerHTML = this.responseText;}
          if (this.status == 404) {
            if (elmnt.getAttribute("w3-include-html-default")) {
              elmnt.innerHTML = elmnt.getAttribute("w3-include-html-default");
            }
            else { elmnt.innerHTML = "Page not found."; }
          }
          elmnt.removeAttribute("w3-include-html");
          w3.includeHTML(cb);
        }
      }
      xhttp.open("GET", file, true);
      xhttp.send();
      return;
    }
  }
  if (cb) cb();
};
w3.getHttpData = function (file, func) {
  w3.http(file, function () {
    if (this.readyState == 4 && this.status == 200) {
      func(this.responseText);
    }
  });
};
w3.getHttpObject = function (file, func) {
  w3.http(file, function () {
    if (this.readyState == 4 && this.status == 200) {
      func(JSON.parse(this.responseText));
    }
  });
};
w3.displayHttp = function (id, file) {
  w3.http(file, function () {
    if (this.readyState == 4 && this.status == 200) {
      w3.displayObject(id, JSON.parse(this.responseText));
    }
  });
};
w3.http = function (target, readyfunc, xml, method) {
  var httpObj;
  if (!method) {method = "GET"; }
  if (window.XMLHttpRequest) {
    httpObj = new XMLHttpRequest();
  } else if (window.ActiveXObject) {
    httpObj = new ActiveXObject("Microsoft.XMLHTTP");
  }
  if (httpObj) {
    if (readyfunc) {httpObj.onreadystatechange = readyfunc;}
    httpObj.open(method, target, true);
    httpObj.send(xml);
  }
};
w3.getElementsByAttribute = function (x, att) {
  var arr = [], arrCount = -1, i, l, y = x.getElementsByTagName("*"), z = att.toUpperCase();
  l = y.length;
  for (i = -1; i < l; i += 1) {
    if (i == -1) {y[i] = x;}
    if (y[i].getAttribute(z) !== null) {arrCount += 1; arr[arrCount] = y[i];}
  }
  return arr;
};
w3.dataObject = {},
w3.displayObject = function (id, data) {
  var htmlObj, htmlTemplate, html, arr = [], a, l, rowClone, x, j, i, ii, cc, repeat, repeatObj, repeatX = "";
  htmlObj = document.getElementById(id);
  htmlTemplate = init_template(id, htmlObj);
  html = htmlTemplate.cloneNode(true);
  arr = w3.getElementsByAttribute(html, "w3-repeat");
  l = arr.length;
  for (j = (l - 1); j >= 0; j -= 1) {
    cc = arr[j].getAttribute("w3-repeat").split(" ");
    if (cc.length == 1) {
      repeat = cc[0];
    } else {
      repeatX = cc[0];
      repeat = cc[2];
    }
    arr[j].removeAttribute("w3-repeat");
    repeatObj = data[repeat];
    if (repeatObj && typeof repeatObj == "object" && repeatObj.length != "undefined") {
      i = 0;
      for (x in repeatObj) {
        i += 1;
        rowClone = arr[j];
        rowClone = w3_replace_curly(rowClone, "element", repeatX, repeatObj[x]);
        a = rowClone.attributes;
        for (ii = 0; ii < a.length; ii += 1) {
          a[ii].value = w3_replace_curly(a[ii], "attribute", repeatX, repeatObj[x]).value;
        }
        (i === repeatObj.length) ? arr[j].parentNode.replaceChild(rowClone, arr[j]) : arr[j].parentNode.insertBefore(rowClone, arr[j]);
      }
    } else {
      console.log("w3-repeat must be an array. " + repeat + " is not an array.");
      continue;
    }
  }
  html = w3_replace_curly(html, "element");
  htmlObj.parentNode.replaceChild(html, htmlObj);
  function init_template(id, obj) {
    var template;
    template = obj.cloneNode(true);
    if (w3.dataObject.hasOwnProperty(id)) {return w3.dataObject[id];}
    w3.dataObject[id] = template;
    return template;
  }
  function w3_replace_curly(elmnt, typ, repeatX, x) {
    var value, rowClone, pos1, pos2, originalHTML, lookFor, lookForARR = [], i, cc, r;
    rowClone = elmnt.cloneNode(true);
    pos1 = 0;
    while (pos1 > -1) {
      originalHTML = (typ == "attribute") ? rowClone.value : rowClone.innerHTML;
      pos1 = originalHTML.indexOf("{{", pos1);
      if (pos1 === -1) {break;}
      pos2 = originalHTML.indexOf("}}", pos1 + 1);
      lookFor = originalHTML.substring(pos1 + 2, pos2);
      lookForARR = lookFor.split("||");
      value = undefined;
      for (i = 0; i < lookForARR.length; i += 1) {
        lookForARR[i] = lookForARR[i].replace(/^\s+|\s+$/gm, ''); //trim
        if (x) {value = x[lookForARR[i]];}
        if (value == undefined && data) {value = data[lookForARR[i]];}
        if (value == undefined) {
          cc = lookForARR[i].split(".");
          if (cc[0] == repeatX) {value = x[cc[1]]; }
        }
        if (value == undefined) {
          if (lookForARR[i] == repeatX) {value = x;}
        }
        if (value == undefined) {
          if (lookForARR[i].substr(0, 1) == '"') {
            value = lookForARR[i].replace(/"/g, "");
          } else if (lookForARR[i].substr(0,1) == "'") {
            value = lookForARR[i].replace(/'/g, "");
          }
        }
        if (value != undefined) {break;}
      }
      if (value != undefined) {
        r = "{{" + lookFor + "}}";
        if (typ == "attribute") {
          rowClone.value = rowClone.value.replace(r, value);
        } else {
          w3_replace_html(rowClone, r, value);
        }
      }
      pos1 = pos1 + 1;
    }
    return rowClone;
  }
  function w3_replace_html(a, r, result) {
    var b, l, i, a, x, j;
    if (a.hasAttributes()) {
      b = a.attributes;
      l = b.length;
      for (i = 0; i < l; i += 1) {
        if (b[i].value.indexOf(r) > -1) {b[i].value = b[i].value.replace(r, result);}
      }
    }
    x = a.getElementsByTagName("*");
    l = x.length;
    a.innerHTML = a.innerHTML.replace(r, result);
  }
};
```
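Of this vendored library, only `w3.includeHTML` (with the `w3-include-html` and `w3-include-html-default` attributes) is exercised by the version selector above. A minimal standalone usage sketch, with hypothetical paths:

```html
<!-- Sketch: w3.includeHTML fetches /snippet.html into the div; if the
     request 404s it falls back to the inline default markup, and the
     callback fires once all include attributes have been resolved. -->
<div w3-include-html="/snippet.html"
     w3-include-html-default="<em>fallback content</em>"></div>
<script src="/js/w3.js"></script>
<script>
  w3.includeHTML(function () {
    console.log("all w3-include-html fragments resolved");
  });
</script>
```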
`DEVELOPER.md` (32 changes):

````diff
@@ -134,6 +134,19 @@ go test -race -v ./...
     ```shell
     go test -race -v ./tests/alloydbpg
     ```
+1. **Timeout:** The integration test should have a timeout on the server.
+   Look for code like this:
+    ```go
+    ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
+    defer cancel()
+
+    cmd, cleanup, err := tests.StartCmd(ctx, toolsFile, args...)
+    if err != nil {
+        t.Fatalf("command initialization returned an error: %s", err)
+    }
+    defer cleanup()
+    ```
+   Be sure to set the timeout to a reasonable value for your tests.
 
 #### Running on Pull Requests
 
@@ -229,6 +242,25 @@ Follow these steps to preview documentation changes locally using a Hugo server:
 
 ### Previewing Documentation on Pull Requests
 
+### Document Versioning Setup
+
+There are 3 GHA workflows we use to achieve document versioning:
+
+1. **Deploy In-development docs:**
+   This workflow is run on every commit merged into the main branch. It deploys the built site to the `/dev/` subdirectory for the in-development documentation.
+
+1. **Deploy Versioned Docs:**
+   When a new GitHub Release is published, it performs two deployments based on the new release tag.
+   One to the new version subdirectory and one to the root directory of the versioned-gh-pages branch.
+
+   **Note:** Before the release PR from release-please is merged, add the newest version into the hugo.toml file.
+
+1. **Deploy Previous Version Docs:**
+   This is a manual workflow, started from the GitHub Actions UI.
+   To rebuild and redeploy documentation for an already released version that were released before this new system was in place. This workflow can be started on the UI by providing the git version tag which you want to create the documentation for.
+   The specific versioned subdirectory and the root docs are updated on the versioned-gh-pages branch.
+
+
 #### Contributors
 
 Request a repo owner to run the preview deployment workflow on your PR. A
````
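As a concrete sketch of the hugo.toml note above, the block to add before a release mirrors the existing `[[params.versions]]` entries in the Hugo configuration (the version number here is illustrative):

```toml
# Hypothetical next release; insert above the older entries so the
# dropdown order matches the release order.
[[params.versions]]
version = "v0.17.0"
url = "https://googleapis.github.io/genai-toolbox/v0.17.0/"
```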
Go tool-registration imports:

```diff
@@ -80,6 +80,7 @@ import (
     _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmysql/cloudsqlmysqlcreateinstance"
     _ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlpg/cloudsqlpgcreateinstances"
     _ "github.com/googleapis/genai-toolbox/internal/tools/couchbase"
+    _ "github.com/googleapis/genai-toolbox/internal/tools/dataform/dataformcompilelocal"
     _ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexlookupentry"
     _ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchaspecttypes"
     _ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchentries"
@@ -97,6 +98,7 @@ import (
     _ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules"
     _ "github.com/googleapis/genai-toolbox/internal/tools/http"
     _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardelement"
+    _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerconversationalanalytics"
     _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdashboards"
     _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdimensions"
     _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetexplores"
@@ -105,6 +107,9 @@ import (
     _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmeasures"
     _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmodels"
     _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetparameters"
+    _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthanalyze"
+    _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthpulse"
+    _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthvacuum"
     _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakedashboard"
     _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakelook"
     _ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquery"
```
|
||||
mysql_config, _ := prebuiltconfigs.Get("mysql")
|
||||
mssql_config, _ := prebuiltconfigs.Get("mssql")
|
||||
looker_config, _ := prebuiltconfigs.Get("looker")
|
||||
lookerca_config, _ := prebuiltconfigs.Get("looker-conversational-analytics")
|
||||
postgresconfig, _ := prebuiltconfigs.Get("postgres")
|
||||
spanner_config, _ := prebuiltconfigs.Get("spanner")
|
||||
spannerpg_config, _ := prebuiltconfigs.Get("spanner-postgres")
|
||||
@@ -1327,6 +1328,9 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
t.Setenv("LOOKER_CLIENT_SECRET", "your_looker_client_secret")
|
||||
t.Setenv("LOOKER_VERIFY_SSL", "true")
|
||||
|
||||
t.Setenv("LOOKER_PROJECT", "your_project_id")
|
||||
t.Setenv("LOOKER_LOCATION", "us")
|
||||
|
||||
t.Setenv("SQLITE_DATABASE", "test.db")
|
||||
|
||||
t.Setenv("NEO4J_URI", "bolt://localhost:7687")
|
||||
@@ -1489,7 +1493,17 @@ func TestPrebuiltTools(t *testing.T) {
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"looker_tools": tools.ToolsetConfig{
|
||||
Name: "looker_tools",
|
||||
ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look", "make_look", "get_dashboards", "make_dashboard", "add_dashboard_element"},
|
||||
ToolNames: []string{"get_models", "get_explores", "get_dimensions", "get_measures", "get_filters", "get_parameters", "query", "query_sql", "query_url", "get_looks", "run_look", "make_look", "get_dashboards", "make_dashboard", "add_dashboard_element", "health_pulse", "health_analyze", "health_vacuum"},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "looker-conversational-analytics prebuilt tools",
|
||||
in: lookerca_config,
|
||||
wantToolset: server.ToolsetConfigs{
|
||||
"looker_conversational_analytics_tools": tools.ToolsetConfig{
|
||||
Name: "looker_conversational_analytics_tools",
|
||||
ToolNames: []string{"ask_data_insights", "get_models", "get_explores"},
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
Go module files (`go.mod`, `go.sum`):

```diff
@@ -4,7 +4,7 @@ go 1.24.6
 
 require (
     github.com/googleapis/mcp-toolbox-sdk-go v0.3.0
-    google.golang.org/genai v1.25.0
+    google.golang.org/genai v1.28.0
 )
 
 require (
@@ -102,8 +102,8 @@ golang.org/x/time v0.12.0 h1:ScB/8o8olJvc+CQPWrK3fPZNfh7qgwCrY0zJmoEQLSE=
 golang.org/x/time v0.12.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg=
 google.golang.org/api v0.248.0 h1:hUotakSkcwGdYUqzCRc5yGYsg4wXxpkKlW5ryVqvC1Y=
 google.golang.org/api v0.248.0/go.mod h1:yAFUAF56Li7IuIQbTFoLwXTCI6XCFKueOlS7S9e4F9k=
-google.golang.org/genai v1.23.0 h1:0VkQPd1CVT5FbykwkWvnB7jq1d+PZFuVf0n57UyyOzs=
-google.golang.org/genai v1.23.0/go.mod h1:QPj5NGJw+3wEOHg+PrsWwJKvG6UC84ex5FR7qAYsN/M=
+google.golang.org/genai v1.28.0 h1:6qpUWFH3PkHPhxNnu3wjaCVJ6Jri1EIR7ks07f9IpIk=
+google.golang.org/genai v1.28.0/go.mod h1:7pAilaICJlQBonjKKJNhftDFv3SREhZcTe9F6nRcjbg=
 google.golang.org/genproto v0.0.0-20250603155806-513f23925822 h1:rHWScKit0gvAPuOnu87KpaYtjK5zBMLcULh7gxkCXu4=
 google.golang.org/genproto v0.0.0-20250603155806-513f23925822/go.mod h1:HubltRL7rMh0LfnQPkMH4NPDFEWp0jw3vixw7jEM53s=
 google.golang.org/genproto/googleapis/api v0.0.0-20250818200422-3122310a409c h1:AtEkQdl5b6zsybXcbz00j1LwNodDuH6hVifIaNqk7NQ=
```
Go samples (default model bump):

```diff
@@ -48,7 +48,7 @@ func main() {
     // Initialize Genkit
     g, err := genkit.Init(ctx,
         genkit.WithPlugins(&googlegenai.GoogleAI{}),
-        genkit.WithDefaultModel("googleai/gemini-1.5-flash"),
+        genkit.WithDefaultModel("googleai/gemini-2.0-flash"),
     )
     if err != nil {
         log.Fatalf("Failed to init genkit: %v\n", err)
@@ -59,7 +59,7 @@ func main() {
     ctx := context.Background()
 
     // Initialize the Google AI client (LLM).
-    llm, err := googleai.New(ctx, googleai.WithAPIKey(genaiKey), googleai.WithDefaultModel("gemini-1.5-flash"))
+    llm, err := googleai.New(ctx, googleai.WithAPIKey(genaiKey), googleai.WithDefaultModel("gemini-2.0-flash"))
     if err != nil {
         log.Fatalf("Failed to create Google AI client: %v", err)
     }
```
Python requirements files:

```diff
@@ -1,3 +1,3 @@
-google-genai==1.38.0
+google-genai==1.42.0
 toolbox-core==0.5.2
 pytest==8.4.2
@@ -1,5 +1,5 @@
 langchain==0.3.27
 langchain-google-vertexai==2.1.2
-langgraph==0.6.7
+langgraph==0.6.8
 toolbox-langchain==0.5.2
 pytest==8.4.2
```
Prebuilt tools documentation:

```diff
@@ -84,6 +84,8 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
 * **Environment Variables:**
   * `BIGQUERY_PROJECT`: The GCP project ID.
   * `BIGQUERY_LOCATION`: (Optional) The dataset location.
+  * `BIGQUERY_USE_CLIENT_OAUTH`: (Optional) If `true`, forwards the client's
+    OAuth access token for authentication. Defaults to `false`.
 * **Permissions:**
   * **BigQuery User** (`roles/bigquery.user`) to execute queries and view
     metadata.
@@ -132,6 +134,10 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
   * `list_tables`: Lists tables in the database.
   * `get_query_plan`: Provides information about how MySQL executes a SQL
     statement.
+  * `list_active_queries`: Lists ongoing queries.
+  * `list_tables_missing_unique_indexes`: Looks for tables that do not have
+    primary or unique key contraint.
+  * `list_table_fragmentation`: Displays table fragmentation in MySQL.
 
 ## Cloud SQL for MySQL Observability
 
@@ -354,6 +360,10 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
   * `LOOKER_CLIENT_ID`: The client ID for the Looker API.
   * `LOOKER_CLIENT_SECRET`: The client secret for the Looker API.
   * `LOOKER_VERIFY_SSL`: Whether to verify SSL certificates.
+  * `LOOKER_USE_CLIENT_OAUTH`: Whether to use OAuth for authentication.
+  * `LOOKER_SHOW_HIDDEN_MODELS`: Whether to show hidden models.
+  * `LOOKER_SHOW_HIDDEN_EXPLORES`: Whether to show hidden explores.
+  * `LOOKER_SHOW_HIDDEN_FIELDS`: Whether to show hidden fields.
 * **Permissions:**
   * A Looker account with permissions to access the desired models,
     explores, and data is required.
@@ -373,6 +383,35 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
   * `get_dashboards`: Searches for saved dashboards.
   * `make_dashboard`: Creates a new dashboard.
   * `add_dashboard_element`: Adds a tile to a dashboard.
+  * `health_pulse`: Test the health of a Looker instance.
+  * `health_analyze`: Analyze the LookML usage of a Looker instance.
+  * `health_vacuum`: Suggest LookML elements that can be removed.
+
+## Looker Conversational Analytics
+
+* `--prebuilt` value: `looker-conversational-analytics`
+* **Environment Variables:**
+  * `LOOKER_BASE_URL`: The URL of your Looker instance.
+  * `LOOKER_CLIENT_ID`: The client ID for the Looker API.
+  * `LOOKER_CLIENT_SECRET`: The client secret for the Looker API.
+  * `LOOKER_VERIFY_SSL`: Whether to verify SSL certificates.
+  * `LOOKER_USE_CLIENT_OAUTH`: Whether to use OAuth for authentication.
+  * `LOOKER_PROJECT`: The GCP Project to use for Conversational Analytics.
+  * `LOOKER_LOCATION`: The GCP Location to use for Conversational Analytics.
+* **Permissions:**
+  * A Looker account with permissions to access the desired models,
+    explores, and data is required.
+  * **Looker Instance User** (`roles/looker.instanceUser`): IAM role to
+    access Looker.
+  * **Gemini for Google Cloud User** (`roles/cloudaicompanion.user`): IAM
+    role to access Conversational Analytics.
+  * **Gemini Data Analytics Stateless Chat User (Beta)**
+    (`roles/geminidataanalytics.dataAgentStatelessUser`): IAM role to
+    access Conversational Analytics.
+* **Tools:**
+  * `ask_data_insights`: Ask a question of the data.
+  * `get_models`: Retrieves the list of LookML models.
+  * `get_explores`: Retrieves the list of explores in a model.
 
 ## Microsoft SQL Server
 
@@ -407,6 +446,10 @@ details on how to connect your AI tools (IDEs) to databases via Toolbox and MCP.
   * `list_tables`: Lists tables in the database.
   * `get_query_plan`: Provides information about how MySQL executes a SQL
     statement.
+  * `list_active_queries`: Lists ongoing queries.
+  * `list_tables_missing_unique_indexes`: Looks for tables that do not have
+    primary or unique key contraint.
+  * `list_table_fragmentation`: Displays table fragmentation in MySQL.
 
 ## OceanBase
 
```
BigQuery source documentation:

```diff
@@ -119,6 +119,7 @@ sources:
     kind: "bigquery"
     project: "my-project-id"
     # location: "US" # Optional: Specifies the location for query jobs.
+    # writeMode: "allowed" # One of: allowed, blocked, protected. Defaults to "allowed".
     # allowedDatasets: # Optional: Restricts tool access to a specific list of datasets.
     #   - "my_dataset_1"
     #   - "other_project.my_dataset_2"
@@ -133,6 +134,7 @@ sources:
     project: "my-project-id"
     useClientOAuth: true
     # location: "US" # Optional: Specifies the location for query jobs.
+    # writeMode: "allowed" # One of: allowed, blocked, protected. Defaults to "allowed".
     # allowedDatasets: # Optional: Restricts tool access to a specific list of datasets.
     #   - "my_dataset_1"
     #   - "other_project.my_dataset_2"
@@ -145,5 +147,6 @@ sources:
 | kind | string | true | Must be "bigquery". |
 | project | string | true | Id of the Google Cloud project to use for billing and as the default project for BigQuery resources. |
 | location | string | false | Specifies the location (e.g., 'us', 'asia-northeast1') in which to run the query job. This location must match the location of any tables referenced in the query. Defaults to the table's location or 'US' if the location cannot be determined. [Learn More](https://cloud.google.com/bigquery/docs/locations) |
+| writeMode | string | false | Controls the write behavior for tools. `allowed` (default): All queries are permitted. `blocked`: Only `SELECT` statements are allowed for the `bigquery-execute-sql` tool. `protected`: Enables session-based execution where all tools associated with this source instance share the same [BigQuery session](https://cloud.google.com/bigquery/docs/sessions-intro). This allows for stateful operations using temporary tables (e.g., `CREATE TEMP TABLE`). For `bigquery-execute-sql`, `SELECT` statements can be used on all tables, but write operations are restricted to the session's temporary dataset. For tools like `bigquery-sql`, `bigquery-forecast`, and `bigquery-analyze-contribution`, the `writeMode` restrictions do not apply, but they will operate within the shared session. **Note:** The `protected` mode cannot be used with `useClientOAuth: true`. It is also not recommended for multi-user server environments, as all users would share the same session. A session is terminated automatically after 24 hours of inactivity or after 7 days, whichever comes first. A new session is created on the next request, and any temporary data from the previous session will be lost. |
 | allowedDatasets | []string | false | An optional list of dataset IDs that tools using this source are allowed to access. If provided, any tool operation attempting to access a dataset not in this list will be rejected. To enforce this, two types of operations are also disallowed: 1) Dataset-level operations (e.g., `CREATE SCHEMA`), and 2) operations where table access cannot be statically analyzed (e.g., `EXECUTE IMMEDIATE`, `CREATE PROCEDURE`). If a single dataset is provided, it will be treated as the default for prebuilt tools. |
-| useClientOAuth | bool | false | If true, forwards the client's OAuth access token from the "Authorization" header to downstream queries. |
+| useClientOAuth | bool | false | If true, forwards the client's OAuth access token from the "Authorization" header to downstream queries. **Note:** This cannot be used with `writeMode: protected`. |
```
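Putting the new field together with the documented ones, a sketch of a source that opts into session-based execution (source name and dataset are illustrative, and `useClientOAuth` is deliberately omitted since it cannot be combined with `protected`):

```yaml
sources:
  my-bigquery-session-source:   # hypothetical name
    kind: "bigquery"
    project: "my-project-id"
    writeMode: "protected"      # all tools on this source share one BigQuery session
    allowedDatasets:
      - "my_dataset_1"
```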
Cloud SQL Admin source documentation:

```diff
@@ -11,7 +11,7 @@ aliases:
 ## About
 
 The `cloud-sql-admin` source provides a client to interact with the [Google
-Cloud SQL Admin API](https://cloud.google.com/sql/docs/mysql/admin-api/v1). This
+Cloud SQL Admin API](https://cloud.google.com/sql/docs/mysql/admin-api). This
 allows tools to perform administrative tasks on Cloud SQL instances, such as
 creating users and databases.
 
```
Firebird source documentation:

```diff
@@ -31,7 +31,7 @@ maintenance.
 This source uses standard authentication. You will need to [create a Firebird
 user][fb-users] to login to the database with.
 
-[fb-users]: https://firebirdsql.org/refdocs/langrefupd25-sql-create-user.html
+[fb-users]: https://www.firebirdsql.org/refdocs/langrefupd25-security-sql-user-mgmt.html#langrefupd25-security-create-user
 
 ## Example
 
```
Looker source documentation:

````diff
@@ -16,7 +16,7 @@ in the cloud, on GCP, or on premises.
 
 ## Requirements
 
-### Database User
+### Looker User
 
 This source only uses API authentication. You will need to
 [create an API user][looker-user] to login to Looker.
@@ -24,6 +24,35 @@ This source only uses API authentication. You will need to
 [looker-user]:
 https://cloud.google.com/looker/docs/api-auth#authentication_with_an_sdk
 
+{{< notice note >}}
+To use the Conversational Analytics API, you will need to have the following
+Google Cloud Project API enabled and IAM permissions.
+{{< /notice >}}
+
+### API Enablement in GCP
+
+Enable the following APIs in your Google Cloud Project:
+
+```
+gcloud services enable geminidataanalytics.googleapis.com --project=$PROJECT_ID
+gcloud services enable cloudaicompanion.googleapis.com --project=$PROJECT_ID
+```
+
+### IAM Permissions in GCP
+
+In addition to [setting the ADC for your server][set-adc], you need to ensure
+the IAM identity has been given the following IAM roles (or corresponding
+permissions):
+
+- `roles/looker.instanceUser`
+- `roles/cloudaicompanion.user`
+- `roles/geminidataanalytics.dataAgentStatelessUser`
+
+To initialize the application default credential run `gcloud auth login --update-adc`
+in your environment before starting MCP Toolbox.
+
+[set-adc]: https://cloud.google.com/docs/authentication/provide-credentials-adc
+
 ## Example
 
 ```yaml
@@ -33,6 +62,8 @@ sources:
     base_url: http://looker.example.com
     client_id: ${LOOKER_CLIENT_ID}
     client_secret: ${LOOKER_CLIENT_SECRET}
+    project: ${LOOKER_PROJECT}
+    location: ${LOOKER_LOCATION}
     verify_ssl: true
     timeout: 600s
 ```
@@ -50,6 +81,8 @@ The client id and client secret are seemingly random character sequences
 assigned by the looker server. If you are using Looker OAuth you don't need
 these settings
 
+The `project` and `location` fields are utilized **only** when using the conversational analytics tool.
+
 {{< notice tip >}}
 Use environment variable replacement with the format ${ENV_NAME}
 instead of hardcoding your secrets into the configuration file.
@@ -64,6 +97,8 @@ instead of hardcoding your secrets into the configuration file.
 | client_id | string | false | The client id assigned by Looker. |
 | client_secret | string | false | The client secret assigned by Looker. |
 | verify_ssl | string | false | Whether to check the ssl certificate of the server. |
+| project | string | false | The project id to use in Google Cloud. |
+| location | string | false | The location to use in Google Cloud. (default: us) |
 | timeout | string | false | Maximum time to wait for query execution (e.g. "30s", "2m"). By default, 120s is applied. |
 | use_client_oauth | string | false | Use OAuth tokens instead of client_id and client_secret. (default: false) |
 | show_hidden_models | string | false | Show or hide hidden models. (default: true) |
````
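A sketch of granting one of the roles listed above to the server's identity (the service-account email and project ID are placeholders, not values from this change):

```shell
# Hypothetical member; substitute the identity your ADC resolves to.
gcloud projects add-iam-policy-binding $PROJECT_ID \
  --member="serviceAccount:toolbox-sa@$PROJECT_ID.iam.gserviceaccount.com" \
  --role="roles/geminidataanalytics.dataAgentStatelessUser"
```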
Redis source documentation:

```diff
@@ -16,7 +16,7 @@ lists, sets, sorted sets with range queries, bitmaps, hyperloglogs, and
 geospatial indexes with radius queries.
 
 If you are new to Redis, you can find installation and getting started guides on
-the [official Redis website](https://redis.io/docs/getting-started/).
+the [official Redis website](https://redis.io/docs/).
 
 ## Available Tools
 
```
@@ -39,6 +39,13 @@ It's compatible with the following sources:
  insights. Can be `'NO_PRUNING'` or `'PRUNE_REDUNDANT_INSIGHTS'`. Defaults to
  `'PRUNE_REDUNDANT_INSIGHTS'`.

The behavior of this tool is influenced by the `writeMode` setting on its `bigquery` source:

- **`allowed` (default) and `blocked`:** These modes do not impose any special restrictions on the `bigquery-analyze-contribution` tool.
- **`protected`:** This mode enables session-based execution. The tool will operate within the same BigQuery session as other
  tools using the same source. This allows the `input_data` parameter to be a query that references temporary resources (e.g.,
  `TEMP` tables) created within that session.

## Example
@@ -20,8 +20,15 @@ It's compatible with the following sources:
- **`dry_run`** (optional): If set to `true`, the query is validated but not run,
  returning information about the execution instead. Defaults to `false`.

The behavior of this tool is influenced by the `writeMode` setting on its `bigquery` source:

- **`allowed` (default):** All SQL statements are permitted.
- **`blocked`:** Only `SELECT` statements are allowed. Any other type of statement (e.g., `INSERT`, `UPDATE`, `CREATE`) will be rejected.
- **`protected`:** This mode enables session-based execution. `SELECT` statements can be used on all tables, while write operations are allowed only for the session's temporary dataset (e.g., `CREATE TEMP TABLE ...`). This prevents modifications to permanent datasets while allowing stateful, multi-step operations within a secure session (see the source sketch after this list).
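Note that `writeMode` is set on the `bigquery` source, not on the tool itself. A minimal source sketch, assuming a hypothetical project ID:

```yaml
sources:
  my-bigquery-source:
    kind: bigquery
    project: my-project    # hypothetical project ID
    writeMode: protected   # other accepted values: allowed (default), blocked
```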
The tool's behavior is influenced by the `allowedDatasets` restriction on the
-`bigquery` source:
+`bigquery` source. Similar to `writeMode`, this setting provides an additional layer of security by controlling which datasets can be accessed:

- **Without `allowedDatasets` restriction:** The tool can execute any valid GoogleSQL
  query.
- **With `allowedDatasets` restriction:** Before execution, the tool performs a dry run
@@ -33,6 +40,8 @@ The tool's behavior is influenced by the `allowedDatasets` restriction on the
- **Unanalyzable operations** where the accessed tables cannot be determined
  statically (e.g., `EXECUTE IMMEDIATE`, `CREATE PROCEDURE`, `CALL`).

> **Note:** This tool is intended for developer assistant workflows with human-in-the-loop and shouldn't be used for production agents.

## Example

```yaml
@@ -33,12 +33,19 @@ query based on the provided parameters:
- **horizon** (integer, optional): The number of future time steps you want to
  predict. It defaults to 10 if not specified.

-The tool's behavior regarding these parameters is influenced by the `allowedDatasets` restriction on the `bigquery` source:
+The behavior of this tool is influenced by the `writeMode` setting on its `bigquery` source:

- **`allowed` (default) and `blocked`:** These modes do not impose any special restrictions on the `bigquery-forecast` tool.
- **`protected`:** This mode enables session-based execution. The tool will operate within the same BigQuery session as other
  tools using the same source. This allows the `history_data` parameter to be a query that references temporary resources (e.g.,
  `TEMP` tables) created within that session.

The tool's behavior is also influenced by the `allowedDatasets` restriction on the `bigquery` source:

- **Without `allowedDatasets` restriction:** The tool can use any table or query for the `history_data` parameter.
-- **With `allowedDatasets` restriction:** The tool verifies that the `history_data` parameter only accesses tables
-  within the allowed datasets. If `history_data` is a table ID, the tool checks if the table's dataset is in the
-  allowed list. If `history_data` is a query, the tool performs a dry run to analyze the query and rejects it
-  if it accesses any table outside the allowed list.
+- **With `allowedDatasets` restriction:** The tool verifies that the `history_data` parameter only accesses tables within the allowed datasets.
+  - If `history_data` is a table ID, the tool checks if the table's dataset is in the allowed list.
+  - If `history_data` is a query, the tool performs a dry run to analyze the query and rejects it if it accesses any table outside the allowed list (see the sketch after this list).
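For illustration, a hypothetical set of invocation arguments; the table and query below are assumptions, not defaults of the tool:

```yaml
# hypothetical arguments an agent might pass to bigquery-forecast
history_data: "my-project.analytics.sales_history"  # table ID form; its dataset must be in the allowed list
# history_data: "SELECT ts, sales FROM `my-project.analytics.sales_history`"  # query form, validated via dry run
horizon: 30  # defaults to 10 when omitted
```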
## Example
@@ -15,10 +15,20 @@ It's compatible with the following sources:

- [bigquery](../../sources/bigquery.md)

-`bigquery-get-table-info` takes `dataset` and `table` parameters to specify
-the target table. It also optionally accepts a `project` parameter to define
-the Google Cloud project ID. If the `project` parameter is not provided, the
-tool defaults to using the project defined in the source configuration.
+`bigquery-get-table-info` accepts the following parameters:
+
+- **`table`** (required): The name of the table for which to retrieve metadata.
+- **`dataset`** (required): The dataset containing the specified table.
+- **`project`** (optional): The Google Cloud project ID. If not provided, the
+  tool defaults to the project from the source configuration.

The tool's behavior regarding these parameters is influenced by the
`allowedDatasets` restriction on the `bigquery` source:

- **Without `allowedDatasets` restriction:** The tool can retrieve metadata for
  any table specified by the `table`, `dataset`, and `project` parameters.
- **With `allowedDatasets` restriction:** Before retrieving metadata, the tool
  verifies that the requested dataset is in the allowed list. If it is not, the
  request is denied. If only one dataset is specified in the `allowedDatasets`
  list, it will be used as the default value for the `dataset` parameter (see
  the sketch after this list).
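As a sketch, hypothetical invocation arguments (all names illustrative):

```yaml
# hypothetical arguments for bigquery-get-table-info
dataset: "analytics"  # must be in allowedDatasets when the restriction is configured
table: "events"
# project omitted: defaults to the project configured on the source
```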
## Example
@@ -15,9 +15,17 @@ It's compatible with the following sources:

- [bigquery](../../sources/bigquery.md)

-`bigquery-list-dataset-ids` optionally accepts a `project` parameter to define
-the Google Cloud project ID. If the `project` parameter is not provided, the
-tool defaults to using the project defined in the source configuration.
+`bigquery-list-dataset-ids` accepts the following parameter:
+
+- **`project`** (optional): Defines the Google Cloud project ID. If not provided,
+  the tool defaults to the project from the source configuration.

The tool's behavior regarding this parameter is influenced by the
`allowedDatasets` restriction on the `bigquery` source:

- **Without `allowedDatasets` restriction:** The tool can list datasets from any
  project specified by the `project` parameter.
- **With `allowedDatasets` restriction:** The tool directly returns the
  pre-configured list of dataset IDs from the source, and the `project`
  parameter is ignored.

## Example
@@ -15,6 +15,11 @@ the following sources:

- [bigquery](../../sources/bigquery.md)

The behavior of this tool is influenced by the `writeMode` setting on its `bigquery` source:

- **`allowed` (default) and `blocked`:** These modes do not impose any restrictions on the `bigquery-sql` tool. The pre-defined SQL statement will be executed as-is.
- **`protected`:** This mode enables session-based execution. The tool will operate within the same BigQuery session as other tools using the same source, allowing it to interact with temporary resources like `TEMP` tables created within that session.

### GoogleSQL

BigQuery uses [GoogleSQL][bigquery-googlesql] for querying data. The integration
docs/en/resources/tools/dataform/_index.md (new file)
@@ -0,0 +1,7 @@
---
title: "Dataform"
type: docs
weight: 1
description: >
  Tools that work with Dataform.
---
docs/en/resources/tools/dataform/dataform-compile-local.md (new file)
@@ -0,0 +1,48 @@
---
title: "dataform-compile-local"
type: docs
weight: 1
description: >
  A "dataform-compile-local" tool runs the `dataform compile` CLI command on a local project directory.
aliases:
- /resources/tools/dataform-compile-local
---

## About

A `dataform-compile-local` tool runs the `dataform compile` command on a local Dataform project.

It is a standalone tool and **is not** compatible with any sources.

At invocation time, the tool executes `dataform compile --json` in the specified project directory and returns the resulting JSON object from the CLI.

`dataform-compile-local` takes the following parameter:

- `project_dir` (string): The absolute or relative path to the local Dataform project directory. The server process must have read access to this path (see the sketch below).
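As a sketch, the single argument an agent might pass at invocation time; the path is hypothetical:

```yaml
# hypothetical invocation arguments for dataform-compile-local
project_dir: "./my_dataform_project"  # must be readable by the server process
```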
## Requirements

### Dataform CLI

This tool executes the `dataform` command-line interface (CLI) via a system call. You must have the **`dataform` CLI** installed and available in the server's system `PATH`.

You can typically install the CLI via `npm`:

```bash
npm install -g @dataform/cli
```

See the [official Dataform documentation](https://cloud.google.com/dataform/docs/install-dataform-cli) for more details.

## Example

```yaml
tools:
  my_dataform_compiler:
    kind: dataform-compile-local
    description: Use this tool to compile a local Dataform project.
```

## Reference

| **field** | **type** | **required** | **description** |
| :---- | :---- | :---- | :---- |
| kind | string | true | Must be "dataform-compile-local". |
| description | string | true | Description of the tool that is passed to the LLM. |
@@ -3,8 +3,7 @@ title: "looker-add-dashboard-element"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
"looker-add-dashboard-element" generates a Looker look in the users personal folder in
|
||||
Looker
|
||||
"looker-add-dashboard-element" creates a dashboard element in the given dashboard.
|
||||
aliases:
|
||||
- /resources/tools/looker-add-dashboard-element
|
||||
---
|
||||
|
||||
docs/en/resources/tools/looker/looker-conversational-analytics.md (new file)
@@ -0,0 +1,45 @@
---
title: "looker-conversational-analytics"
type: docs
weight: 1
description: >
  The "looker-conversational-analytics" tool will use the Conversational
  Analytics API to analyze data from Looker
aliases:
- /resources/tools/looker-conversational-analytics
---

## About

A `looker-conversational-analytics` tool allows you to ask questions about your Looker data.

It's compatible with the following sources:

- [looker](../../sources/looker.md)

`looker-conversational-analytics` accepts two parameters:

1. `user_query_with_context`: The question asked of the Conversational Analytics system.
2. `explore_references`: A list of one to five explores that can be queried to answer the
   question. The form of each entry is `[{"model": "model name", "explore": "explore name"}, ...]`
   (see the sketch after this list).
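For example, an invocation payload might look like the following; the model and explore names are hypothetical:

```yaml
# hypothetical invocation arguments for looker-conversational-analytics
user_query_with_context: "What were total sales by region last quarter?"
explore_references:
  - model: "thelook"
    explore: "order_items"
```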
## Example

```yaml
tools:
  ask_data_insights:
    kind: looker-conversational-analytics
    source: looker-source
    description: |
      Use this tool to perform data analysis, get insights,
      or answer complex questions about the contents of specific
      Looker explores.
```

## Reference

| **field** | **type** | **required** | **description** |
|-------------|:--------:|:------------:|----------------------------------------------------|
| kind | string | true | Must be "looker-conversational-analytics". |
| source | string | true | Name of the Looker source to query. |
| description | string | true | Description of the tool that is passed to the LLM. |
@@ -3,8 +3,7 @@ title: "looker-get-dashboards"
|
||||
type: docs
|
||||
weight: 1
|
||||
description: >
|
||||
"looker-get-dashboards" searches for saved Looks in a Looker
|
||||
source.
|
||||
"looker-get-dashboards" tool searches for a saved Dashboard by name or description.
|
||||
aliases:
|
||||
- /resources/tools/looker-get-dashboards
|
||||
---
|
||||
|
||||
docs/en/resources/tools/looker/looker-health-analyze.md (new file)
@@ -0,0 +1,63 @@
---
title: "looker-health-analyze"
type: docs
weight: 1
description: >
  "looker-health-analyze" provides a set of analytical commands for a Looker instance, allowing users to analyze projects, models, and explores.
aliases:
- /resources/tools/looker-health-analyze
---

## About

The `looker-health-analyze` tool performs various analysis tasks on a Looker instance. The `action` parameter selects the type of analysis to perform:

- `projects`: Analyzes all projects or a specified project, reporting on the number of models and view files, as well as Git connection and validation status.
- `models`: Analyzes all models or a specified model, providing a count of explores, unused explores, and total query counts.
- `explores`: Analyzes all explores or a specified explore, reporting on the number of joins, unused joins, fields, unused fields, and query counts. A field is classified as **unused** if it has not been used as a field or filter in production within the past 90 days.

## Parameters

| **field** | **type** | **required** | **description** |
| :--- | :--- | :--- | :--- |
| kind | string | true | Must be "looker-health-analyze" |
| source | string | true | Looker source name |
| action | string | true | The analysis to perform: `projects`, `models`, or `explores`. |
| project | string | false | The name of the Looker project to analyze. |
| model | string | false | The name of the Looker model to analyze. Required for the `explores` action. |
| explore | string | false | The name of the Looker explore to analyze. Required for the `explores` action. |
| timeframe | int | false | The timeframe in days to analyze. Defaults to 90. |
| min_queries | int | false | The minimum number of queries for a model or explore to be considered used. Defaults to 1. |

## Example

Analyze all models in the `thelook` project.

```yaml
tools:
  analyze-tool:
    kind: looker-health-analyze
    source: looker-source
    description: |
      Analyzes Looker projects, models, and explores.
      Specify the `action` parameter to select the type of analysis.
    parameters:
      action: models
      project: "thelook"
```
Analyze all the explores in the `ecomm` model of the `thelook` project, looking specifically at usage within the past 20 days and requiring a minimum of 10 queries.

```yaml
tools:
  analyze-tool:
    kind: looker-health-analyze
    source: looker-source
    description: |
      Analyzes Looker projects, models, and explores.
      Specify the `action` parameter to select the type of analysis.
    parameters:
      action: explores
      project: "thelook"
      model: "ecomm"
      timeframe: 20
      min_queries: 10
```
docs/en/resources/tools/looker/looker-health-pulse.md (new file)
@@ -0,0 +1,55 @@
---
title: "looker-health-pulse"
type: docs
weight: 1
description: >
  "looker-health-pulse" performs health checks on a Looker instance, with multiple actions available (e.g., checking database connections, dashboard performance, etc.).
aliases:
- /resources/tools/looker-health-pulse
---

## About

The `looker-health-pulse` tool performs health checks on a Looker instance. The `action` parameter selects the type of check to perform:

- `check_db_connections`: Checks all database connections, runs supported tests, and reports query counts.
- `check_dashboard_performance`: Finds dashboards with slow running queries in the last 7 days.
- `check_dashboard_errors`: Lists dashboards with erroring queries in the last 7 days.
- `check_explore_performance`: Lists the slowest explores in the last 7 days and reports average query runtime.
- `check_schedule_failures`: Lists schedules that have failed in the last 7 days.
- `check_legacy_features`: Lists enabled legacy features. (*Note: this check is not available in Looker Core; running it against a Core instance returns an error.*)

## Parameters

| **field** | **type** | **required** | **description** |
|---------------|:--------:|:------------:|---------------------------------------------|
| kind | string | true | Must be "looker-health-pulse" |
| source | string | true | Looker source name |
| action | string | true | The health check to perform |

## Example

```yaml
tools:
  pulse:
    kind: looker-health-pulse
    source: looker-source
    description: |
      Pulse Tool

      Performs health checks on a Looker instance.
      Specify the `action` parameter to select the check.
    parameters:
      action: check_dashboard_performance
```

## Reference

| **action** | **description** |
|---------------------------|--------------------------------------------------------------------------------|
| check_db_connections | Checks all database connections and reports query counts and errors |
| check_dashboard_performance | Finds dashboards with slow queries (>30s) in the last 7 days |
| check_dashboard_errors | Lists dashboards with erroring queries in the last 7 days |
| check_explore_performance | Lists slowest explores and average query runtime |
| check_schedule_failures | Lists failed schedules in the last 7 days |
| check_legacy_features | Lists enabled legacy features |
docs/en/resources/tools/looker/looker-health-vacuum.md (new file)
@@ -0,0 +1,63 @@
---
title: "looker-health-vacuum"
type: docs
weight: 1
description: >
  "looker-health-vacuum" provides a set of commands to audit and identify unused LookML objects in a Looker instance.
aliases:
- /resources/tools/looker-health-vacuum
---

## About

The `looker-health-vacuum` tool helps you identify unused LookML objects such as models, explores, joins, and fields. The `action` parameter selects the type of vacuum to perform:

- `models`: Identifies unused explores within a model.
- `explores`: Identifies unused joins and fields within an explore.

## Parameters

| **field** | **type** | **required** | **description** |
| :--- | :--- | :--- | :--- |
| kind | string | true | Must be "looker-health-vacuum" |
| source | string | true | Looker source name |
| action | string | true | The vacuum to perform: `models` or `explores`. |
| project | string | false | The name of the Looker project to vacuum. |
| model | string | false | The name of the Looker model to vacuum. |
| explore | string | false | The name of the Looker explore to vacuum. |
| timeframe | int | false | The timeframe in days to analyze for usage. Defaults to 90. |
| min_queries | int | false | The minimum number of queries for an object to be considered used. Defaults to 1. |

## Example

Identify unused fields (*in this case, fewer than 1 query in the last 20 days*) and joins in the `order_items` explore of the `thelook` model.

```yaml
tools:
  vacuum-tool:
    kind: looker-health-vacuum
    source: looker-source
    description: |
      Vacuums the Looker instance by identifying unused explores, fields, and joins.
    parameters:
      action: explores
      project: "thelook_core"
      model: "thelook"
      explore: "order_items"
      timeframe: 20
      min_queries: 1
```

Identify unused explores across all models in the `thelook_core` project.

```yaml
tools:
  vacuum-tool:
    kind: looker-health-vacuum
    source: looker-source
    description: |
      Vacuums the Looker instance by identifying unused explores, fields, and joins.
    parameters:
      action: models
      project: "thelook_core"
```
go.mod
@@ -7,11 +7,12 @@ toolchain go1.25.1

require (
    cloud.google.com/go/alloydbconn v1.15.5
    cloud.google.com/go/bigquery v1.71.0
-   cloud.google.com/go/bigtable v1.40.0
+   cloud.google.com/go/bigtable v1.40.1
    cloud.google.com/go/cloudsqlconn v1.18.1
    cloud.google.com/go/dataplex v1.27.1
-   cloud.google.com/go/firestore v1.18.0
-   cloud.google.com/go/spanner v1.85.1
+   cloud.google.com/go/firestore v1.19.0
+   cloud.google.com/go/geminidataanalytics v0.2.1
+   cloud.google.com/go/spanner v1.86.0
    github.com/ClickHouse/clickhouse-go/v2 v2.40.3
    github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/metric v0.53.0
    github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.29.0
@@ -23,7 +24,7 @@ require (
    github.com/go-chi/httplog/v2 v2.1.1
    github.com/go-chi/render v1.0.3
    github.com/go-goquery/goquery v1.0.1
-   github.com/go-playground/validator/v10 v10.27.0
+   github.com/go-playground/validator/v10 v10.28.0
    github.com/go-sql-driver/mysql v1.9.3
    github.com/goccy/go-yaml v1.18.0
    github.com/gocql/gocql v1.7.0
@@ -34,12 +35,12 @@ require (
    github.com/looker-open-source/sdk-codegen/go v0.25.11
    github.com/microsoft/go-mssqldb v1.9.3
    github.com/nakagami/firebirdsql v0.9.15
-   github.com/neo4j/neo4j-go-driver/v5 v5.28.3
+   github.com/neo4j/neo4j-go-driver/v5 v5.28.4
    github.com/redis/go-redis/v9 v9.14.0
-   github.com/spf13/cobra v1.9.1
+   github.com/spf13/cobra v1.10.1
    github.com/thlib/go-timezone-local v0.0.7
    github.com/trinodb/trino-go-client v0.329.0
-   github.com/valkey-io/valkey-go v1.0.64
+   github.com/valkey-io/valkey-go v1.0.66
    github.com/yugabyte/pgx/v5 v5.5.3-yb-5
    go.mongodb.org/mongo-driver v1.17.4
    go.opentelemetry.io/contrib/propagators/autoprop v0.62.0
@@ -51,8 +52,8 @@ require (
    go.opentelemetry.io/otel/sdk/metric v1.37.0
    go.opentelemetry.io/otel/trace v1.38.0
    golang.org/x/oauth2 v0.31.0
-   google.golang.org/api v0.250.0
-   google.golang.org/genproto v0.0.0-20250922171735-9219d122eba9
+   google.golang.org/api v0.251.0
+   google.golang.org/genproto v0.0.0-20250929231259-57b25ae835d4
    modernc.org/sqlite v1.39.0
)

@@ -74,7 +75,7 @@ require (
    cloud.google.com/go/alloydb v1.18.0 // indirect
    cloud.google.com/go/auth v0.16.5 // indirect
    cloud.google.com/go/auth/oauth2adapt v0.2.8 // indirect
-   cloud.google.com/go/compute/metadata v0.8.4 // indirect
+   cloud.google.com/go/compute/metadata v0.9.0 // indirect
    cloud.google.com/go/iam v1.5.2 // indirect
    cloud.google.com/go/longrunning v0.6.7 // indirect
    cloud.google.com/go/monitoring v1.24.2 // indirect
@@ -98,7 +99,7 @@ require (
    github.com/envoyproxy/go-control-plane/envoy v1.32.4 // indirect
    github.com/envoyproxy/protoc-gen-validate v1.2.1 // indirect
    github.com/felixge/httpsnoop v1.0.4 // indirect
-   github.com/gabriel-vasile/mimetype v1.4.8 // indirect
+   github.com/gabriel-vasile/mimetype v1.4.10 // indirect
    github.com/go-jose/go-jose/v4 v4.1.1 // indirect
    github.com/go-logr/logr v1.4.3 // indirect
    github.com/go-logr/stdr v1.2.2 // indirect
@@ -142,7 +143,7 @@ require (
    github.com/pierrec/lz4/v4 v4.1.22 // indirect
    github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect
    github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
-   github.com/spf13/pflag v1.0.6 // indirect
+   github.com/spf13/pflag v1.0.9 // indirect
    github.com/spiffe/go-spiffe/v2 v2.5.0 // indirect
    github.com/xdg-go/pbkdf2 v1.0.0 // indirect
    github.com/xdg-go/scram v1.1.2 // indirect
@@ -174,8 +175,8 @@ require (
    golang.org/x/time v0.13.0 // indirect
    golang.org/x/tools v0.36.0 // indirect
    golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect
-   google.golang.org/genproto/googleapis/api v0.0.0-20250908214217-97024824d090 // indirect
-   google.golang.org/genproto/googleapis/rpc v0.0.0-20250908214217-97024824d090 // indirect
+   google.golang.org/genproto/googleapis/api v0.0.0-20250922171735-9219d122eba9 // indirect
+   google.golang.org/genproto/googleapis/rpc v0.0.0-20250929231259-57b25ae835d4 // indirect
    google.golang.org/grpc v1.75.1 // indirect
    google.golang.org/protobuf v1.36.9 // indirect
    gopkg.in/inf.v0 v0.9.1 // indirect
go.sum
@@ -139,8 +139,8 @@ cloud.google.com/go/bigquery v1.49.0/go.mod h1:Sv8hMmTFFYBlt/ftw2uN6dFdQPzBlREY9
cloud.google.com/go/bigquery v1.50.0/go.mod h1:YrleYEh2pSEbgTBZYMJ5SuSr0ML3ypjRB1zgf7pvQLU=
cloud.google.com/go/bigquery v1.71.0 h1:NvSZvXU1Hyb+YiRVKQPuQXGeZaw/0NP6M/WOrBqSx3g=
cloud.google.com/go/bigquery v1.71.0/go.mod h1:GUbRtmeCckOE85endLherHD9RsujY+gS7i++c1CqssQ=
-cloud.google.com/go/bigtable v1.40.0 h1:iNeqGqkJvFdjg07Ku3F7KKfq5QZvBySisYHVsLB1RwE=
-cloud.google.com/go/bigtable v1.40.0/go.mod h1:LtPzCcrAFaGRZ82Hs8xMueUeYW9Jw12AmNdUTMfDnh4=
+cloud.google.com/go/bigtable v1.40.1 h1:k8HfpUOvn7sQwc6oNKqjvD/yjkwynf4qBuyKwh5cU08=
+cloud.google.com/go/bigtable v1.40.1/go.mod h1:LtPzCcrAFaGRZ82Hs8xMueUeYW9Jw12AmNdUTMfDnh4=
cloud.google.com/go/billing v1.4.0/go.mod h1:g9IdKBEFlItS8bTtlrZdVLWSSdSyFUZKXNS02zKMOZY=
cloud.google.com/go/billing v1.5.0/go.mod h1:mztb1tBc3QekhjSgmpf/CV4LzWXLzCArwpLmP2Gm88s=
cloud.google.com/go/billing v1.6.0/go.mod h1:WoXzguj+BeHXPbKfNWkqVtDdzORazmCjraY+vrxcyvI=
@@ -194,8 +194,8 @@ cloud.google.com/go/compute/metadata v0.1.0/go.mod h1:Z1VN+bulIf6bt4P/C37K4DyZYZ
cloud.google.com/go/compute/metadata v0.2.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1hT1fIilQDwofLpJ20k=
cloud.google.com/go/compute/metadata v0.2.1/go.mod h1:jgHgmJd2RKBGzXqF5LR2EZMGxBkeanZ9wwa75XHJgOM=
cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA=
-cloud.google.com/go/compute/metadata v0.8.4 h1:oXMa1VMQBVCyewMIOm3WQsnVd9FbKBtm8reqWRaXnHQ=
-cloud.google.com/go/compute/metadata v0.8.4/go.mod h1:E0bWwX5wTnLPedCKqk3pJmVgCBSM6qQI1yTBdEb3C10=
+cloud.google.com/go/compute/metadata v0.9.0 h1:pDUj4QMoPejqq20dK0Pg2N4yG9zIkYGdBtwLoEkH9Zs=
+cloud.google.com/go/compute/metadata v0.9.0/go.mod h1:E0bWwX5wTnLPedCKqk3pJmVgCBSM6qQI1yTBdEb3C10=
cloud.google.com/go/contactcenterinsights v1.3.0/go.mod h1:Eu2oemoePuEFc/xKFPjbTuPSj0fYJcPls9TFlPNnHHY=
cloud.google.com/go/contactcenterinsights v1.4.0/go.mod h1:L2YzkGbPsv+vMQMCADxJoT9YiTTnSEd6fEvCeHTYVck=
cloud.google.com/go/contactcenterinsights v1.6.0/go.mod h1:IIDlT6CLcDoyv79kDv8iWxMSTZhLxSCofVV5W6YFM/w=
@@ -295,8 +295,8 @@ cloud.google.com/go/filestore v1.4.0/go.mod h1:PaG5oDfo9r224f8OYXURtAsY+Fbyq/bLY
cloud.google.com/go/filestore v1.5.0/go.mod h1:FqBXDWBp4YLHqRnVGveOkHDf8svj9r5+mUDLupOWEDs=
cloud.google.com/go/filestore v1.6.0/go.mod h1:di5unNuss/qfZTw2U9nhFqo8/ZDSc466dre85Kydllg=
cloud.google.com/go/firestore v1.9.0/go.mod h1:HMkjKHNTtRyZNiMzu7YAsLr9K3X2udY2AMwDaMEQiiE=
-cloud.google.com/go/firestore v1.18.0 h1:cuydCaLS7Vl2SatAeivXyhbhDEIR8BDmtn4egDhIn2s=
-cloud.google.com/go/firestore v1.18.0/go.mod h1:5ye0v48PhseZBdcl0qbl3uttu7FIEwEYVaWm0UIEOEU=
+cloud.google.com/go/firestore v1.19.0 h1:E3FiRsWfZKwZ6W+Lsp1YqTzZ9H6jP+QsKW40KR21C8I=
+cloud.google.com/go/firestore v1.19.0/go.mod h1:jqu4yKdBmDN5srneWzx3HlKrHFWFdlkgjgQ6BKIOFQo=
cloud.google.com/go/functions v1.6.0/go.mod h1:3H1UA3qiIPRWD7PeZKLvHZ9SaQhR26XIJcC0A5GbvAk=
cloud.google.com/go/functions v1.7.0/go.mod h1:+d+QBcWM+RsrgZfV9xo6KfA1GlzJfxcfZcRPEhDDfzg=
cloud.google.com/go/functions v1.8.0/go.mod h1:RTZ4/HsQjIqIYP9a9YPbU+QFoQsAlYgrwOXJWHn1POY=
@@ -309,6 +309,8 @@ cloud.google.com/go/gaming v1.6.0/go.mod h1:YMU1GEvA39Qt3zWGyAVA9bpYz/yAhTvaQ1t2
cloud.google.com/go/gaming v1.7.0/go.mod h1:LrB8U7MHdGgFG851iHAfqUdLcKBdQ55hzXy9xBJz0+w=
cloud.google.com/go/gaming v1.8.0/go.mod h1:xAqjS8b7jAVW0KFYeRUxngo9My3f33kFmua++Pi+ggM=
cloud.google.com/go/gaming v1.9.0/go.mod h1:Fc7kEmCObylSWLO334NcO+O9QMDyz+TKC4v1D7X+Bc0=
+cloud.google.com/go/geminidataanalytics v0.2.1 h1:gtG/9VlUJpL67yukFen/twkAEHliYvW7610Rlnn5rpQ=
+cloud.google.com/go/geminidataanalytics v0.2.1/go.mod h1:gIsj/ELDCzVbw24185zwjXgbzYiqdGe7TSSK2HrdtA0=
cloud.google.com/go/gkebackup v0.2.0/go.mod h1:XKvv/4LfG829/B8B7xRkk8zRrOEbKtEam6yNfuQNH60=
cloud.google.com/go/gkebackup v0.3.0/go.mod h1:n/E671i1aOQvUxT541aTkCwExO/bTer2HDlj4TsBRAo=
cloud.google.com/go/gkebackup v0.4.0/go.mod h1:byAyBGUwYGEEww7xsbnUTBHIYcOPy/PgUWUtOeRm9Vg=
@@ -544,8 +546,8 @@ cloud.google.com/go/shell v1.6.0/go.mod h1:oHO8QACS90luWgxP3N9iZVuEiSF84zNyLytb+
cloud.google.com/go/spanner v1.41.0/go.mod h1:MLYDBJR/dY4Wt7ZaMIQ7rXOTLjYrmxLE/5ve9vFfWos=
cloud.google.com/go/spanner v1.44.0/go.mod h1:G8XIgYdOK+Fbcpbs7p2fiprDw4CaZX63whnSMLVBxjk=
cloud.google.com/go/spanner v1.45.0/go.mod h1:FIws5LowYz8YAE1J8fOS7DJup8ff7xJeetWEo5REA2M=
-cloud.google.com/go/spanner v1.85.1 h1:cJx1ZD//C2QIfFQl8hSTn4twL8amAXtnayyflRIjj40=
-cloud.google.com/go/spanner v1.85.1/go.mod h1:bbwCXbM+zljwSPLZ44wZOdzcdmy89hbUGmM/r9sD0ws=
+cloud.google.com/go/spanner v1.86.0 h1:jlNWusBol1Jxa9PmYGknUBzLwvD1cebuEenzqebZ9xs=
+cloud.google.com/go/spanner v1.86.0/go.mod h1:bbwCXbM+zljwSPLZ44wZOdzcdmy89hbUGmM/r9sD0ws=
cloud.google.com/go/speech v1.6.0/go.mod h1:79tcr4FHCimOp56lwC01xnt/WPJZc4v3gzyT7FoBkCM=
cloud.google.com/go/speech v1.7.0/go.mod h1:KptqL+BAQIhMsj1kOP2la5DSEEerPDuOP/2mmkhHhZQ=
cloud.google.com/go/speech v1.8.0/go.mod h1:9bYIl1/tjsAnMgKGHKmBZzXKEkGgtU+MpdDPTE9f7y0=
@@ -847,8 +849,8 @@ github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHk
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.9.0 h1:2Ml+OJNzbYCTzsxtv8vKSFD9PbJjmhYF14k/jKC7S9k=
github.com/fsnotify/fsnotify v1.9.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
-github.com/gabriel-vasile/mimetype v1.4.8 h1:FfZ3gj38NjllZIeJAmMhr+qKL8Wu+nOoI3GqacKw1NM=
-github.com/gabriel-vasile/mimetype v1.4.8/go.mod h1:ByKUIKGjh1ODkGM1asKUbQZOLGrPjydw3hYPU2YU9t8=
+github.com/gabriel-vasile/mimetype v1.4.10 h1:zyueNbySn/z8mJZHLt6IPw0KoZsiQNszIpU+bX4+ZK0=
+github.com/gabriel-vasile/mimetype v1.4.10/go.mod h1:d+9Oxyo1wTzWdyVUPMmXFvp4F9tea18J8ufA774AB3s=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-chi/chi/v5 v5.2.3 h1:WQIt9uxdsAbgIYgid+BpYc+liqQZGMHRaUwp0JUcvdE=
github.com/go-chi/chi/v5 v5.2.3/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops=
@@ -889,8 +891,8 @@ github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/o
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
-github.com/go-playground/validator/v10 v10.27.0 h1:w8+XrWVMhGkxOaaowyKH35gFydVHOvC0/uWoy2Fzwn4=
-github.com/go-playground/validator/v10 v10.27.0/go.mod h1:I5QpIEbmr8On7W0TktmJAumgzX4CA1XNl4ZmDuVHKKo=
+github.com/go-playground/validator/v10 v10.28.0 h1:Q7ibns33JjyW48gHkuFT91qX48KG0ktULL6FgHdG688=
+github.com/go-playground/validator/v10 v10.28.0/go.mod h1:GoI6I1SjPBh9p7ykNE/yj3fFYbyDOpwMn5KXd+m2hUU=
github.com/go-sql-driver/mysql v1.9.3 h1:U/N249h2WzJ3Ukj8SowVFjdtZKfu9vlLZxjPXV1aweo=
github.com/go-sql-driver/mysql v1.9.3/go.mod h1:qn46aNg1333BRMNU69Lq93t8du/dwxI64Gl8i5p1WMU=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
@@ -1164,8 +1166,8 @@ github.com/nakagami/firebirdsql v0.9.15 h1:Mf05jaFI8+kjy6sBstsAu76zOkJ44AGd6cpAp
github.com/nakagami/firebirdsql v0.9.15/go.mod h1:bZKRs3rpHAjJgXAoc9YiPobTz3R22i41Zjo+llIS2B0=
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
-github.com/neo4j/neo4j-go-driver/v5 v5.28.3 h1:OHP/vzX0oZ2YUY5DnGUp7QY21BIpOzw+Pp+Dga8zYl4=
-github.com/neo4j/neo4j-go-driver/v5 v5.28.3/go.mod h1:Vff8OwT7QpLm7L2yYr85XNWe9Rbqlbeb9asNXJTHO4k=
+github.com/neo4j/neo4j-go-driver/v5 v5.28.4 h1:7toxehVcYkZbyxV4W3Ib9VcnyRBQPucF+VwNNmtSXi4=
+github.com/neo4j/neo4j-go-driver/v5 v5.28.4/go.mod h1:Vff8OwT7QpLm7L2yYr85XNWe9Rbqlbeb9asNXJTHO4k=
github.com/onsi/gomega v1.36.2 h1:koNYke6TVk6ZmnyHrCXba/T/MoLBXFjeC1PtvYgw0A8=
github.com/onsi/gomega v1.36.2/go.mod h1:DdwyADRjrc825LhMEkD76cHR5+pUnjhUN8GlHlRPHzY=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
@@ -1229,10 +1231,10 @@ github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasO
github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4=
github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I=
github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y=
-github.com/spf13/cobra v1.9.1 h1:CXSaggrXdbHK9CF+8ywj8Amf7PBRmPCOJugH954Nnlo=
-github.com/spf13/cobra v1.9.1/go.mod h1:nDyEzZ8ogv936Cinf6g1RU9MRY64Ir93oCnqb9wxYW0=
-github.com/spf13/pflag v1.0.6 h1:jFzHGLGAlb3ruxLB8MhbI6A8+AQX/2eW4qeyNZXNp2o=
-github.com/spf13/pflag v1.0.6/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
+github.com/spf13/cobra v1.10.1 h1:lJeBwCfmrnXthfAupyUTzJ/J4Nc1RsHC/mSRU2dll/s=
+github.com/spf13/cobra v1.10.1/go.mod h1:7SmJGaTHFVBY0jW4NXGluQoLvhqFQM+6XSKD+P4XaB0=
+github.com/spf13/pflag v1.0.9 h1:9exaQaMOCwffKiiiYk6/BndUBv+iRViNW+4lEMi0PvY=
+github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spiffe/go-spiffe/v2 v2.5.0 h1:N2I01KCUkv1FAjZXJMwh95KK1ZIQLYbPfhaxw8WS0hE=
github.com/spiffe/go-spiffe/v2 v2.5.0/go.mod h1:P+NxobPc6wXhVtINNtFjNWGBTreew1GBUCwT2wPmb7g=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
@@ -1258,8 +1260,8 @@ github.com/thlib/go-timezone-local v0.0.7/go.mod h1:/Tnicc6m/lsJE0irFMA0LfIwTBo4
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
github.com/trinodb/trino-go-client v0.329.0 h1:tAQR5oXsW81C+lA0xiZsyoOcD7qYLv6Rtdw7SqH5Cy0=
github.com/trinodb/trino-go-client v0.329.0/go.mod h1:BXj9QNy6pA4Gn8eIu9dVdRhetABCjFAOZ6xxsVsOZJE=
-github.com/valkey-io/valkey-go v1.0.64 h1:3u4+b6D6zs9JQs254TLy4LqitCMHHr9XorP9GGk7XY4=
-github.com/valkey-io/valkey-go v1.0.64/go.mod h1:bHmwjIEOrGq/ubOJfh5uMRs7Xj6mV3mQ/ZXUbmqpjqY=
+github.com/valkey-io/valkey-go v1.0.66 h1:DIEF1XpwbO78xK2sMTghYE3Bz6pePWJTNxKtgoAuA3A=
+github.com/valkey-io/valkey-go v1.0.66/go.mod h1:bHmwjIEOrGq/ubOJfh5uMRs7Xj6mV3mQ/ZXUbmqpjqY=
github.com/xdg-go/pbkdf2 v1.0.0 h1:Su7DPu48wXMwC3bs7MCNG+z4FhcyEuz5dlvchbq0B0c=
github.com/xdg-go/pbkdf2 v1.0.0/go.mod h1:jrpuAogTd400dnrH08LKmI/xc1MbPOebTwRqcT5RDeI=
github.com/xdg-go/scram v1.1.1/go.mod h1:RaEWvsqvNKKvBPvcKeFjrG2cJqOkHTiyTpzz23ni57g=
@@ -1834,8 +1836,8 @@ google.golang.org/api v0.108.0/go.mod h1:2Ts0XTHNVWxypznxWOYUeI4g3WdP9Pk2Qk58+a/
google.golang.org/api v0.110.0/go.mod h1:7FC4Vvx1Mooxh8C5HWjzZHcavuS2f6pmJpZx60ca7iI=
google.golang.org/api v0.111.0/go.mod h1:qtFHvU9mhgTJegR31csQ+rwxyUTHOKFqCKWp1J0fdw0=
google.golang.org/api v0.114.0/go.mod h1:ifYI2ZsFK6/uGddGfAD5BMxlnkBqCmqHSDUVi45N5Yg=
-google.golang.org/api v0.250.0 h1:qvkwrf/raASj82UegU2RSDGWi/89WkLckn4LuO4lVXM=
-google.golang.org/api v0.250.0/go.mod h1:Y9Uup8bDLJJtMzJyQnu+rLRJLA0wn+wTtc6vTlOvfXo=
+google.golang.org/api v0.251.0 h1:6lea5nHRT8RUmpy9kkC2PJYnhnDAB13LqrLSVQlMIE8=
+google.golang.org/api v0.251.0/go.mod h1:Rwy0lPf/TD7+T2VhYcffCHhyyInyuxGjICxdfLqT7KI=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
@@ -1976,12 +1978,12 @@ google.golang.org/genproto v0.0.0-20230323212658-478b75c54725/go.mod h1:UUQDJDOl
google.golang.org/genproto v0.0.0-20230330154414-c0448cd141ea/go.mod h1:UUQDJDOlWu4KYeJZffbWgBkS1YFobzKbLVfK69pe0Ak=
google.golang.org/genproto v0.0.0-20230331144136-dcfb400f0633/go.mod h1:UUQDJDOlWu4KYeJZffbWgBkS1YFobzKbLVfK69pe0Ak=
google.golang.org/genproto v0.0.0-20230410155749-daa745c078e1/go.mod h1:nKE/iIaLqn2bQwXBg8f1g2Ylh6r5MN5CmZvuzZCgsCU=
-google.golang.org/genproto v0.0.0-20250922171735-9219d122eba9 h1:LvZVVaPE0JSqL+ZWb6ErZfnEOKIqqFWUJE2D0fObSmc=
-google.golang.org/genproto v0.0.0-20250922171735-9219d122eba9/go.mod h1:QFOrLhdAe2PsTp3vQY4quuLKTi9j3XG3r6JPPaw7MSc=
-google.golang.org/genproto/googleapis/api v0.0.0-20250908214217-97024824d090 h1:d8Nakh1G+ur7+P3GcMjpRDEkoLUcLW2iU92XVqR+XMQ=
-google.golang.org/genproto/googleapis/api v0.0.0-20250908214217-97024824d090/go.mod h1:U8EXRNSd8sUYyDfs/It7KVWodQr+Hf9xtxyxWudSwEw=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20250908214217-97024824d090 h1:/OQuEa4YWtDt7uQWHd3q3sUMb+QOLQUg1xa8CEsRv5w=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20250908214217-97024824d090/go.mod h1:GmFNa4BdJZ2a8G+wCe9Bg3wwThLrJun751XstdJt5Og=
+google.golang.org/genproto v0.0.0-20250929231259-57b25ae835d4 h1:HmI33/XNQ1jVwhb5ZUgot40oiwFHa2l5ZNkQpj8VaEg=
+google.golang.org/genproto v0.0.0-20250929231259-57b25ae835d4/go.mod h1:OqVwZqqGV3h7k+YCVWXoTtwC2cs55RnDEUVMMadhxrc=
+google.golang.org/genproto/googleapis/api v0.0.0-20250922171735-9219d122eba9 h1:jm6v6kMRpTYKxBRrDkYAitNJegUeO1Mf3Kt80obv0gg=
+google.golang.org/genproto/googleapis/api v0.0.0-20250922171735-9219d122eba9/go.mod h1:LmwNphe5Afor5V3R5BppOULHOnt2mCIf+NxMd4XiygE=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20250929231259-57b25ae835d4 h1:i8QOKZfYg6AbGVZzUAY3LrNWCKF8O6zFisU9Wl9RER4=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20250929231259-57b25ae835d4/go.mod h1:HSkG/KdJWusxU1F6CNrwNDjBMgisKxGnc5dAZfT0mjQ=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
@@ -37,6 +37,7 @@ var expectedToolSources = []string{
    "cloud-sql-postgres",
    "dataplex",
    "firestore",
+   "looker-conversational-analytics",
    "looker",
    "mssql",
    "mysql",
@@ -108,6 +109,8 @@ func TestGetPrebuiltTool(t *testing.T) {
    cloudsqlmssql_config, _ := Get("cloud-sql-mssql")
    dataplex_config, _ := Get("dataplex")
    firestoreconfig, _ := Get("firestore")
    looker_config, _ := Get("looker")
    lookerca_config, _ := Get("looker-conversational-analytics")
    mysql_config, _ := Get("mysql")
    mssql_config, _ := Get("mssql")
    oceanbase_config, _ := Get("oceanbase")
@@ -164,6 +167,12 @@ func TestGetPrebuiltTool(t *testing.T) {
    if len(firestoreconfig) <= 0 {
        t.Fatalf("unexpected error: could not fetch firestore prebuilt tools yaml")
    }
    if len(looker_config) <= 0 {
        t.Fatalf("unexpected error: could not fetch looker prebuilt tools yaml")
    }
    if len(lookerca_config) <= 0 {
        t.Fatalf("unexpected error: could not fetch looker-conversational-analytics prebuilt tools yaml")
    }
    if len(mysql_config) <= 0 {
        t.Fatalf("unexpected error: could not fetch mysql prebuilt tools yaml")
    }
@@ -0,0 +1,57 @@
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

sources:
  looker-source:
    kind: looker
    base_url: ${LOOKER_BASE_URL}
    client_id: ${LOOKER_CLIENT_ID:}
    client_secret: ${LOOKER_CLIENT_SECRET:}
    verify_ssl: ${LOOKER_VERIFY_SSL:true}
    timeout: 600s
    use_client_oauth: ${LOOKER_USE_CLIENT_OAUTH:false}
    project: ${LOOKER_PROJECT:}
    location: ${LOOKER_LOCATION:}

tools:
  ask_data_insights:
    kind: looker-conversational-analytics
    source: looker-source
    description: |
      Use this tool to perform data analysis, get insights,
      or answer complex questions about the contents of specific
      Looker explores.

  get_models:
    kind: looker-get-models
    source: looker-source
    description: |
      The get_models tool retrieves the list of LookML models in the Looker system.

      It takes no parameters.

  get_explores:
    kind: looker-get-explores
    source: looker-source
    description: |
      The get_explores tool retrieves the list of explores defined in a LookML model
      in the Looker system.

      It takes one parameter, the model_name looked up from get_models.

toolsets:
  looker_conversational_analytics_tools:
    - ask_data_insights
    - get_models
    - get_explores
@@ -695,6 +695,56 @@ tools:
      This tool can be called many times for one dashboard_id
      and the resulting tiles will be added in order.

  health_pulse:
    kind: looker-health-pulse
    source: looker-source
    description: |
      health-pulse Tool

      This tool takes the pulse of a Looker instance by taking
      one of the following actions:
      1. `check_db_connections`,
      2. `check_dashboard_performance`,
      3. `check_dashboard_errors`,
      4. `check_explore_performance`,
      5. `check_schedule_failures`, or
      6. `check_legacy_features`

  health_analyze:
    kind: looker-health-analyze
    source: looker-source
    description: |
      health-analyze Tool

      This tool calculates the usage of projects, models and explores.

      It accepts 6 parameters:
      1. `action`: can be "projects", "models", or "explores"
      2. `project`: the project to analyze (optional)
      3. `model`: the model to analyze (optional)
      4. `explore`: the explore to analyze (optional)
      5. `timeframe`: the lookback period in days, default is 90
      6. `min_queries`: the minimum number of queries to consider a resource as active, default is 1

  health_vacuum:
    kind: looker-health-vacuum
    source: looker-source
    description: |
      health-vacuum Tool

      This tool suggests models or explores that can be removed
      because they are unused.

      It accepts 6 parameters:
      1. `action`: can be "models" or "explores"
      2. `project`: the project to vacuum (optional)
      3. `model`: the model to vacuum (optional)
      4. `explore`: the explore to vacuum (optional)
      5. `timeframe`: the lookback period in days, default is 90
      6. `min_queries`: the minimum number of queries to consider a resource as active, default is 1

      The result is a list of objects that are candidates for deletion.

toolsets:
  looker_tools:
    - get_models
@@ -712,3 +762,6 @@ toolsets:
    - get_dashboards
    - make_dashboard
    - add_dashboard_element
    - health_pulse
    - health_analyze
    - health_vacuum
@@ -20,6 +20,7 @@ import (
    "net/http"
    "strings"
    "sync"
    "time"

    bigqueryapi "cloud.google.com/go/bigquery"
    dataplexapi "cloud.google.com/go/dataplex/apiv1"
@@ -36,11 +37,22 @@ import (

const SourceKind string = "bigquery"

const (
    // No write operations are allowed.
    WriteModeBlocked string = "blocked"
    // Only protected write operations are allowed in a BigQuery session.
    WriteModeProtected string = "protected"
    // All write operations are allowed.
    WriteModeAllowed string = "allowed"
)

// validate interface
var _ sources.SourceConfig = Config{}

type BigqueryClientCreator func(tokenString string, wantRestService bool) (*bigqueryapi.Client, *bigqueryrestapi.Service, error)

type BigQuerySessionProvider func(ctx context.Context) (*Session, error)

type DataplexClientCreator func(tokenString string) (*dataplexapi.CatalogClient, error)

func init() {
@@ -63,6 +75,7 @@ type Config struct {
    Kind            string   `yaml:"kind" validate:"required"`
    Project         string   `yaml:"project" validate:"required"`
    Location        string   `yaml:"location"`
    WriteMode       string   `yaml:"writeMode"`
    AllowedDatasets []string `yaml:"allowedDatasets"`
    UseClientOAuth  bool     `yaml:"useClientOAuth"`
}
@@ -73,6 +86,14 @@ func (r Config) SourceConfigKind() string {
}

func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.Source, error) {
    if r.WriteMode == "" {
        r.WriteMode = WriteModeAllowed
    }

    if r.WriteMode == WriteModeProtected && r.UseClientOAuth {
        return nil, fmt.Errorf("writeMode 'protected' cannot be used with useClientOAuth 'true'")
    }

    var client *bigqueryapi.Client
    var restService *bigqueryrestapi.Service
    var tokenSource oauth2.TokenSource
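The validation above rejects `protected` mode combined with client OAuth; a config like the following sketch (names hypothetical) would fail to initialize:

```yaml
sources:
  bq-protected:
    kind: bigquery
    project: my-project      # hypothetical project ID
    writeMode: protected
    useClientOAuth: true     # rejected: protected sessions require server-side credentials
```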
@@ -106,18 +127,20 @@ func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.So
            datasetID = parts[1]
            allowedFullID = allowed
        } else {
-           projectID = client.Project()
+           projectID = r.Project
            datasetID = allowed
            allowedFullID = fmt.Sprintf("%s.%s", projectID, datasetID)
        }

-       dataset := client.DatasetInProject(projectID, datasetID)
-       _, err := dataset.Metadata(ctx)
-       if err != nil {
-           if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == http.StatusNotFound {
-               return nil, fmt.Errorf("allowedDataset '%s' not found in project '%s'", datasetID, projectID)
+       if client != nil {
+           dataset := client.DatasetInProject(projectID, datasetID)
+           _, err := dataset.Metadata(ctx)
+           if err != nil {
+               if gerr, ok := err.(*googleapi.Error); ok && gerr.Code == http.StatusNotFound {
+                   return nil, fmt.Errorf("allowedDataset '%s' not found in project '%s'", datasetID, projectID)
+               }
+               return nil, fmt.Errorf("failed to verify allowedDataset '%s' in project '%s': %w", datasetID, projectID, err)
            }
-           return nil, fmt.Errorf("failed to verify allowedDataset '%s' in project '%s': %w", datasetID, projectID, err)
        }
        allowedDatasets[allowedFullID] = struct{}{}
    }
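Per the parsing above, `allowedDatasets` entries may be bare dataset IDs (resolved against the source's `project`) or fully qualified `project.dataset` IDs; a sketch with hypothetical names:

```yaml
sources:
  bq-restricted:
    kind: bigquery
    project: my-project
    allowedDatasets:
      - analytics            # bare ID: resolves to my-project.analytics
      - other-project.sales  # fully qualified project.dataset
```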
@@ -133,9 +156,15 @@ func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.So
        TokenSource:        tokenSource,
        MaxQueryResultRows: 50,
        ClientCreator:      clientCreator,
        WriteMode:          r.WriteMode,
        AllowedDatasets:    allowedDatasets,
        UseClientOAuth:     r.UseClientOAuth,
    }
    s.SessionProvider = s.newBigQuerySessionProvider()

    if r.WriteMode != WriteModeAllowed && r.WriteMode != WriteModeBlocked && r.WriteMode != WriteModeProtected {
        return nil, fmt.Errorf("invalid writeMode %q: must be one of %q, %q, or %q", r.WriteMode, WriteModeAllowed, WriteModeProtected, WriteModeBlocked)
    }
    s.makeDataplexCatalogClient = s.lazyInitDataplexClient(ctx, tracer)
    return s, nil
@@ -156,7 +185,19 @@ type Source struct {
    ClientCreator             BigqueryClientCreator
    AllowedDatasets           map[string]struct{}
    UseClientOAuth            bool
    WriteMode                 string
    sessionMutex              sync.Mutex
    makeDataplexCatalogClient func() (*dataplexapi.CatalogClient, DataplexClientCreator, error)
    SessionProvider           BigQuerySessionProvider
    Session                   *Session
}

type Session struct {
    ID           string
    ProjectID    string
    DatasetID    string
    CreationTime time.Time
    LastUsed     time.Time
}

func (s *Source) SourceKind() string {
@@ -172,6 +213,103 @@ func (s *Source) BigQueryRestService() *bigqueryrestapi.Service {
    return s.RestService
}

func (s *Source) BigQueryWriteMode() string {
    return s.WriteMode
}

func (s *Source) BigQuerySession() BigQuerySessionProvider {
    return s.SessionProvider
}

func (s *Source) newBigQuerySessionProvider() BigQuerySessionProvider {
    return func(ctx context.Context) (*Session, error) {
        if s.WriteMode != WriteModeProtected {
            return nil, nil
        }

        s.sessionMutex.Lock()
        defer s.sessionMutex.Unlock()

        logger, err := util.LoggerFromContext(ctx)
        if err != nil {
            return nil, fmt.Errorf("failed to get logger from context: %w", err)
        }

        if s.Session != nil {
            // Absolute 7-day lifetime check.
            const sessionMaxLifetime = 7 * 24 * time.Hour
            // This assumes a single task will not exceed 30 minutes, preventing it from failing mid-execution.
            const refreshThreshold = 30 * time.Minute
            if time.Since(s.Session.CreationTime) > (sessionMaxLifetime - refreshThreshold) {
                logger.DebugContext(ctx, "Session is approaching its 7-day maximum lifetime. Creating a new one.")
            } else {
                // Validate the existing session with a dry-run query bound to its session_id.
                job := &bigqueryrestapi.Job{
                    Configuration: &bigqueryrestapi.JobConfiguration{
                        DryRun: true,
                        Query: &bigqueryrestapi.JobConfigurationQuery{
                            Query:                "SELECT 1",
                            UseLegacySql:         new(bool),
                            ConnectionProperties: []*bigqueryrestapi.ConnectionProperty{{Key: "session_id", Value: s.Session.ID}},
                        },
                    },
                }
                _, err := s.RestService.Jobs.Insert(s.Project, job).Do()
                if err == nil {
                    s.Session.LastUsed = time.Now()
                    return s.Session, nil
                }
                logger.DebugContext(ctx, "Session validation failed (likely expired), creating a new one.", "error", err)
            }
        }

        // Create a new session if one doesn't exist, it has passed its 7-day lifetime,
        // or it failed the validation dry run.
        creationTime := time.Now()
        job := &bigqueryrestapi.Job{
            JobReference: &bigqueryrestapi.JobReference{
                ProjectId: s.Project,
                Location:  s.Location,
            },
            Configuration: &bigqueryrestapi.JobConfiguration{
                DryRun: true,
                Query: &bigqueryrestapi.JobConfigurationQuery{
                    Query:         "SELECT 1",
                    CreateSession: true,
                },
            },
        }

        createdJob, err := s.RestService.Jobs.Insert(s.Project, job).Do()
        if err != nil {
            return nil, fmt.Errorf("failed to create new session: %w", err)
        }

        var sessionID, sessionDatasetID, projectID string
        // Guard Statistics (not Status) before dereferencing SessionInfo.
        if createdJob.Statistics != nil && createdJob.Statistics.SessionInfo != nil {
            sessionID = createdJob.Statistics.SessionInfo.SessionId
        } else {
            return nil, fmt.Errorf("failed to get session ID from new session job")
        }

        if createdJob.Configuration != nil && createdJob.Configuration.Query != nil && createdJob.Configuration.Query.DestinationTable != nil {
            sessionDatasetID = createdJob.Configuration.Query.DestinationTable.DatasetId
            projectID = createdJob.Configuration.Query.DestinationTable.ProjectId
        } else {
            return nil, fmt.Errorf("failed to get session dataset ID from new session job")
        }

        s.Session = &Session{
            ID:           sessionID,
            ProjectID:    projectID,
            DatasetID:    sessionDatasetID,
            CreationTime: creationTime,
            LastUsed:     creationTime,
        }
        return s.Session, nil
    }
}

func (s *Source) UseClientAuthorization() bool {
    return s.UseClientOAuth
}
@@ -257,7 +395,7 @@ func initBigQueryConnection(
    ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
    defer span.End()

-   cred, err := google.FindDefaultCredentials(ctx, bigqueryapi.Scope)
+   cred, err := google.FindDefaultCredentials(ctx, "https://www.googleapis.com/auth/cloud-platform")
    if err != nil {
        return nil, nil, nil, fmt.Errorf("failed to find default Google Cloud credentials with scope %q: %w", bigqueryapi.Scope, err)
    }
@@ -37,14 +37,34 @@ func TestParseFromYamlBigQuery(t *testing.T) {
            my-instance:
                kind: bigquery
                project: my-project
                location: us
            `,
            want: server.SourceConfigs{
                "my-instance": bigquery.Config{
                    Name:      "my-instance",
                    Kind:      bigquery.SourceKind,
                    Project:   "my-project",
                    Location:  "",
                    WriteMode: "",
                },
            },
        },
        {
            desc: "all fields specified",
            in: `
            sources:
                my-instance:
                    kind: bigquery
                    project: my-project
                    location: asia
                    writeMode: blocked
            `,
            want: server.SourceConfigs{
                "my-instance": bigquery.Config{
                    Name:    "my-instance",
                    Kind:    bigquery.SourceKind,
                    Project: "my-project",
-                   Location: "us",
+                   Location:       "asia",
                    WriteMode:      "blocked",
                    UseClientOAuth: false,
                },
            },
@@ -111,20 +111,25 @@ func initCloudSQLMssqlConnection(ctx context.Context, tracer trace.Tracer, name,
|
||||
ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
|
||||
defer span.End()
|
||||
|
||||
// Create dsn
|
||||
query := fmt.Sprintf("database=%s&cloudsql=%s:%s:%s", dbname, project, region, instance)
|
||||
url := &url.URL{
|
||||
Scheme: "sqlserver",
|
||||
User: url.UserPassword(user, pass),
|
||||
Host: ipAddress,
|
||||
RawQuery: query,
|
||||
}
|
||||
|
||||
// Get dial options
|
||||
userAgent, err := util.UserAgentFromContext(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Create dsn
|
||||
query := url.Values{}
|
||||
query.Add("app name", userAgent)
|
||||
query.Add("database", dbname)
|
||||
query.Add("cloudsql", fmt.Sprintf("%s:%s:%s", project, region, instance))
|
||||
|
||||
url := &url.URL{
|
||||
Scheme: "sqlserver",
|
||||
User: url.UserPassword(user, pass),
|
||||
Host: ipAddress,
|
||||
RawQuery: query.Encode(),
|
||||
}
|
||||
|
||||
// Get dial options
|
||||
opts, err := sources.GetCloudSQLOpts(ipType, userAgent, false)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
||||
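As a rough illustration of the DSN the new code builds (all values below are made up), `url.Values.Encode` sorts the keys and escapes the Cloud SQL instance path:

package example

import (
	"fmt"
	"net/url"
)

func main() {
	q := url.Values{}
	q.Add("app name", "toolbox/1.0") // hypothetical user agent
	q.Add("database", "my-db")
	q.Add("cloudsql", "my-project:us-central1:my-instance")
	u := &url.URL{
		Scheme:   "sqlserver",
		User:     url.UserPassword("user", "pass"),
		Host:     "10.0.0.3",
		RawQuery: q.Encode(),
	}
	// Prints: sqlserver://user:pass@10.0.0.3?app+name=toolbox%2F1.0&cloudsql=my-project%3Aus-central1%3Amy-instance&database=my-db
	fmt.Println(u.String())
}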
@@ -118,9 +118,8 @@ func initCloudSQLMySQLConnectionPool(ctx context.Context, tracer trace.Tracer, n
			return nil, fmt.Errorf("unable to register driver: %w", err)
		}
	}

	// Tell the driver to use the Cloud SQL Go Connector to create connections
	dsn := fmt.Sprintf("%s:%s@cloudsql-mysql(%s:%s:%s)/%s", user, pass, project, region, instance, dbname)
	dsn := fmt.Sprintf("%s:%s@cloudsql-mysql(%s:%s:%s)/%s?connectionAttributes=program_name:%s", user, pass, project, region, instance, dbname, userAgent)
	db, err := sql.Open(
		"cloudsql-mysql",
		dsn,

@@ -18,10 +18,13 @@ import (
	"fmt"
	"time"

	geminidataanalytics "cloud.google.com/go/geminidataanalytics/apiv1beta"
	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/googleapis/genai-toolbox/internal/util"
	"go.opentelemetry.io/otel/trace"
	"golang.org/x/oauth2"
	"golang.org/x/oauth2/google"

	"github.com/looker-open-source/sdk-codegen/go/rtl"
	v4 "github.com/looker-open-source/sdk-codegen/go/sdk/v4"
@@ -47,6 +50,7 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (sources
		ShowHiddenModels: true,
		ShowHiddenExplores: true,
		ShowHiddenFields: true,
		Location: "us",
	} // Default Ssl,timeout, ShowHidden
	if err := decoder.DecodeContext(ctx, &actual); err != nil {
		return nil, err
@@ -66,6 +70,8 @@ type Config struct {
	ShowHiddenModels bool `yaml:"show_hidden_models"`
	ShowHiddenExplores bool `yaml:"show_hidden_explores"`
	ShowHiddenFields bool `yaml:"show_hidden_fields"`
	Project string `yaml:"project"`
	Location string `yaml:"location"`
}

func (r Config) SourceConfigKind() string {
@@ -102,6 +108,9 @@ func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.So
		ClientSecret: r.ClientSecret,
	}

	var tokenSource oauth2.TokenSource
	tokenSource, _ = initGoogleCloudConnection(ctx)

	s := &Source{
		Name: r.Name,
		Kind: SourceKind,
@@ -111,6 +120,9 @@ func (r Config) Initialize(ctx context.Context, tracer trace.Tracer) (sources.So
		ShowHiddenModels: r.ShowHiddenModels,
		ShowHiddenExplores: r.ShowHiddenExplores,
		ShowHiddenFields: r.ShowHiddenFields,
		Project: r.Project,
		Location: r.Location,
		TokenSource: tokenSource,
	}

	if !r.UseClientOAuth {
@@ -137,12 +149,48 @@ type Source struct {
	Timeout string `yaml:"timeout"`
	Client *v4.LookerSDK
	ApiSettings *rtl.ApiSettings
	UseClientOAuth bool `yaml:"use_client_oauth"`
	ShowHiddenModels bool `yaml:"show_hidden_models"`
	ShowHiddenExplores bool `yaml:"show_hidden_explores"`
	ShowHiddenFields bool `yaml:"show_hidden_fields"`
	UseClientOAuth bool `yaml:"use_client_oauth"`
	ShowHiddenModels bool `yaml:"show_hidden_models"`
	ShowHiddenExplores bool `yaml:"show_hidden_explores"`
	ShowHiddenFields bool `yaml:"show_hidden_fields"`
	Project string `yaml:"project"`
	Location string `yaml:"location"`
	TokenSource oauth2.TokenSource
}

func (s *Source) SourceKind() string {
	return SourceKind
}

func (s *Source) GetApiSettings() *rtl.ApiSettings {
	return s.ApiSettings
}

func (s *Source) UseClientAuthorization() bool {
	return s.UseClientOAuth
}

func (s *Source) GoogleCloudProject() string {
	return s.Project
}

func (s *Source) GoogleCloudLocation() string {
	return s.Location
}

func (s *Source) GoogleCloudTokenSource() oauth2.TokenSource {
	return s.TokenSource
}

func (s *Source) GoogleCloudTokenSourceWithScope(ctx context.Context, scope string) (oauth2.TokenSource, error) {
	return google.DefaultTokenSource(ctx, scope)
}

func initGoogleCloudConnection(ctx context.Context) (oauth2.TokenSource, error) {
	cred, err := google.FindDefaultCredentials(ctx, geminidataanalytics.DefaultAuthScopes()...)
	if err != nil {
		return nil, fmt.Errorf("failed to find default Google Cloud credentials with scope %q: %w", geminidataanalytics.DefaultAuthScopes(), err)
	}

	return cred.TokenSource, nil
}
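The new Google Cloud accessors on the Looker source hand out an oauth2.TokenSource from which a consumer mints short-lived access tokens. A minimal sketch using the same golang.org/x/oauth2/google call as GoogleCloudTokenSourceWithScope above (the scope value is an assumption for illustration):

package example

import (
	"context"
	"fmt"

	"golang.org/x/oauth2/google"
)

func printTokenExpiry(ctx context.Context) error {
	ts, err := google.DefaultTokenSource(ctx, "https://www.googleapis.com/auth/cloud-platform")
	if err != nil {
		return err
	}
	tok, err := ts.Token() // fetches (and caches) an access token
	if err != nil {
		return err
	}
	fmt.Println("token expires:", tok.Expiry)
	return nil
}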
@@ -54,6 +54,7 @@ func TestParseFromYamlLooker(t *testing.T) {
				ShowHiddenModels: true,
				ShowHiddenExplores: true,
				ShowHiddenFields: true,
				Location: "us",
			},
		},
	},
@@ -90,9 +91,9 @@ func TestFailParseFromYamlLooker(t *testing.T) {
			base_url: http://example.looker.com/
			client_id: jasdl;k;tjl
			client_secret: sdakl;jgflkasdfkfg
			project: test-project
			schema: test-schema
			`,
			err: "unable to parse source \"my-looker-instance\" as \"looker\": [5:1] unknown field \"project\"\n 2 | client_id: jasdl;k;tjl\n 3 | client_secret: sdakl;jgflkasdfkfg\n 4 | kind: looker\n> 5 | project: test-project\n ^\n",
			err: "unable to parse source \"my-looker-instance\" as \"looker\": [5:1] unknown field \"schema\"\n 2 | client_id: jasdl;k;tjl\n 3 | client_secret: sdakl;jgflkasdfkfg\n 4 | kind: looker\n> 5 | schema: test-schema\n ^\n",
		},
		{
			desc: "missing required field",
@@ -100,6 +101,7 @@ func TestFailParseFromYamlLooker(t *testing.T) {
			sources:
				my-looker-instance:
					kind: looker
					client_id: jasdl;k;tjl
			`,
			err: "unable to parse source \"my-looker-instance\" as \"looker\": Key: 'Config.BaseURL' Error:Field validation for 'BaseURL' failed on the 'required' tag",
		},

@@ -22,6 +22,7 @@ import (

	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/googleapis/genai-toolbox/internal/util"
	_ "github.com/microsoft/go-mssqldb"
	"go.opentelemetry.io/otel/trace"
)
@@ -114,8 +115,13 @@ func initMssqlConnection(
	ctx, span := sources.InitConnectionSpan(ctx, tracer, SourceKind, name)
	defer span.End()

	userAgent, err := util.UserAgentFromContext(ctx)
	if err != nil {
		userAgent = "genai-toolbox"
	}
	// Create dsn
	query := url.Values{}
	query.Add("app name", userAgent)
	query.Add("database", dbname)
	if encrypt != "" {
		query.Add("encrypt", encrypt)

@@ -24,6 +24,7 @@ import (
	_ "github.com/go-sql-driver/mysql"
	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/googleapis/genai-toolbox/internal/util"
	"go.opentelemetry.io/otel/trace"
)

@@ -122,7 +123,11 @@ func initMySQLConnectionPool(ctx context.Context, tracer trace.Tracer, name, hos
		values.Set(k, v)
	}

	dsn := fmt.Sprintf("%s:%s@tcp(%s:%s)/%s?parseTime=true", user, pass, host, port, dbname)
	userAgent, err := util.UserAgentFromContext(ctx)
	if err != nil {
		return nil, err
	}
	dsn := fmt.Sprintf("%s:%s@tcp(%s:%s)/%s?parseTime=true&connectionAttributes=program_name:%s", user, pass, host, port, dbname, userAgent)
	if enc := values.Encode(); enc != "" {
		dsn += "&" + enc
	}

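For reference, the resulting go-sql-driver/mysql DSN carries the user agent as a `program_name` connection attribute, which the server can surface in its session metadata. A sketch with made-up values:

package example

import "fmt"

func main() {
	user, pass, host, port, dbname, userAgent := "user", "pass", "127.0.0.1", "3306", "mydb", "genai-toolbox"
	dsn := fmt.Sprintf("%s:%s@tcp(%s:%s)/%s?parseTime=true&connectionAttributes=program_name:%s",
		user, pass, host, port, dbname, userAgent)
	// Prints: user:pass@tcp(127.0.0.1:3306)/mydb?parseTime=true&connectionAttributes=program_name:genai-toolbox
	fmt.Println(dsn)
}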
@@ -50,6 +50,7 @@ type compatibleSource interface {
	BigQueryRestService() *bigqueryrestapi.Service
	BigQueryClientCreator() bigqueryds.BigqueryClientCreator
	UseClientAuthorization() bool
	BigQuerySession() bigqueryds.BigQuerySessionProvider
}

// validate compatible sources are still compatible
@@ -122,16 +123,17 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)

	// finish tool setup
	t := Tool{
		Name: cfg.Name,
		Kind: kind,
		Parameters: parameters,
		AuthRequired: cfg.AuthRequired,
		UseClientOAuth: s.UseClientAuthorization(),
		ClientCreator: s.BigQueryClientCreator(),
		Client: s.BigQueryClient(),
		RestService: s.BigQueryRestService(),
		manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
		mcpManifest: mcpManifest,
		Name: cfg.Name,
		Kind: kind,
		Parameters: parameters,
		AuthRequired: cfg.AuthRequired,
		UseClientOAuth: s.UseClientAuthorization(),
		ClientCreator: s.BigQueryClientCreator(),
		Client: s.BigQueryClient(),
		RestService: s.BigQueryRestService(),
		SessionProvider: s.BigQuerySession(),
		manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
		mcpManifest: mcpManifest,
	}
	return t, nil
}
@@ -146,11 +148,12 @@ type Tool struct {
	UseClientOAuth bool `yaml:"useClientOAuth"`
	Parameters tools.Parameters `yaml:"parameters"`

	Client *bigqueryapi.Client
	RestService *bigqueryrestapi.Service
	ClientCreator bigqueryds.BigqueryClientCreator
	manifest tools.Manifest
	mcpManifest tools.McpManifest
	Client *bigqueryapi.Client
	RestService *bigqueryrestapi.Service
	ClientCreator bigqueryds.BigqueryClientCreator
	SessionProvider bigqueryds.BigQuerySessionProvider
	manifest tools.Manifest
	mcpManifest tools.McpManifest
}

// Invoke runs the contribution analysis.
@@ -222,7 +225,22 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
	}

	createModelQuery := bqClient.Query(createModelSQL)
	createModelQuery.CreateSession = true

	// Get session from provider if in protected mode.
	// Otherwise, a new session will be created by the first query.
	session, err := t.SessionProvider(ctx)
	if err != nil {
		return nil, fmt.Errorf("failed to get BigQuery session: %w", err)
	}

	if session != nil {
		createModelQuery.ConnectionProperties = []*bigqueryapi.ConnectionProperty{
			{Key: "session_id", Value: session.ID},
		}
	} else {
		// If not in protected mode, create a session for this invocation.
		createModelQuery.CreateSession = true
	}
	createModelJob, err := createModelQuery.Run(ctx)
	if err != nil {
		return nil, fmt.Errorf("failed to start create model job: %w", err)
@@ -236,16 +254,21 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
		return nil, fmt.Errorf("create model job failed: %w", err)
	}

	if status.Statistics == nil || status.Statistics.SessionInfo == nil || status.Statistics.SessionInfo.SessionID == "" {
		return nil, fmt.Errorf("failed to create a BigQuery session")
	// Determine the session ID to use for subsequent queries.
	// It's either from the pre-existing session (protected mode) or the one just created.
	var sessionID string
	if session != nil {
		sessionID = session.ID
	} else if status.Statistics != nil && status.Statistics.SessionInfo != nil {
		sessionID = status.Statistics.SessionInfo.SessionID
	} else {
		return nil, fmt.Errorf("failed to get or create a BigQuery session ID")
	}
	sessionID := status.Statistics.SessionInfo.SessionID

	getInsightsSQL := fmt.Sprintf("SELECT * FROM ML.GET_INSIGHTS(MODEL %s)", modelID)

	getInsightsQuery := bqClient.Query(getInsightsSQL)
	getInsightsQuery.ConnectionProperties = []*bigqueryapi.ConnectionProperty{
		{Key: "session_id", Value: sessionID},
	}
	getInsightsQuery.ConnectionProperties = []*bigqueryapi.ConnectionProperty{{Key: "session_id", Value: sessionID}}

	job, err := getInsightsQuery.Run(ctx)
	if err != nil {

@@ -17,8 +17,11 @@ package bigquerycommon
import (
	"context"
	"fmt"
	"sort"
	"strings"

	bigqueryapi "cloud.google.com/go/bigquery"
	"github.com/googleapis/genai-toolbox/internal/tools"
	bigqueryrestapi "google.golang.org/api/bigquery/v2"
)

@@ -53,3 +56,65 @@ func DryRunQuery(ctx context.Context, restService *bigqueryrestapi.Service, proj
	}
	return insertResponse, nil
}

// BQTypeStringFromToolType converts a tool parameter type string to a BigQuery standard SQL type string.
func BQTypeStringFromToolType(toolType string) (string, error) {
	switch toolType {
	case "string":
		return "STRING", nil
	case "integer":
		return "INT64", nil
	case "float":
		return "FLOAT64", nil
	case "boolean":
		return "BOOL", nil
	default:
		return "", fmt.Errorf("unsupported tool parameter type for BigQuery: %s", toolType)
	}
}

// InitializeDatasetParameters generates project and dataset tool parameters based on allowedDatasets.
func InitializeDatasetParameters(
	allowedDatasets []string,
	defaultProjectID string,
	projectKey, datasetKey string,
	projectDescription, datasetDescription string,
) (projectParam, datasetParam tools.Parameter) {
	if len(allowedDatasets) > 0 {
		if len(allowedDatasets) == 1 {
			parts := strings.Split(allowedDatasets[0], ".")
			defaultProjectID = parts[0]
			datasetID := parts[1]
			projectDescription += fmt.Sprintf(" Must be `%s`.", defaultProjectID)
			datasetDescription += fmt.Sprintf(" Must be `%s`.", datasetID)
			datasetParam = tools.NewStringParameterWithDefault(datasetKey, datasetID, datasetDescription)
		} else {
			datasetIDsByProject := make(map[string][]string)
			for _, ds := range allowedDatasets {
				parts := strings.Split(ds, ".")
				project := parts[0]
				dataset := parts[1]
				datasetIDsByProject[project] = append(datasetIDsByProject[project], fmt.Sprintf("`%s`", dataset))
			}

			var datasetDescriptions, projectIDList []string
			for project, datasets := range datasetIDsByProject {
				sort.Strings(datasets)
				projectIDList = append(projectIDList, fmt.Sprintf("`%s`", project))
				datasetList := strings.Join(datasets, ", ")
				datasetDescriptions = append(datasetDescriptions, fmt.Sprintf("%s from project `%s`", datasetList, project))
			}
			sort.Strings(projectIDList)
			sort.Strings(datasetDescriptions)
			projectDescription += fmt.Sprintf(" Must be one of the following: %s.", strings.Join(projectIDList, ", "))
			datasetDescription += fmt.Sprintf(" Must be one of the allowed datasets: %s.", strings.Join(datasetDescriptions, "; "))
			datasetParam = tools.NewStringParameter(datasetKey, datasetDescription)
		}
	} else {
		datasetParam = tools.NewStringParameter(datasetKey, datasetDescription)
	}

	projectParam = tools.NewStringParameterWithDefault(projectKey, defaultProjectID, projectDescription)

	return projectParam, datasetParam
}

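To see what the multi-dataset branch of InitializeDatasetParameters produces, here is a standalone sketch of just the description assembly for a hypothetical allow-list (the proj-a/proj-b dataset names are invented):

package example

import (
	"fmt"
	"sort"
	"strings"
)

func main() {
	// Mirrors the multi-dataset branch above.
	allowed := []string{"proj-a.sales", "proj-a.hr", "proj-b.logs"}
	byProject := map[string][]string{}
	for _, ds := range allowed {
		parts := strings.Split(ds, ".")
		byProject[parts[0]] = append(byProject[parts[0]], fmt.Sprintf("`%s`", parts[1]))
	}
	var descs, projects []string
	for p, d := range byProject {
		sort.Strings(d)
		projects = append(projects, fmt.Sprintf("`%s`", p))
		descs = append(descs, fmt.Sprintf("%s from project `%s`", strings.Join(d, ", "), p))
	}
	sort.Strings(projects)
	sort.Strings(descs)
	// project: Must be one of the following: `proj-a`, `proj-b`.
	fmt.Printf("project: Must be one of the following: %s.\n", strings.Join(projects, ", "))
	// dataset: Must be one of the allowed datasets: `hr`, `sales` from project `proj-a`; `logs` from project `proj-b`.
	fmt.Printf("dataset: Must be one of the allowed datasets: %s.\n", strings.Join(descs, "; "))
}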
@@ -49,6 +49,8 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.T

type compatibleSource interface {
	BigQueryClient() *bigqueryapi.Client
	BigQuerySession() bigqueryds.BigQuerySessionProvider
	BigQueryWriteMode() string
	BigQueryRestService() *bigqueryrestapi.Service
	BigQueryClientCreator() bigqueryds.BigqueryClientCreator
	UseClientAuthorization() bool
@@ -89,33 +91,43 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
		return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
	}

	sqlDescription := "The sql to execute."
	var sqlDescriptionBuilder strings.Builder
	switch s.BigQueryWriteMode() {
	case bigqueryds.WriteModeBlocked:
		sqlDescriptionBuilder.WriteString("The SQL to execute. In 'blocked' mode, only SELECT statements are allowed; other statement types will fail.")
	case bigqueryds.WriteModeProtected:
		sqlDescriptionBuilder.WriteString("The SQL to execute. Only SELECT statements and writes to the session's temporary dataset are allowed (e.g., `CREATE TEMP TABLE ...`).")
	default: // WriteModeAllowed
		sqlDescriptionBuilder.WriteString("The SQL to execute.")
	}

	allowedDatasets := s.BigQueryAllowedDatasets()
	if len(allowedDatasets) > 0 {
		datasetIDs := []string{}
		for _, ds := range allowedDatasets {
			datasetIDs = append(datasetIDs, fmt.Sprintf("`%s`", ds))
		}

		if len(datasetIDs) == 1 {
			parts := strings.Split(allowedDatasets[0], ".")
		if len(allowedDatasets) == 1 {
			datasetFQN := allowedDatasets[0]
			parts := strings.Split(datasetFQN, ".")
			if len(parts) < 2 {
				return nil, fmt.Errorf("expected split to have 2 parts: %s", allowedDatasets[0])
				return nil, fmt.Errorf("expected allowedDataset to have at least 2 parts (project.dataset): %s", datasetFQN)
			}
			datasetID := parts[1]
			sqlDescription += fmt.Sprintf(" The query must only access the %s dataset. "+
			sqlDescriptionBuilder.WriteString(fmt.Sprintf(" The query must only access the `%s` dataset. "+
				"To query a table within this dataset (e.g., `my_table`), "+
				"qualify it with the dataset id (e.g., `%s.my_table`).", datasetIDs[0], datasetID)
				"qualify it with the dataset id (e.g., `%s.my_table`).", datasetFQN, datasetID))
		} else {
			sqlDescription += fmt.Sprintf(" The query must only access datasets from the following list: %s.", strings.Join(datasetIDs, ", "))
			datasetIDs := []string{}
			for _, ds := range allowedDatasets {
				datasetIDs = append(datasetIDs, fmt.Sprintf("`%s`", ds))
			}
			sqlDescriptionBuilder.WriteString(fmt.Sprintf(" The query must only access datasets from the following list: %s.", strings.Join(datasetIDs, ", ")))
		}
	}
	sqlParameter := tools.NewStringParameter("sql", sqlDescription)

	sqlParameter := tools.NewStringParameter("sql", sqlDescriptionBuilder.String())
	dryRunParameter := tools.NewBooleanParameterWithDefault(
		"dry_run",
		false,
		"If set to true, the query will be validated and information about the execution "+
			"will be returned without running the query. Defaults to false.",
		"If set to true, the query will be validated and information about the execution will be returned "+
			"without running the query. Defaults to false.",
	)
	parameters := tools.Parameters{sqlParameter, dryRunParameter}
	mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, parameters)
@@ -130,6 +142,8 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
		ClientCreator: s.BigQueryClientCreator(),
		Client: s.BigQueryClient(),
		RestService: s.BigQueryRestService(),
		WriteMode: s.BigQueryWriteMode(),
		SessionProvider: s.BigQuerySession(),
		IsDatasetAllowed: s.IsDatasetAllowed,
		AllowedDatasets: allowedDatasets,
		manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
@@ -150,6 +164,8 @@ type Tool struct {

	Client *bigqueryapi.Client
	RestService *bigqueryrestapi.Service
	WriteMode string
	SessionProvider bigqueryds.BigQuerySessionProvider
	ClientCreator bigqueryds.BigqueryClientCreator
	IsDatasetAllowed func(projectID, datasetID string) bool
	AllowedDatasets []string
@@ -184,12 +200,39 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
		}
	}

	dryRunJob, err := bqutil.DryRunQuery(ctx, restService, bqClient.Project(), bqClient.Location, sql, nil, nil)
	if err != nil {
		return nil, fmt.Errorf("query validation failed during dry run: %w", err)
	var connProps []*bigqueryapi.ConnectionProperty
	var session *bigqueryds.Session
	if t.WriteMode == bigqueryds.WriteModeProtected {
		session, err = t.SessionProvider(ctx)
		if err != nil {
			return nil, fmt.Errorf("failed to get BigQuery session for protected mode: %w", err)
		}
		connProps = []*bigqueryapi.ConnectionProperty{
			{Key: "session_id", Value: session.ID},
		}
	}

	dryRunJob, err := bqutil.DryRunQuery(ctx, restService, bqClient.Project(), bqClient.Location, sql, nil, connProps)
	if err != nil {
		return nil, fmt.Errorf("query validation failed: %w", err)
	}

	statementType := dryRunJob.Statistics.Query.StatementType

	switch t.WriteMode {
	case bigqueryds.WriteModeBlocked:
		if statementType != "SELECT" {
			return nil, fmt.Errorf("write mode is 'blocked', only SELECT statements are allowed")
		}
	case bigqueryds.WriteModeProtected:
		if dryRunJob.Configuration != nil && dryRunJob.Configuration.Query != nil {
			if dest := dryRunJob.Configuration.Query.DestinationTable; dest != nil && dest.DatasetId != session.DatasetID {
				return nil, fmt.Errorf("protected write mode only supports SELECT statements, or write operations in the anonymous "+
					"dataset of a BigQuery session, but destination was %q", dest.DatasetId)
			}
		}
	}

	if len(t.AllowedDatasets) > 0 {
		switch statementType {
		case "CREATE_SCHEMA", "DROP_SCHEMA", "ALTER_SCHEMA":
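In practice, protected mode still lets a caller create scratch state, as long as the writes land in the session's anonymous dataset — for example TEMP tables created and read under one session ID. A sketch with the public BigQuery Go client (the function name and SQL are illustrative, not part of the change):

package example

import (
	"context"

	bigqueryapi "cloud.google.com/go/bigquery"
)

// tempTableRoundTrip writes to and reads from the session's temporary
// dataset; both statements share the same session ID.
func tempTableRoundTrip(ctx context.Context, client *bigqueryapi.Client, sessionID string) error {
	for _, sql := range []string{
		"CREATE TEMP TABLE scratch AS SELECT 1 AS x", // lands in the session dataset
		"SELECT x FROM scratch",                      // readable within the same session
	} {
		q := client.Query(sql)
		q.ConnectionProperties = []*bigqueryapi.ConnectionProperty{{Key: "session_id", Value: sessionID}}
		job, err := q.Run(ctx)
		if err != nil {
			return err
		}
		if _, err := job.Read(ctx); err != nil {
			return err
		}
	}
	return nil
}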
@@ -259,6 +302,8 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
	query := bqClient.Query(sql)
	query.Location = bqClient.Location

	query.ConnectionProperties = connProps

	// Log the query executed for debugging.
	logger, err := util.LoggerFromContext(ctx)
	if err != nil {
@@ -270,10 +315,14 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
	// We iterate through the results, convert each row into a map of
	// column names to values, and return the collection of rows.
	var out []any
	it, err := query.Read(ctx)
	job, err := query.Run(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to execute query: %w", err)
	}
	it, err := job.Read(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to read query results: %w", err)
	}
	for {
		var row map[string]bigqueryapi.Value
		err = it.Next(&row)

@@ -53,6 +53,7 @@ type compatibleSource interface {
	UseClientAuthorization() bool
	IsDatasetAllowed(projectID, datasetID string) bool
	BigQueryAllowedDatasets() []string
	BigQuerySession() bigqueryds.BigQuerySessionProvider
}

// validate compatible sources are still compatible
@@ -123,6 +124,7 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
		Client: s.BigQueryClient(),
		RestService: s.BigQueryRestService(),
		IsDatasetAllowed: s.IsDatasetAllowed,
		SessionProvider: s.BigQuerySession(),
		AllowedDatasets: allowedDatasets,
		manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
		mcpManifest: mcpManifest,
@@ -145,6 +147,7 @@ type Tool struct {
	ClientCreator bigqueryds.BigqueryClientCreator
	IsDatasetAllowed func(projectID, datasetID string) bool
	AllowedDatasets []string
	SessionProvider bigqueryds.BigQuerySessionProvider
	manifest tools.Manifest
	mcpManifest tools.McpManifest
}
@@ -184,13 +187,39 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
		}
	}

	bqClient := t.Client
	restService := t.RestService
	var err error

	// Initialize new client if using user OAuth token
	if t.UseClientOAuth {
		tokenStr, err := accessToken.ParseBearerToken()
		if err != nil {
			return nil, fmt.Errorf("error parsing access token: %w", err)
		}
		bqClient, restService, err = t.ClientCreator(tokenStr, false)
		if err != nil {
			return nil, fmt.Errorf("error creating client from OAuth access token: %w", err)
		}
	}

	var historyDataSource string
	trimmedUpperHistoryData := strings.TrimSpace(strings.ToUpper(historyData))
	if strings.HasPrefix(trimmedUpperHistoryData, "SELECT") || strings.HasPrefix(trimmedUpperHistoryData, "WITH") {
		if len(t.AllowedDatasets) > 0 {
			dryRunJob, err := bqutil.DryRunQuery(ctx, t.RestService, t.Client.Project(), t.Client.Location, historyData, nil, nil)
			var connProps []*bigqueryapi.ConnectionProperty
			session, err := t.SessionProvider(ctx)
			if err != nil {
				return nil, fmt.Errorf("query validation failed during dry run: %w", err)
				return nil, fmt.Errorf("failed to get BigQuery session: %w", err)
			}
			if session != nil {
				connProps = []*bigqueryapi.ConnectionProperty{
					{Key: "session_id", Value: session.ID},
				}
			}
			dryRunJob, err := bqutil.DryRunQuery(ctx, restService, t.Client.Project(), t.Client.Location, historyData, nil, connProps)
			if err != nil {
				return nil, fmt.Errorf("query validation failed: %w", err)
			}
			statementType := dryRunJob.Statistics.Query.StatementType
			if statementType != "SELECT" {
@@ -246,24 +275,19 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
		horizon => %d%s)`,
		historyDataSource, dataCol, timestampCol, horizon, idColsArg)

	bqClient := t.Client
	var err error

	// Initialize new client if using user OAuth token
	if t.UseClientOAuth {
		tokenStr, err := accessToken.ParseBearerToken()
		if err != nil {
			return nil, fmt.Errorf("error parsing access token: %w", err)
		}
		bqClient, _, err = t.ClientCreator(tokenStr, false)
		if err != nil {
			return nil, fmt.Errorf("error creating client from OAuth access token: %w", err)
		}
	}

	// JobStatistics.QueryStatistics.StatementType
	query := bqClient.Query(sql)
	query.Location = bqClient.Location
	session, err := t.SessionProvider(ctx)
	if err != nil {
		return nil, fmt.Errorf("failed to get BigQuery session: %w", err)
	}
	if session != nil {
		// Add session ID to the connection properties for subsequent calls.
		query.ConnectionProperties = []*bigqueryapi.ConnectionProperty{
			{Key: "session_id", Value: session.ID},
		}
	}

	// Log the query executed for debugging.
	logger, err := util.LoggerFromContext(ctx)
@@ -276,10 +300,14 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
	// We iterate through the results, convert each row into a map of
	// column names to values, and return the collection of rows.
	var out []any
	it, err := query.Read(ctx)
	job, err := query.Run(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to execute query: %w", err)
	}
	it, err := job.Read(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to read query results: %w", err)
	}
	for {
		var row map[string]bigqueryapi.Value
		err = it.Next(&row)

@@ -23,6 +23,7 @@ import (
	"github.com/googleapis/genai-toolbox/internal/sources"
	bigqueryds "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
	"github.com/googleapis/genai-toolbox/internal/tools"
	bqutil "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerycommon"
)

const kind string = "bigquery-get-table-info"
@@ -49,6 +50,8 @@ type compatibleSource interface {
	BigQueryClient() *bigqueryapi.Client
	BigQueryClientCreator() bigqueryds.BigqueryClientCreator
	UseClientAuthorization() bool
	IsDatasetAllowed(projectID, datasetID string) bool
	BigQueryAllowedDatasets() []string
}

// validate compatible sources are still compatible
@@ -84,8 +87,19 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
		return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
	}

	projectParameter := tools.NewStringParameterWithDefault(projectKey, s.BigQueryProject(), "The Google Cloud project ID containing the dataset and table.")
	datasetParameter := tools.NewStringParameter(datasetKey, "The table's parent dataset.")
	defaultProjectID := s.BigQueryProject()
	projectDescription := "The Google Cloud project ID containing the dataset and table."
	datasetDescription := "The table's parent dataset."
	var datasetParameter tools.Parameter
	var projectParameter tools.Parameter

	projectParameter, datasetParameter = bqutil.InitializeDatasetParameters(
		s.BigQueryAllowedDatasets(),
		defaultProjectID,
		projectKey, datasetKey,
		projectDescription, datasetDescription,
	)

	tableParameter := tools.NewStringParameter(tableKey, "The table to get metadata information.")
	parameters := tools.Parameters{projectParameter, datasetParameter, tableParameter}

@@ -93,15 +107,16 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)

	// finish tool setup
	t := Tool{
		Name: cfg.Name,
		Kind: kind,
		Parameters: parameters,
		AuthRequired: cfg.AuthRequired,
		UseClientOAuth: s.UseClientAuthorization(),
		ClientCreator: s.BigQueryClientCreator(),
		Client: s.BigQueryClient(),
		manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
		mcpManifest: mcpManifest,
		Name: cfg.Name,
		Kind: kind,
		Parameters: parameters,
		AuthRequired: cfg.AuthRequired,
		UseClientOAuth: s.UseClientAuthorization(),
		ClientCreator: s.BigQueryClientCreator(),
		Client: s.BigQueryClient(),
		IsDatasetAllowed: s.IsDatasetAllowed,
		manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
		mcpManifest: mcpManifest,
	}
	return t, nil
}
@@ -116,11 +131,12 @@ type Tool struct {
	UseClientOAuth bool `yaml:"useClientOAuth"`
	Parameters tools.Parameters `yaml:"parameters"`

	Client *bigqueryapi.Client
	ClientCreator bigqueryds.BigqueryClientCreator
	Statement string
	manifest tools.Manifest
	mcpManifest tools.McpManifest
	Client *bigqueryapi.Client
	ClientCreator bigqueryds.BigqueryClientCreator
	Statement string
	IsDatasetAllowed func(projectID, datasetID string) bool
	manifest tools.Manifest
	mcpManifest tools.McpManifest
}

func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
@@ -140,6 +156,10 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
		return nil, fmt.Errorf("invalid or missing '%s' parameter; expected a string", tableKey)
	}

	if !t.IsDatasetAllowed(projectId, datasetId) {
		return nil, fmt.Errorf("access denied to dataset '%s' because it is not in the configured list of allowed datasets for project '%s'", datasetId, projectId)
	}

	bqClient := t.Client

	var err error

@@ -48,6 +48,7 @@ type compatibleSource interface {
	BigQueryClient() *bigqueryapi.Client
	BigQueryClientCreator() bigqueryds.BigqueryClientCreator
	UseClientAuthorization() bool
	BigQueryAllowedDatasets() []string
}

// validate compatible sources are still compatible
@@ -83,7 +84,17 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
		return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
	}

	projectParameter := tools.NewStringParameterWithDefault(projectKey, s.BigQueryProject(), "The Google Cloud project to list dataset ids.")
	var projectParameter tools.Parameter
	var projectParameterDescription string

	allowedDatasets := s.BigQueryAllowedDatasets()
	if len(allowedDatasets) > 0 {
		projectParameterDescription = "This parameter will be ignored. The list of datasets is restricted to a pre-configured list; No need to provide a project ID."
	} else {
		projectParameterDescription = "The Google Cloud project to list dataset ids."
	}

	projectParameter = tools.NewStringParameterWithDefault(projectKey, s.BigQueryProject(), projectParameterDescription)

	parameters := tools.Parameters{projectParameter}

@@ -91,15 +102,16 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)

	// finish tool setup
	t := Tool{
		Name: cfg.Name,
		Kind: kind,
		Parameters: parameters,
		AuthRequired: cfg.AuthRequired,
		UseClientOAuth: s.UseClientAuthorization(),
		ClientCreator: s.BigQueryClientCreator(),
		Client: s.BigQueryClient(),
		manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
		mcpManifest: mcpManifest,
		Name: cfg.Name,
		Kind: kind,
		Parameters: parameters,
		AuthRequired: cfg.AuthRequired,
		UseClientOAuth: s.UseClientAuthorization(),
		ClientCreator: s.BigQueryClientCreator(),
		Client: s.BigQueryClient(),
		AllowedDatasets: allowedDatasets,
		manifest: tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
		mcpManifest: mcpManifest,
	}
	return t, nil
}
@@ -114,14 +126,18 @@ type Tool struct {
	UseClientOAuth bool `yaml:"useClientOAuth"`
	Parameters tools.Parameters `yaml:"parameters"`

	Client *bigqueryapi.Client
	ClientCreator bigqueryds.BigqueryClientCreator
	Statement string
	manifest tools.Manifest
	mcpManifest tools.McpManifest
	Client *bigqueryapi.Client
	ClientCreator bigqueryds.BigqueryClientCreator
	Statement string
	AllowedDatasets []string
	manifest tools.Manifest
	mcpManifest tools.McpManifest
}

func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
	if len(t.AllowedDatasets) > 0 {
		return t.AllowedDatasets, nil
	}
	mapParams := params.AsMap()
	projectId, ok := mapParams[projectKey].(string)
	if !ok {

@@ -17,14 +17,13 @@ package bigquerylisttableids
import (
	"context"
	"fmt"
	"sort"
	"strings"

	bigqueryapi "cloud.google.com/go/bigquery"
	yaml "github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	bigqueryds "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
	"github.com/googleapis/genai-toolbox/internal/tools"
	bqutil "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerycommon"
	"google.golang.org/api/iterator"
)

@@ -92,39 +91,14 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
	projectDescription := "The Google Cloud project ID containing the dataset."
	datasetDescription := "The dataset to list table ids."
	var datasetParameter tools.Parameter
	allowedDatasets := s.BigQueryAllowedDatasets()
	if len(allowedDatasets) > 0 {
		if len(allowedDatasets) == 1 {
			parts := strings.Split(allowedDatasets[0], ".")
			defaultProjectID = parts[0]
			datasetID := parts[1]
			projectDescription += fmt.Sprintf(" Must be `%s`.", defaultProjectID)
			datasetDescription += fmt.Sprintf(" Must be `%s`.", datasetID)
			datasetParameter = tools.NewStringParameterWithDefault(datasetKey, datasetID, datasetDescription)
		} else {
			datasetIDsByProject := make(map[string][]string)
			for _, ds := range allowedDatasets {
				parts := strings.Split(ds, ".")
				project := parts[0]
				dataset := parts[1]
				datasetIDsByProject[project] = append(datasetIDsByProject[project], fmt.Sprintf("`%s`", dataset))
			}
	var projectParameter tools.Parameter

			var datasetDescriptions, projectIDList []string
			for project, datasets := range datasetIDsByProject {
				sort.Strings(datasets)
				projectIDList = append(projectIDList, fmt.Sprintf("`%s`", project))
				datasetList := strings.Join(datasets, ", ")
				datasetDescriptions = append(datasetDescriptions, fmt.Sprintf("%s from project `%s`", datasetList, project))
			}
			projectDescription += fmt.Sprintf(" Must be one of the following: %s.", strings.Join(projectIDList, ", "))
			datasetDescription += fmt.Sprintf(" Must be one of the allowed datasets: %s.", strings.Join(datasetDescriptions, "; "))
			datasetParameter = tools.NewStringParameter(datasetKey, datasetDescription)
		}
	} else {
		datasetParameter = tools.NewStringParameter(datasetKey, datasetDescription)
	}
	projectParameter := tools.NewStringParameterWithDefault(projectKey, defaultProjectID, projectDescription)
	projectParameter, datasetParameter = bqutil.InitializeDatasetParameters(
		s.BigQueryAllowedDatasets(),
		defaultProjectID,
		projectKey, datasetKey,
		projectDescription, datasetDescription,
	)

	parameters := tools.Parameters{projectParameter, datasetParameter}

@@ -49,6 +49,8 @@ func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.T

type compatibleSource interface {
	BigQueryClient() *bigqueryapi.Client
	BigQuerySession() bigqueryds.BigQuerySessionProvider
	BigQueryWriteMode() string
	BigQueryRestService() *bigqueryrestapi.Service
	BigQueryClientCreator() bigqueryds.BigqueryClientCreator
	UseClientAuthorization() bool
@@ -106,13 +108,14 @@ func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error)
		TemplateParameters: cfg.TemplateParameters,
		AllParams: allParameters,

		Statement: cfg.Statement,
		UseClientOAuth: s.UseClientAuthorization(),
		Client: s.BigQueryClient(),
		RestService: s.BigQueryRestService(),
		ClientCreator: s.BigQueryClientCreator(),
		manifest: tools.Manifest{Description: cfg.Description, Parameters: paramManifest, AuthRequired: cfg.AuthRequired},
		mcpManifest: mcpManifest,
		Statement: cfg.Statement,
		UseClientOAuth: s.UseClientAuthorization(),
		Client: s.BigQueryClient(),
		RestService: s.BigQueryRestService(),
		SessionProvider: s.BigQuerySession(),
		ClientCreator: s.BigQueryClientCreator(),
		manifest: tools.Manifest{Description: cfg.Description, Parameters: paramManifest, AuthRequired: cfg.AuthRequired},
		mcpManifest: mcpManifest,
	}
	return t, nil
}
@@ -129,12 +132,13 @@ type Tool struct {
	TemplateParameters tools.Parameters `yaml:"templateParameters"`
	AllParams tools.Parameters `yaml:"allParams"`

	Statement string
	Client *bigqueryapi.Client
	RestService *bigqueryrestapi.Service
	ClientCreator bigqueryds.BigqueryClientCreator
	manifest tools.Manifest
	mcpManifest tools.McpManifest
	Statement string
	Client *bigqueryapi.Client
	RestService *bigqueryrestapi.Service
	SessionProvider bigqueryds.BigQuerySessionProvider
	ClientCreator bigqueryds.BigqueryClientCreator
	manifest tools.Manifest
	mcpManifest tools.McpManifest
}

func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
@@ -187,7 +191,7 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
	if arrayParam, ok := p.(*tools.ArrayParameter); ok {
		// Handle array types based on their defined item type.
		lowLevelParam.ParameterType.Type = "ARRAY"
		itemType, err := BQTypeStringFromToolType(arrayParam.GetItems().GetType())
		itemType, err := bqutil.BQTypeStringFromToolType(arrayParam.GetItems().GetType())
		if err != nil {
			return nil, err
		}
@@ -204,7 +208,7 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
		lowLevelParam.ParameterValue.ArrayValues = arrayValues
	} else {
		// Handle scalar types based on their defined type.
		bqType, err := BQTypeStringFromToolType(p.GetType())
		bqType, err := bqutil.BQTypeStringFromToolType(p.GetType())
		if err != nil {
			return nil, err
		}
@@ -233,20 +237,36 @@ func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken
	query.Parameters = highLevelParams
	query.Location = bqClient.Location

	dryRunJob, err := bqutil.DryRunQuery(ctx, restService, bqClient.Project(), bqClient.Location, newStatement, lowLevelParams, query.ConnectionProperties)
	if err != nil {
		// This is a fallback check in case the switch logic was bypassed.
		return nil, fmt.Errorf("final query validation failed: %w", err)
	connProps := []*bigqueryapi.ConnectionProperty{}
	if t.SessionProvider != nil {
		session, err := t.SessionProvider(ctx)
		if err != nil {
			return nil, fmt.Errorf("failed to get BigQuery session: %w", err)
		}
		if session != nil {
			// Add session ID to the connection properties for subsequent calls.
			connProps = append(connProps, &bigqueryapi.ConnectionProperty{Key: "session_id", Value: session.ID})
		}
	}
	query.ConnectionProperties = connProps
	dryRunJob, err := bqutil.DryRunQuery(ctx, restService, bqClient.Project(), query.Location, newStatement, lowLevelParams, connProps)
	if err != nil {
		return nil, fmt.Errorf("query validation failed: %w", err)
	}

	statementType := dryRunJob.Statistics.Query.StatementType

	// This block handles SELECT statements, which return a row set.
	// We iterate through the results, convert each row into a map of
	// column names to values, and return the collection of rows.
	it, err := query.Read(ctx)
	job, err := query.Run(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to execute query: %w", err)
	}
	it, err := job.Read(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to read query results: %w", err)
	}

	var out []any
	for {
@@ -300,19 +320,3 @@ func (t Tool) Authorized(verifiedAuthServices []string) bool {
func (t Tool) RequiresClientAuthorization() bool {
	return t.UseClientOAuth
}

func BQTypeStringFromToolType(toolType string) (string, error) {
	switch toolType {
	case "string":
		return "STRING", nil
	case "integer":
		return "INT64", nil
	case "float":
		return "FLOAT64", nil
	case "boolean":
		return "BOOL", nil
	// Note: 'array' is handled separately as it has a nested item type.
	default:
		return "", fmt.Errorf("unsupported tool parameter type for BigQuery: %s", toolType)
	}
}

@@ -0,0 +1,122 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package dataformcompilelocal

import (
	"context"
	"fmt"
	"os/exec"
	"strings"

	"github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	"github.com/googleapis/genai-toolbox/internal/tools"
)

const kind string = "dataform-compile-local"

func init() {
	if !tools.Register(kind, newConfig) {
		panic(fmt.Sprintf("tool kind %q already registered", kind))
	}
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
	actual := Config{Name: name}
	if err := decoder.DecodeContext(ctx, &actual); err != nil {
		return nil, err
	}
	return actual, nil
}

type Config struct {
	Name string `yaml:"name" validate:"required"`
	Kind string `yaml:"kind" validate:"required"`
	Description string `yaml:"description" validate:"required"`
	AuthRequired []string `yaml:"authRequired"`
}

var _ tools.ToolConfig = Config{}

func (cfg Config) ToolConfigKind() string {
	return kind
}

func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
	allParameters := tools.Parameters{
		tools.NewStringParameter("project_dir", "The Dataform project directory."),
	}
	paramManifest := allParameters.Manifest()
	mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, allParameters)

	t := Tool{
		Name: cfg.Name,
		Kind: kind,
		AuthRequired: cfg.AuthRequired,
		Parameters: allParameters,
		manifest: tools.Manifest{Description: cfg.Description, Parameters: paramManifest, AuthRequired: cfg.AuthRequired},
		mcpManifest: mcpManifest,
	}

	return t, nil
}

var _ tools.Tool = Tool{}

type Tool struct {
	Name string `yaml:"name"`
	Kind string `yaml:"kind"`
	AuthRequired []string `yaml:"authRequired"`
	Parameters tools.Parameters `yaml:"allParams"`
	manifest tools.Manifest
	mcpManifest tools.McpManifest
}

func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
	paramsMap := params.AsMap()

	projectDir, ok := paramsMap["project_dir"].(string)
	if !ok || projectDir == "" {
		return nil, fmt.Errorf("error casting 'project_dir' to string or invalid value")
	}

	cmd := exec.CommandContext(ctx, "dataform", "compile", projectDir, "--json")
	output, err := cmd.CombinedOutput()
	if err != nil {
		return nil, fmt.Errorf("error executing dataform compile: %w\nOutput: %s", err, string(output))
	}

	return strings.TrimSpace(string(output)), nil
}

func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
	return tools.ParseParams(t.Parameters, data, claims)
}

func (t Tool) Manifest() tools.Manifest {
	return t.manifest
}

func (t Tool) McpManifest() tools.McpManifest {
	return t.mcpManifest
}

func (t Tool) Authorized(verifiedAuthServices []string) bool {
	return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}

func (t Tool) RequiresClientAuthorization() bool {
	return false
}
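Since Invoke shells out to the Dataform CLI, a caller gets the compiled graph back as a single JSON string. A sketch of decoding it into a generic map (the exact field names depend on the @dataform/cli version, so this stays deliberately untyped):

package example

import (
	"context"
	"encoding/json"
	"os/exec"
)

func compileProject(ctx context.Context, projectDir string) (map[string]any, error) {
	// Same CLI invocation as the tool; Output() captures stdout only.
	out, err := exec.CommandContext(ctx, "dataform", "compile", projectDir, "--json").Output()
	if err != nil {
		return nil, err
	}
	var graph map[string]any
	if err := json.Unmarshal(out, &graph); err != nil {
		return nil, err
	}
	return graph, nil
}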
@@ -0,0 +1,71 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package dataformcompilelocal_test

import (
	"testing"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/server"
	"github.com/googleapis/genai-toolbox/internal/testutils"
	"github.com/googleapis/genai-toolbox/internal/tools/dataform/dataformcompilelocal"
)

func TestParseFromYamlDataformCompile(t *testing.T) {
	ctx, err := testutils.ContextWithNewLogger()
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	tcs := []struct {
		desc string
		in string
		want server.ToolConfigs
	}{
		{
			desc: "basic example",
			in: `
			tools:
				example_tool:
					kind: dataform-compile-local
					description: some description
			`,
			want: server.ToolConfigs{
				"example_tool": dataformcompilelocal.Config{
					Name: "example_tool",
					Kind: "dataform-compile-local",
					Description: "some description",
					AuthRequired: []string{},
				},
			},
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Tools server.ToolConfigs `yaml:"tools"`
			}{}
			// Parse contents
			err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
			if err != nil {
				t.Fatalf("unable to unmarshal: %s", err)
			}
			if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
				t.Fatalf("incorrect parse: diff %v", diff)
			}
		})
	}

}
@@ -0,0 +1,550 @@
|
||||
// Copyright 2025 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package lookerconversationalanalytics
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
|
||||
yaml "github.com/goccy/go-yaml"
|
||||
"github.com/googleapis/genai-toolbox/internal/sources"
|
||||
lookerds "github.com/googleapis/genai-toolbox/internal/sources/looker"
|
||||
"github.com/googleapis/genai-toolbox/internal/tools"
|
||||
"github.com/googleapis/genai-toolbox/internal/util"
|
||||
"github.com/looker-open-source/sdk-codegen/go/rtl"
|
||||
"golang.org/x/oauth2"
|
||||
)
|
||||
|
||||
const kind string = "looker-conversational-analytics"
|
||||
|
||||
const instructions = `**INSTRUCTIONS - FOLLOW THESE RULES:**
|
||||
1. **CONTENT:** Your answer should present the supporting data and then provide a conclusion based on that data.
|
||||
2. **OUTPUT FORMAT:** Your entire response MUST be in plain text format ONLY.
|
||||
3. **NO CHARTS:** You are STRICTLY FORBIDDEN from generating any charts, graphs, images, or any other form of visualization.`
|
||||
|
||||
func init() {
|
||||
if !tools.Register(kind, newConfig) {
|
||||
panic(fmt.Sprintf("tool kind %q already registered", kind))
|
||||
}
|
||||
}
|
||||
|
||||
func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
|
||||
actual := Config{Name: name}
|
||||
if err := decoder.DecodeContext(ctx, &actual); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return actual, nil
|
||||
}
|
||||
|
||||
type compatibleSource interface {
|
||||
GetApiSettings() *rtl.ApiSettings
|
||||
GoogleCloudTokenSourceWithScope(ctx context.Context, scope string) (oauth2.TokenSource, error)
|
||||
GoogleCloudProject() string
|
||||
GoogleCloudLocation() string
|
||||
UseClientAuthorization() bool
|
||||
}
|
||||
|
||||
// Structs for building the JSON payload
|
||||
type UserMessage struct {
|
||||
Text string `json:"text"`
|
||||
}
|
||||
type Message struct {
|
||||
UserMessage UserMessage `json:"userMessage"`
|
||||
}
|
||||
type LookerExploreReference struct {
|
||||
LookerInstanceUri string `json:"lookerInstanceUri"`
|
||||
LookmlModel string `json:"lookmlModel"`
|
||||
Explore string `json:"explore"`
|
||||
}
|
||||
type LookerExploreReferences struct {
|
||||
ExploreReferences []LookerExploreReference `json:"exploreReferences"`
|
||||
Credentials Credentials `json:"credentials,omitzero"`
|
||||
}
|
||||
type SecretBased struct {
|
||||
ClientId string `json:"clientId"`
|
||||
ClientSecret string `json:"clientSecret"`
|
||||
}
|
||||
type TokenBased struct {
|
||||
AccessToken string `json:"accessToken"`
|
||||
}
|
||||
type OAuthCredentials struct {
|
||||
Secret SecretBased `json:"secret,omitzero"`
|
||||
Token TokenBased `json:"token,omitzero"`
|
||||
}
|
||||
type Credentials struct {
|
||||
OAuth OAuthCredentials `json:"oauth"`
|
||||
}
|
||||
type DatasourceReferences struct {
|
||||
Looker LookerExploreReferences `json:"looker"`
|
||||
}
|
||||
type ImageOptions struct {
|
||||
NoImage map[string]any `json:"noImage"`
|
||||
}
|
||||
type ChartOptions struct {
|
||||
Image ImageOptions `json:"image"`
|
||||
}
|
||||
type Python struct {
|
||||
Enabled bool `json:"enabled"`
|
||||
}
|
||||
type AnalysisOptions struct {
|
||||
Python Python `json:"python"`
|
||||
}
|
||||
type ConversationOptions struct {
|
||||
Chart ChartOptions `json:"chart,omitzero"`
|
||||
Analysis AnalysisOptions `json:"analysis,omitzero"`
|
||||
}
|
||||
type InlineContext struct {
|
||||
SystemInstruction string `json:"systemInstruction"`
|
||||
DatasourceReferences DatasourceReferences `json:"datasourceReferences"`
|
||||
Options ConversationOptions `json:"options"`
|
||||
}
|
||||
type CAPayload struct {
|
||||
Messages []Message `json:"messages"`
|
||||
InlineContext InlineContext `json:"inlineContext"`
|
||||
ClientIdEnum string `json:"clientIdEnum"`
|
||||
}
|
||||
|
||||
// validate compatible sources are still compatible
|
||||
var _ compatibleSource = &lookerds.Source{}
|
||||
|
||||
var compatibleSources = [...]string{lookerds.SourceKind}

type Config struct {
    Name         string   `yaml:"name" validate:"required"`
    Kind         string   `yaml:"kind" validate:"required"`
    Source       string   `yaml:"source" validate:"required"`
    Description  string   `yaml:"description" validate:"required"`
    AuthRequired []string `yaml:"authRequired"`
}
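
// An illustrative tools.yaml entry for this tool (the tool and source names
// below are hypothetical; kind, source, and description are the required
// fields validated above, and the shape mirrors the parse test later in
// this change):
//
//  tools:
//    ask_looker_data:
//      kind: looker-conversational-analytics
//      source: my-looker-source
//      description: Ask natural-language questions of Looker explores.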

// validate interface
var _ tools.ToolConfig = Config{}

func (cfg Config) ToolConfigKind() string {
    return kind
}

func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
    // verify source exists
    rawS, ok := srcs[cfg.Source]
    if !ok {
        return nil, fmt.Errorf("no source named %q configured", cfg.Source)
    }

    // verify the source is compatible
    s, ok := rawS.(compatibleSource)
    if !ok {
        return nil, fmt.Errorf("invalid source for %q tool: source kind must be one of %q", kind, compatibleSources)
    }

    if s.GoogleCloudProject() == "" {
        return nil, fmt.Errorf("project must be defined for source to use with %q tool", kind)
    }

    userQueryParameter := tools.NewStringParameter("user_query_with_context", "The user's question, potentially including conversation history and system instructions for context.")

    exploreRefsDescription := `An Array of at least one and up to 5 explore references like [{'model': 'MODEL_NAME', 'explore': 'EXPLORE_NAME'}]`
    exploreRefsParameter := tools.NewArrayParameter(
        "explore_references",
        exploreRefsDescription,
        tools.NewMapParameter(
            "explore_reference",
            "An explore reference like {'model': 'MODEL_NAME', 'explore': 'EXPLORE_NAME'}",
            "",
        ),
    )

    parameters := tools.Parameters{userQueryParameter, exploreRefsParameter}
    mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, parameters)

    // Get cloud-platform token source for Gemini Data Analytics API during initialization
    ctx := context.Background()
    ts, err := s.GoogleCloudTokenSourceWithScope(ctx, "https://www.googleapis.com/auth/cloud-platform")
    if err != nil {
        return nil, fmt.Errorf("failed to get cloud-platform token source: %w", err)
    }

    // finish tool setup
    t := Tool{
        Name:           cfg.Name,
        Kind:           kind,
        ApiSettings:    s.GetApiSettings(),
        Project:        s.GoogleCloudProject(),
        Location:       s.GoogleCloudLocation(),
        Parameters:     parameters,
        AuthRequired:   cfg.AuthRequired,
        UseClientOAuth: s.UseClientAuthorization(),
        TokenSource:    ts,
        manifest:       tools.Manifest{Description: cfg.Description, Parameters: parameters.Manifest(), AuthRequired: cfg.AuthRequired},
        mcpManifest:    mcpManifest,
    }
    return t, nil
}

// validate interface
var _ tools.Tool = Tool{}

type Tool struct {
    Name           string           `yaml:"name"`
    Kind           string           `yaml:"kind"`
    ApiSettings    *rtl.ApiSettings
    AuthRequired   []string         `yaml:"authRequired"`
    UseClientOAuth bool             `yaml:"useClientOAuth"`
    Parameters     tools.Parameters `yaml:"parameters"`
    Project        string
    Location       string
    TokenSource    oauth2.TokenSource
    manifest       tools.Manifest
    mcpManifest    tools.McpManifest
}

func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
    // Get credentials for the API call.
    // Use the cloud-platform token source for the Gemini Data Analytics API.
    if t.TokenSource == nil {
        return nil, fmt.Errorf("cloud-platform token source is missing")
    }
    token, err := t.TokenSource.Token()
    if err != nil {
        return nil, fmt.Errorf("failed to get token from cloud-platform token source: %w", err)
    }
    tokenStr := token.AccessToken

    // Extract parameters from the map
    mapParams := params.AsMap()
    userQuery, _ := mapParams["user_query_with_context"].(string)
    exploreReferences, _ := mapParams["explore_references"].([]any)

    // Validate each reference instead of type-asserting blindly, which would
    // panic on malformed input.
    ler := make([]LookerExploreReference, 0)
    for _, er := range exploreReferences {
        ref, ok := er.(map[string]any)
        if !ok {
            return nil, fmt.Errorf("explore reference must be a map, got %T", er)
        }
        model, okModel := ref["model"].(string)
        explore, okExplore := ref["explore"].(string)
        if !okModel || !okExplore {
            return nil, fmt.Errorf("explore reference must include string 'model' and 'explore' keys")
        }
        ler = append(ler, LookerExploreReference{
            LookerInstanceUri: t.ApiSettings.BaseUrl,
            LookmlModel:       model,
            Explore:           explore,
        })
    }
    oauthCreds := OAuthCredentials{}
    if t.UseClientOAuth {
        oauthCreds.Token = TokenBased{AccessToken: string(accessToken)}
    } else {
        oauthCreds.Secret = SecretBased{ClientId: t.ApiSettings.ClientId, ClientSecret: t.ApiSettings.ClientSecret}
    }

    lers := LookerExploreReferences{
        ExploreReferences: ler,
        Credentials: Credentials{
            OAuth: oauthCreds,
        },
    }

    // Construct URL, headers, and payload
    projectID := t.Project
    location := t.Location
    caURL := fmt.Sprintf("https://geminidataanalytics.googleapis.com/v1beta/projects/%s/locations/%s:chat", url.PathEscape(projectID), url.PathEscape(location))

    headers := map[string]string{
        "Authorization": fmt.Sprintf("Bearer %s", tokenStr),
        "Content-Type":  "application/json",
    }

    payload := CAPayload{
        Messages: []Message{{UserMessage: UserMessage{Text: userQuery}}},
        InlineContext: InlineContext{
            SystemInstruction: instructions,
            DatasourceReferences: DatasourceReferences{
                Looker: lers,
            },
            Options: ConversationOptions{Chart: ChartOptions{Image: ImageOptions{NoImage: map[string]any{}}}},
        },
        ClientIdEnum: "GENAI_TOOLBOX",
    }

    // Call the streaming API
    response, err := getStream(ctx, caURL, payload, headers)
    if err != nil {
        return nil, fmt.Errorf("failed to get response from conversational analytics API: %w", err)
    }

    return response, nil
}

func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
    return tools.ParseParams(t.Parameters, data, claims)
}

func (t Tool) Manifest() tools.Manifest {
    return t.manifest
}

func (t Tool) McpManifest() tools.McpManifest {
    return t.mcpManifest
}

func (t Tool) Authorized(verifiedAuthServices []string) bool {
    return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}

func (t Tool) RequiresClientAuthorization() bool {
    return t.UseClientOAuth
}

// StreamMessage represents a single message object from the streaming API response.
type StreamMessage struct {
    SystemMessage *SystemMessage `json:"systemMessage,omitempty"`
}

// SystemMessage contains different types of system-generated content.
type SystemMessage struct {
    Text     *TextMessage     `json:"text,omitempty"`
    Schema   *SchemaMessage   `json:"schema,omitempty"`
    Data     *DataMessage     `json:"data,omitempty"`
    Analysis *AnalysisMessage `json:"analysis,omitempty"`
    Error    *ErrorMessage    `json:"error,omitempty"`
}

// TextMessage contains textual parts of a message.
type TextMessage struct {
    Parts []string `json:"parts"`
}

// SchemaMessage contains schema-related information.
type SchemaMessage struct {
    Query  *SchemaQuery  `json:"query,omitempty"`
    Result *SchemaResult `json:"result,omitempty"`
}

// SchemaQuery holds the question that prompted a schema lookup.
type SchemaQuery struct {
    Question string `json:"question"`
}

// SchemaResult contains the datasources with their schemas.
type SchemaResult struct {
    Datasources []Datasource `json:"datasources"`
}

// Datasource represents a data source with its reference and schema.
type Datasource struct {
    LookerExploreReference LookerExploreReference `json:"lookerExploreReference"`
}

// DataMessage contains data-related information, like queries and results.
type DataMessage struct {
    GeneratedLookerQuery *LookerQuery `json:"generatedLookerQuery,omitempty"`
    Result               *DataResult  `json:"result,omitempty"`
}

// LookerQuery describes the Looker query generated to answer the question.
type LookerQuery struct {
    Model   string   `json:"model"`
    Explore string   `json:"explore"`
    Fields  []string `json:"fields"`
    Filters []Filter `json:"filters,omitempty"`
    Sorts   []string `json:"sorts,omitempty"`
    Limit   string   `json:"limit,omitempty"`
}

// Filter is a single field/value filter on a Looker query.
type Filter struct {
    Field string `json:"field,omitempty"`
    Value string `json:"value,omitempty"`
}

// DataResult contains the rows of a query result.
type DataResult struct {
    Data []map[string]any `json:"data"`
}

// AnalysisQuery holds the question and data inputs for an analysis step.
type AnalysisQuery struct {
    Question        string   `json:"question,omitempty"`
    DataResultNames []string `json:"dataResultNames,omitempty"`
}

// AnalysisEvent is a progress event emitted while an analysis runs.
type AnalysisEvent struct {
    PlannerReasoning      string `json:"plannerReasoning,omitempty"`
    CoderInstructions     string `json:"coderInstructions,omitempty"`
    Code                  string `json:"code,omitempty"`
    ExecutionOutput       string `json:"executionOutput,omitempty"`
    ExecutionError        string `json:"executionError,omitempty"`
    ResultVegaChartJson   string `json:"resultVegaChartJson,omitempty"`
    ResultNaturalLanguage string `json:"resultNaturalLanguage,omitempty"`
    ResultCsvData         string `json:"resultCsvData,omitempty"`
    ResultReferenceData   string `json:"resultReferenceData,omitempty"`
    Error                 string `json:"error,omitempty"`
}

// AnalysisMessage contains the analysis query and its progress events.
type AnalysisMessage struct {
    Query         AnalysisQuery `json:"query,omitempty"`
    ProgressEvent AnalysisEvent `json:"progressEvent,omitempty"`
}

// ErrorMessage represents an error message from the API.
type ErrorMessage struct {
    Text string `json:"text"`
}
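
// The stream these structs decode is a JSON array of message objects, e.g.
// (an illustrative sketch, not captured output):
//
//  [
//    {"systemMessage": {"schema": {"query": {"question": "..."}}}},
//    {"systemMessage": {"data": {"generatedLookerQuery": {"model": "thelook", "explore": "orders", "fields": ["orders.count"]}}}},
//    {"systemMessage": {"text": {"parts": ["There were ", "1,234 orders."]}}}
//  ]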

func getStream(ctx context.Context, url string, payload CAPayload, headers map[string]string) ([]map[string]any, error) {
    payloadBytes, err := json.Marshal(payload)
    if err != nil {
        return nil, fmt.Errorf("failed to marshal payload: %w", err)
    }

    // Tie the request to the caller's context so cancellation propagates.
    req, err := http.NewRequestWithContext(ctx, "POST", url, bytes.NewBuffer(payloadBytes))
    if err != nil {
        return nil, fmt.Errorf("failed to create request: %w", err)
    }
    for k, v := range headers {
        req.Header.Set(k, v)
    }

    client := &http.Client{}
    resp, err := client.Do(req)
    if err != nil {
        return nil, fmt.Errorf("failed to send request: %w", err)
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        body, _ := io.ReadAll(resp.Body)
        return nil, fmt.Errorf("API returned non-200 status: %d %s", resp.StatusCode, string(body))
    }

    var messages []map[string]any
    decoder := json.NewDecoder(resp.Body)

    // The response is a JSON array, so we read the opening bracket.
    if _, err := decoder.Token(); err != nil {
        if err == io.EOF {
            return nil, nil // Empty response is valid
        }
        return nil, fmt.Errorf("error reading start of json array: %w", err)
    }

    for decoder.More() {
        var msg StreamMessage
        if err := decoder.Decode(&msg); err != nil {
            if err == io.EOF {
                break
            }
            return nil, fmt.Errorf("error decoding stream message: %w", err)
        }

        var newMessage map[string]any
        if msg.SystemMessage != nil {
            if msg.SystemMessage.Text != nil {
                newMessage = handleTextResponse(ctx, msg.SystemMessage.Text)
            } else if msg.SystemMessage.Schema != nil {
                newMessage = handleSchemaResponse(ctx, msg.SystemMessage.Schema)
            } else if msg.SystemMessage.Data != nil {
                newMessage = handleDataResponse(ctx, msg.SystemMessage.Data)
            } else if msg.SystemMessage.Analysis != nil {
                newMessage = handleAnalysisResponse(ctx, msg.SystemMessage.Analysis)
            } else if msg.SystemMessage.Error != nil {
                newMessage = handleError(ctx, msg.SystemMessage.Error)
            }
            messages = appendMessage(messages, newMessage)
        }
    }

    return messages, nil
}

func formatDatasourceAsDict(ctx context.Context, datasource *Datasource) map[string]any {
    logger, _ := util.LoggerFromContext(ctx)
    logger.DebugContext(ctx, fmt.Sprintf("Datasource %v", *datasource))
    ds := make(map[string]any)
    ds["model"] = datasource.LookerExploreReference.LookmlModel
    ds["explore"] = datasource.LookerExploreReference.Explore
    ds["lookerInstanceUri"] = datasource.LookerExploreReference.LookerInstanceUri
    return map[string]any{"Datasource": ds}
}

func handleAnalysisResponse(ctx context.Context, resp *AnalysisMessage) map[string]any {
    logger, _ := util.LoggerFromContext(ctx)
    jsonData, err := json.Marshal(*resp)
    if err != nil {
        logger.ErrorContext(ctx, fmt.Sprintf("error marshaling struct: %v", err))
        return map[string]any{"Analysis": "error"}
    }
    // Return the JSON as a string so it is not re-encoded as base64 bytes.
    return map[string]any{"Analysis": string(jsonData)}
}

func handleTextResponse(ctx context.Context, resp *TextMessage) map[string]any {
    logger, _ := util.LoggerFromContext(ctx)
    logger.DebugContext(ctx, fmt.Sprintf("Text Response: %s", strings.Join(resp.Parts, "")))
    return map[string]any{"Answer": strings.Join(resp.Parts, "")}
}

func handleSchemaResponse(ctx context.Context, resp *SchemaMessage) map[string]any {
    if resp.Query != nil {
        return map[string]any{"Question": resp.Query.Question}
    }
    if resp.Result != nil {
        var formattedSources []map[string]any
        for _, ds := range resp.Result.Datasources {
            formattedSources = append(formattedSources, formatDatasourceAsDict(ctx, &ds))
        }
        return map[string]any{"Schema Resolved": formattedSources}
    }
    return nil
}

func handleDataResponse(ctx context.Context, resp *DataMessage) map[string]any {
    if resp.GeneratedLookerQuery != nil {
        logger, _ := util.LoggerFromContext(ctx)
        jsonData, err := json.Marshal(resp.GeneratedLookerQuery)
        if err != nil {
            logger.ErrorContext(ctx, fmt.Sprintf("error marshaling struct: %v", err))
            return map[string]any{"Retrieval Query": "error"}
        }
        // Return the JSON as a string so it is not re-encoded as base64 bytes.
        return map[string]any{
            "Retrieval Query": string(jsonData),
        }
    }
    if resp.Result != nil {
        return map[string]any{
            "Data Retrieved": resp.Result.Data,
        }
    }
    return nil
}

func handleError(ctx context.Context, resp *ErrorMessage) map[string]any {
    logger, _ := util.LoggerFromContext(ctx)
    logger.DebugContext(ctx, fmt.Sprintf("Error Response: %s", resp.Text))
    return map[string]any{
        "Error": map[string]any{
            "Message": resp.Text,
        },
    }
}

// appendMessage appends newMessage to messages, first dropping a trailing
// "Data Retrieved" entry so only the most recent data payload is kept.
// nil messages are ignored.
func appendMessage(messages []map[string]any, newMessage map[string]any) []map[string]any {
    if newMessage == nil {
        return messages
    }
    if len(messages) > 0 {
        if _, ok := messages[len(messages)-1]["Data Retrieved"]; ok {
            messages = messages[:len(messages)-1]
        }
    }
    return append(messages, newMessage)
}

@@ -0,0 +1,72 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package lookerconversationalanalytics_test

import (
    "testing"

    yaml "github.com/goccy/go-yaml"
    "github.com/google/go-cmp/cmp"
    "github.com/googleapis/genai-toolbox/internal/server"
    "github.com/googleapis/genai-toolbox/internal/testutils"
    "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerconversationalanalytics"
)

func TestParseFromYamlLookerConversationalAnalytics(t *testing.T) {
    ctx, err := testutils.ContextWithNewLogger()
    if err != nil {
        t.Fatalf("unexpected error: %s", err)
    }
    tcs := []struct {
        desc string
        in   string
        want server.ToolConfigs
    }{
        {
            desc: "basic example",
            in: `
            tools:
                example_tool:
                    kind: looker-conversational-analytics
                    source: my-instance
                    description: some description
            `,
            want: server.ToolConfigs{
                "example_tool": lookerconversationalanalytics.Config{
                    Name:         "example_tool",
                    Kind:         "looker-conversational-analytics",
                    Source:       "my-instance",
                    Description:  "some description",
                    AuthRequired: []string{},
                },
            },
        },
    }
    for _, tc := range tcs {
        t.Run(tc.desc, func(t *testing.T) {
            got := struct {
                Tools server.ToolConfigs `yaml:"tools"`
            }{}
            // Parse contents
            err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
            if err != nil {
                t.Fatalf("unable to unmarshal: %s", err)
            }
            if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
                t.Fatalf("incorrect parse: diff %v", diff)
            }
        })
    }
}

555 internal/tools/looker/lookerhealthanalyze/lookerhealthanalyze.go Normal file
@@ -0,0 +1,555 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package lookerhealthanalyze

import (
    "context"
    "encoding/json"
    "fmt"
    "regexp"
    "strings"

    yaml "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/sources"
    lookersrc "github.com/googleapis/genai-toolbox/internal/sources/looker"
    "github.com/googleapis/genai-toolbox/internal/tools"
    "github.com/googleapis/genai-toolbox/internal/tools/looker/lookercommon"
    "github.com/googleapis/genai-toolbox/internal/util"
    "github.com/looker-open-source/sdk-codegen/go/rtl"
    v4 "github.com/looker-open-source/sdk-codegen/go/sdk/v4"
)

// =================================================================================================================
// START MCP SERVER CORE LOGIC
// =================================================================================================================
const kind string = "looker-health-analyze"

func init() {
    if !tools.Register(kind, newConfig) {
        panic(fmt.Sprintf("tool kind %q already registered", kind))
    }
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
    actual := Config{Name: name}
    if err := decoder.DecodeContext(ctx, &actual); err != nil {
        return nil, err
    }
    return actual, nil
}

type Config struct {
    Name         string         `yaml:"name" validate:"required"`
    Kind         string         `yaml:"kind" validate:"required"`
    Source       string         `yaml:"source" validate:"required"`
    Description  string         `yaml:"description" validate:"required"`
    AuthRequired []string       `yaml:"authRequired"`
    Parameters   map[string]any `yaml:"parameters"`
}
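
// An illustrative tools.yaml entry (tool and source names are hypothetical;
// the shape mirrors the parse test for this kind later in this change):
//
//  tools:
//    health_analyze:
//      kind: looker-health-analyze
//      source: my-looker-source
//      description: Analyze usage of Looker projects, models, and explores.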

var _ tools.ToolConfig = Config{}

func (cfg Config) ToolConfigKind() string {
    return kind
}

func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
    rawS, ok := srcs[cfg.Source]
    if !ok {
        return nil, fmt.Errorf("no source named %q configured", cfg.Source)
    }

    s, ok := rawS.(*lookersrc.Source)
    if !ok {
        return nil, fmt.Errorf("invalid source for %q tool: source kind must be `looker`", kind)
    }

    actionParameter := tools.NewStringParameterWithRequired("action", "The analysis to run. Can be 'projects', 'models', or 'explores'.", true)
    projectParameter := tools.NewStringParameterWithRequired("project", "The Looker project to analyze (optional).", false)
    modelParameter := tools.NewStringParameterWithRequired("model", "The Looker model to analyze (optional).", false)
    exploreParameter := tools.NewStringParameterWithRequired("explore", "The Looker explore to analyze (optional).", false)
    timeframeParameter := tools.NewIntParameterWithDefault("timeframe", 90, "The timeframe in days to analyze.")
    minQueriesParameter := tools.NewIntParameterWithDefault("min_queries", 0, "The minimum number of queries for a model or explore to be considered used.")

    parameters := tools.Parameters{
        actionParameter,
        projectParameter,
        modelParameter,
        exploreParameter,
        timeframeParameter,
        minQueriesParameter,
    }

    mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, parameters)

    return Tool{
        Name:           cfg.Name,
        Kind:           kind,
        Parameters:     parameters,
        AuthRequired:   cfg.AuthRequired,
        UseClientOAuth: s.UseClientOAuth,
        Client:         s.Client,
        ApiSettings:    s.ApiSettings,
        manifest: tools.Manifest{
            Description:  cfg.Description,
            Parameters:   parameters.Manifest(),
            AuthRequired: cfg.AuthRequired,
        },
        mcpManifest: mcpManifest,
    }, nil
}

var _ tools.Tool = Tool{}

type Tool struct {
    Name           string `yaml:"name"`
    Kind           string `yaml:"kind"`
    UseClientOAuth bool
    Client         *v4.LookerSDK
    ApiSettings    *rtl.ApiSettings
    AuthRequired   []string `yaml:"authRequired"`
    Parameters     tools.Parameters
    manifest       tools.Manifest
    mcpManifest    tools.McpManifest
}

func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
    logger, err := util.LoggerFromContext(ctx)
    if err != nil {
        return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
    }

    sdk, err := lookercommon.GetLookerSDK(t.UseClientOAuth, t.ApiSettings, t.Client, accessToken)
    if err != nil {
        return nil, fmt.Errorf("error getting sdk: %w", err)
    }

    paramsMap := params.AsMap()
    timeframe, _ := paramsMap["timeframe"].(int)
    if timeframe == 0 {
        timeframe = 90
    }
    // Floor min_queries at 1 so "used" always means at least one query.
    minQueries, _ := paramsMap["min_queries"].(int)
    if minQueries == 0 {
        minQueries = 1
    }

    analyzeTool := &analyzeTool{
        SdkClient:  sdk,
        timeframe:  timeframe,
        minQueries: minQueries,
    }

    action, ok := paramsMap["action"].(string)
    if !ok {
        return nil, fmt.Errorf("action parameter not found")
    }

    switch action {
    case "projects":
        projectId, _ := paramsMap["project"].(string)
        result, err := analyzeTool.projects(ctx, projectId)
        if err != nil {
            return nil, fmt.Errorf("error analyzing projects: %w", err)
        }
        logger.DebugContext(ctx, fmt.Sprintf("result = %v", result))
        return result, nil
    case "models":
        projectName, _ := paramsMap["project"].(string)
        modelName, _ := paramsMap["model"].(string)
        result, err := analyzeTool.models(ctx, projectName, modelName)
        if err != nil {
            return nil, fmt.Errorf("error analyzing models: %w", err)
        }
        logger.DebugContext(ctx, fmt.Sprintf("result = %v", result))
        return result, nil
    case "explores":
        modelName, _ := paramsMap["model"].(string)
        exploreName, _ := paramsMap["explore"].(string)
        result, err := analyzeTool.explores(ctx, modelName, exploreName)
        if err != nil {
            return nil, fmt.Errorf("error analyzing explores: %w", err)
        }
        logger.DebugContext(ctx, fmt.Sprintf("result = %v", result))
        return result, nil
    default:
        return nil, fmt.Errorf("unknown action: %s", action)
    }
}

func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
    return tools.ParseParams(t.Parameters, data, claims)
}

func (t Tool) Manifest() tools.Manifest {
    return t.manifest
}

func (t Tool) McpManifest() tools.McpManifest {
    return t.mcpManifest
}

func (t Tool) Authorized(verifiedAuthServices []string) bool {
    return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}

func (t Tool) RequiresClientAuthorization() bool {
    return t.UseClientOAuth
}

// =================================================================================================================
// END MCP SERVER CORE LOGIC
// =================================================================================================================

// =================================================================================================================
// START LOOKER HEALTH ANALYZE CORE LOGIC
// =================================================================================================================
type analyzeTool struct {
    SdkClient  *v4.LookerSDK
    timeframe  int
    minQueries int
}

func (t *analyzeTool) projects(ctx context.Context, id string) ([]map[string]interface{}, error) {
    logger, err := util.LoggerFromContext(ctx)
    if err != nil {
        return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
    }

    var projects []*v4.Project
    if id != "" {
        p, err := t.SdkClient.Project(id, "", nil)
        if err != nil {
            return nil, fmt.Errorf("error fetching project %s: %w", id, err)
        }
        projects = append(projects, &p)
    } else {
        allProjects, err := t.SdkClient.AllProjects("", nil)
        if err != nil {
            return nil, fmt.Errorf("error fetching all projects: %w", err)
        }
        for i := range allProjects {
            projects = append(projects, &allProjects[i])
        }
    }

    var results []map[string]interface{}
    for _, p := range projects {
        pName := *p.Name
        pID := *p.Id
        logger.InfoContext(ctx, fmt.Sprintf("Analyzing project: %s", pName))

        projectFiles, err := t.SdkClient.AllProjectFiles(pID, "", nil)
        if err != nil {
            return nil, fmt.Errorf("error fetching files for project %s: %w", pName, err)
        }

        modelCount := 0
        viewFileCount := 0
        for _, f := range projectFiles {
            if f.Type != nil {
                if *f.Type == "model" {
                    modelCount++
                }
                if *f.Type == "view" {
                    viewFileCount++
                }
            }
        }

        gitConnectionStatus := "OK"
        if p.GitRemoteUrl == nil {
            gitConnectionStatus = "No repo found"
        } else if strings.Contains(*p.GitRemoteUrl, "/bare_models/") {
            gitConnectionStatus = "Bare repo, no tests required"
        }

        results = append(results, map[string]interface{}{
            "Project":                 pName,
            "# Models":                modelCount,
            "# View Files":            viewFileCount,
            "Git Connection Status":   gitConnectionStatus,
            "PR Mode":                 string(*p.PullRequestMode),
            "Is Validation Required":  *p.ValidationRequired,
        })
    }
    return results, nil
}

func (t *analyzeTool) models(ctx context.Context, project, model string) ([]map[string]interface{}, error) {
    logger, err := util.LoggerFromContext(ctx)
    if err != nil {
        return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
    }
    logger.InfoContext(ctx, "Analyzing models...")

    usedModels, err := t.getUsedModels(ctx)
    if err != nil {
        return nil, err
    }

    lookmlModels, err := t.SdkClient.AllLookmlModels(v4.RequestAllLookmlModels{}, nil)
    if err != nil {
        return nil, fmt.Errorf("error fetching LookML models: %w", err)
    }

    var results []map[string]interface{}
    for _, m := range lookmlModels {
        if (project == "" || (m.ProjectName != nil && *m.ProjectName == project)) &&
            (model == "" || (m.Name != nil && *m.Name == model)) {

            queryCount := 0
            if qc, ok := usedModels[*m.Name]; ok {
                queryCount = qc
            }

            exploreCount := 0
            if m.Explores != nil {
                exploreCount = len(*m.Explores)
            }

            results = append(results, map[string]interface{}{
                "Project":     *m.ProjectName,
                "Model":       *m.Name,
                "# Explores":  exploreCount,
                "Query Count": queryCount,
            })
        }
    }
    return results, nil
}

func (t *analyzeTool) getUsedModels(ctx context.Context) (map[string]int, error) {
    limit := "5000"
    query := &v4.WriteQuery{
        Model:  "system__activity",
        View:   "history",
        Fields: &[]string{"history.query_run_count", "query.model"},
        Filters: &map[string]any{
            "history.created_date":    fmt.Sprintf("%d days", t.timeframe),
            "query.model":             "-system__activity, -i__looker",
            "history.query_run_count": fmt.Sprintf(">%d", t.minQueries-1),
            "user.dev_branch_name":    "NULL",
        },
        Limit: &limit,
    }
    raw, err := lookercommon.RunInlineQuery(ctx, t.SdkClient, query, "json", nil)
    if err != nil {
        return nil, err
    }

    var data []map[string]interface{}
    if err := json.Unmarshal([]byte(raw), &data); err != nil {
        return nil, fmt.Errorf("error parsing model usage results: %w", err)
    }

    results := make(map[string]int)
    for _, row := range data {
        model, _ := row["query.model"].(string)
        count, _ := row["history.query_run_count"].(float64)
        results[model] = int(count)
    }
    return results, nil
}
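
// The system__activity rows parsed above look roughly like (illustrative,
// not captured output):
//
//  [{"query.model": "thelook", "history.query_run_count": 42}, ...]
//
// so the returned map for that row would be {"thelook": 42}.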

func (t *analyzeTool) getUsedExploreFields(ctx context.Context, model, explore string) (map[string]int, error) {
    limit := "5000"
    query := &v4.WriteQuery{
        Model:  "system__activity",
        View:   "history",
        Fields: &[]string{"query.formatted_fields", "query.filters", "history.query_run_count"},
        Filters: &map[string]any{
            "history.created_date":   fmt.Sprintf("%d days", t.timeframe),
            "query.model":            strings.ReplaceAll(model, "_", "^_"),
            "query.view":             strings.ReplaceAll(explore, "_", "^_"),
            "query.formatted_fields": "-NULL",
            "history.workspace_id":   "production",
        },
        Limit: &limit,
    }
    raw, err := lookercommon.RunInlineQuery(ctx, t.SdkClient, query, "json", nil)
    if err != nil {
        return nil, err
    }

    var data []map[string]interface{}
    if err := json.Unmarshal([]byte(raw), &data); err != nil {
        return nil, fmt.Errorf("error parsing field usage results: %w", err)
    }

    results := make(map[string]int)
    fieldRegex := regexp.MustCompile(`(\w+\.\w+)`)

    for _, row := range data {
        count, _ := row["history.query_run_count"].(float64)
        formattedFields, _ := row["query.formatted_fields"].(string)
        filters, _ := row["query.filters"].(string)

        usedFields := make(map[string]bool)

        for _, field := range fieldRegex.FindAllString(formattedFields, -1) {
            results[field] += int(count)
            usedFields[field] = true
        }

        // Credit filter-only fields once, without double-counting fields
        // already seen in formatted_fields.
        for _, field := range fieldRegex.FindAllString(filters, -1) {
            if _, ok := usedFields[field]; !ok {
                results[field] += int(count)
            }
        }
    }
    return results, nil
}
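
// fieldRegex matches view-qualified field names, so for a row whose
// formatted_fields string contains `["orders.count", "users.city"]` it would
// credit "orders.count" and "users.city" once each with that row's run count
// (a hypothetical example of the matching behavior).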

func (t *analyzeTool) explores(ctx context.Context, model, explore string) ([]map[string]interface{}, error) {
    logger, err := util.LoggerFromContext(ctx)
    if err != nil {
        return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
    }
    logger.InfoContext(ctx, "Analyzing explores...")

    lookmlModels, err := t.SdkClient.AllLookmlModels(v4.RequestAllLookmlModels{}, nil)
    if err != nil {
        return nil, fmt.Errorf("error fetching LookML models: %w", err)
    }

    var results []map[string]interface{}
    for _, m := range lookmlModels {
        if model != "" && (m.Name == nil || *m.Name != model) {
            continue
        }
        if m.Explores == nil {
            continue
        }

        for _, e := range *m.Explores {
            if explore != "" && (e.Name == nil || *e.Name != explore) {
                continue
            }
            if e.Name == nil {
                continue
            }

            // Get detailed explore info to count fields and joins
            req := v4.RequestLookmlModelExplore{
                LookmlModelName: *m.Name,
                ExploreName:     *e.Name,
            }
            exploreDetail, err := t.SdkClient.LookmlModelExplore(req, nil)
            if err != nil {
                // Log the error but continue to the next explore if possible
                logger.ErrorContext(ctx, fmt.Sprintf("Error fetching detail for explore %s.%s: %v", *m.Name, *e.Name, err))
                continue
            }

            // Guard the SDK's pointer fields before dereferencing them.
            fieldCount := 0
            if exploreDetail.Fields != nil {
                if exploreDetail.Fields.Dimensions != nil {
                    fieldCount += len(*exploreDetail.Fields.Dimensions)
                }
                if exploreDetail.Fields.Measures != nil {
                    fieldCount += len(*exploreDetail.Fields.Measures)
                }
            }

            joinCount := 0
            if exploreDetail.Joins != nil {
                joinCount = len(*exploreDetail.Joins)
            }

            usedFields, err := t.getUsedExploreFields(ctx, *m.Name, *e.Name)
            if err != nil {
                logger.ErrorContext(ctx, fmt.Sprintf("Error fetching used fields for explore %s.%s: %v", *m.Name, *e.Name, err))
                continue
            }

            allFields := []string{}
            if exploreDetail.Fields != nil {
                if exploreDetail.Fields.Dimensions != nil {
                    for _, d := range *exploreDetail.Fields.Dimensions {
                        if !*d.Hidden {
                            allFields = append(allFields, *d.Name)
                        }
                    }
                }
                if exploreDetail.Fields.Measures != nil {
                    for _, ms := range *exploreDetail.Fields.Measures {
                        if !*ms.Hidden {
                            allFields = append(allFields, *ms.Name)
                        }
                    }
                }
            }

            unusedFieldsCount := 0
            for _, field := range allFields {
                if _, ok := usedFields[field]; !ok {
                    unusedFieldsCount++
                }
            }

            joinStats := make(map[string]int)
            if exploreDetail.Joins != nil {
                for field, queryCount := range usedFields {
                    join := strings.Split(field, ".")[0]
                    joinStats[join] += queryCount
                }
                for _, join := range *exploreDetail.Joins {
                    if _, ok := joinStats[*join.Name]; !ok {
                        joinStats[*join.Name] = 0
                    }
                }
            }

            unusedJoinsCount := 0
            for _, count := range joinStats {
                if count == 0 {
                    unusedJoinsCount++
                }
            }

            // Use an inline query to get query count for the explore
            limit := "1"
            queryCountQueryBody := &v4.WriteQuery{
                Model:  "system__activity",
                View:   "history",
                Fields: &[]string{"history.query_run_count"},
                Filters: &map[string]any{
                    "query.model":             *m.Name,
                    "query.view":              *e.Name,
                    "history.created_date":    fmt.Sprintf("%d days", t.timeframe),
                    "history.query_run_count": fmt.Sprintf(">%d", t.minQueries-1),
                    "user.dev_branch_name":    "NULL",
                },
                Limit: &limit,
            }

            rawQueryCount, err := lookercommon.RunInlineQuery(ctx, t.SdkClient, queryCountQueryBody, "json", nil)
            if err != nil {
                return nil, err
            }
            queryCount := 0
            var data []map[string]interface{}
            if err := json.Unmarshal([]byte(rawQueryCount), &data); err != nil {
                return nil, fmt.Errorf("error parsing query count results: %w", err)
            }
            if len(data) > 0 {
                if count, ok := data[0]["history.query_run_count"].(float64); ok {
                    queryCount = int(count)
                }
            }

            results = append(results, map[string]interface{}{
                "Model":           *m.Name,
                "Explore":         *e.Name,
                "Is Hidden":       *e.Hidden,
                "Has Description": e.Description != nil && *e.Description != "",
                "# Joins":         joinCount,
                "# Unused Joins":  unusedJoinsCount,
                "# Unused Fields": unusedFieldsCount,
                "# Fields":        fieldCount,
                "Query Count":     queryCount,
            })
        }
    }
    return results, nil
}

// =================================================================================================================
// END LOOKER HEALTH ANALYZE CORE LOGIC
// =================================================================================================================

@@ -0,0 +1,113 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package lookerhealthanalyze_test

import (
    "strings"
    "testing"

    yaml "github.com/goccy/go-yaml"
    "github.com/google/go-cmp/cmp"
    "github.com/googleapis/genai-toolbox/internal/server"
    "github.com/googleapis/genai-toolbox/internal/testutils"
    lha "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthanalyze"
)

func TestParseFromYamlLookerHealthAnalyze(t *testing.T) {
    ctx, err := testutils.ContextWithNewLogger()
    if err != nil {
        t.Fatalf("unexpected error: %s", err)
    }
    tcs := []struct {
        desc string
        in   string
        want server.ToolConfigs
    }{
        {
            desc: "basic example",
            in: `
            tools:
                example_tool:
                    kind: looker-health-analyze
                    source: my-instance
                    description: some description
            `,
            want: server.ToolConfigs{
                "example_tool": lha.Config{
                    Name:         "example_tool",
                    Kind:         "looker-health-analyze",
                    Source:       "my-instance",
                    Description:  "some description",
                    AuthRequired: []string{},
                },
            },
        },
    }
    for _, tc := range tcs {
        t.Run(tc.desc, func(t *testing.T) {
            got := struct {
                Tools server.ToolConfigs `yaml:"tools"`
            }{}
            // Parse contents
            err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
            if err != nil {
                t.Fatalf("unable to unmarshal: %s", err)
            }
            if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
                t.Fatalf("incorrect parse: diff %v", diff)
            }
        })
    }
}

func TestFailParseFromYamlLookerHealthAnalyze(t *testing.T) {
    ctx, err := testutils.ContextWithNewLogger()
    if err != nil {
        t.Fatalf("unexpected error: %s", err)
    }
    tcs := []struct {
        desc string
        in   string
        err  string
    }{
        {
            desc: "Invalid field",
            in: `
            tools:
                example_tool:
                    kind: looker-health-analyze
                    source: my-instance
                    invalid_field: true
            `,
            err: "unable to parse tool \"example_tool\" as kind \"looker-health-analyze\": [2:1] unknown field \"invalid_field\"",
        },
    }
    for _, tc := range tcs {
        t.Run(tc.desc, func(t *testing.T) {
            got := struct {
                Tools server.ToolConfigs `yaml:"tools"`
            }{}
            // Parse contents
            err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
            if err == nil {
                t.Fatalf("expect parsing to fail")
            }
            errStr := err.Error()
            if !strings.Contains(errStr, tc.err) {
                t.Fatalf("unexpected error string: got %q, want substring %q", errStr, tc.err)
            }
        })
    }
}

459 internal/tools/looker/lookerhealthpulse/lookerhealthpulse.go Normal file
@@ -0,0 +1,459 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package lookerhealthpulse

import (
    "context"
    "encoding/json"
    "fmt"
    "strings"

    yaml "github.com/goccy/go-yaml"
    "github.com/googleapis/genai-toolbox/internal/sources"
    lookersrc "github.com/googleapis/genai-toolbox/internal/sources/looker"
    "github.com/googleapis/genai-toolbox/internal/tools"
    "github.com/googleapis/genai-toolbox/internal/tools/looker/lookercommon"
    "github.com/googleapis/genai-toolbox/internal/util"

    "github.com/looker-open-source/sdk-codegen/go/rtl"
    v4 "github.com/looker-open-source/sdk-codegen/go/sdk/v4"
)

// =================================================================================================================
// START MCP SERVER CORE LOGIC
// =================================================================================================================
const kind string = "looker-health-pulse"

func init() {
    if !tools.Register(kind, newConfig) {
        panic(fmt.Sprintf("tool kind %q already registered", kind))
    }
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
    actual := Config{Name: name}
    if err := decoder.DecodeContext(ctx, &actual); err != nil {
        return nil, err
    }
    return actual, nil
}

type Config struct {
    Name         string         `yaml:"name" validate:"required"`
    Kind         string         `yaml:"kind" validate:"required"`
    Source       string         `yaml:"source" validate:"required"`
    Description  string         `yaml:"description" validate:"required"`
    AuthRequired []string       `yaml:"authRequired"`
    Parameters   map[string]any `yaml:"parameters"`
}
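
// An illustrative tools.yaml entry (names are hypothetical; the shape
// matches the parse test for this kind later in this change):
//
//  tools:
//    health_pulse:
//      kind: looker-health-pulse
//      source: my-looker-source
//      description: Run Looker instance health checks.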

// validate interface
var _ tools.ToolConfig = Config{}

func (cfg Config) ToolConfigKind() string {
    return kind
}

func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
    // verify source exists
    rawS, ok := srcs[cfg.Source]
    if !ok {
        return nil, fmt.Errorf("no source named %q configured", cfg.Source)
    }

    // verify the source is compatible
    s, ok := rawS.(*lookersrc.Source)
    if !ok {
        return nil, fmt.Errorf("invalid source for %q tool: source kind must be `looker`", kind)
    }

    actionParameter := tools.NewStringParameterWithRequired("action", "The health check to run. Can be either: `check_db_connections`, `check_dashboard_performance`, `check_dashboard_errors`, `check_explore_performance`, `check_schedule_failures`, or `check_legacy_features`", true)

    parameters := tools.Parameters{
        actionParameter,
    }

    mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, parameters)

    // finish tool setup
    return Tool{
        Name:           cfg.Name,
        Kind:           kind,
        Parameters:     parameters,
        AuthRequired:   cfg.AuthRequired,
        UseClientOAuth: s.UseClientOAuth,
        Client:         s.Client,
        ApiSettings:    s.ApiSettings,
        manifest: tools.Manifest{
            Description:  cfg.Description,
            Parameters:   parameters.Manifest(),
            AuthRequired: cfg.AuthRequired,
        },
        mcpManifest: mcpManifest,
    }, nil
}

// validate interface
var _ tools.Tool = Tool{}

type Tool struct {
    Name           string `yaml:"name"`
    Kind           string `yaml:"kind"`
    UseClientOAuth bool
    Client         *v4.LookerSDK
    ApiSettings    *rtl.ApiSettings
    AuthRequired   []string         `yaml:"authRequired"`
    Parameters     tools.Parameters `yaml:"parameters"`
    manifest       tools.Manifest
    mcpManifest    tools.McpManifest
}

func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
    logger, err := util.LoggerFromContext(ctx)
    if err != nil {
        return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
    }

    sdk, err := lookercommon.GetLookerSDK(t.UseClientOAuth, t.ApiSettings, t.Client, accessToken)
    if err != nil {
        return nil, fmt.Errorf("error getting sdk: %w", err)
    }

    pulseTool := &pulseTool{
        ApiSettings: t.ApiSettings,
        SdkClient:   sdk,
    }

    paramsMap := params.AsMap()
    action, ok := paramsMap["action"].(string)
    if !ok {
        return nil, fmt.Errorf("action parameter not found")
    }

    pulseParams := PulseParams{
        Action: action,
    }

    result, err := pulseTool.RunPulse(ctx, pulseParams)
    if err != nil {
        return nil, fmt.Errorf("error running pulse: %w", err)
    }

    logger.DebugContext(ctx, fmt.Sprintf("result = %v", result))

    return result, nil
}

func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
    return tools.ParseParams(t.Parameters, data, claims)
}

func (t Tool) Manifest() tools.Manifest {
    return t.manifest
}

func (t Tool) McpManifest() tools.McpManifest {
    return t.mcpManifest
}

func (t Tool) Authorized(verifiedAuthServices []string) bool {
    return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}

func (t Tool) RequiresClientAuthorization() bool {
    return t.UseClientOAuth
}

// =================================================================================================================
// END MCP SERVER CORE LOGIC
// =================================================================================================================

// =================================================================================================================
// START LOOKER HEALTH PULSE CORE LOGIC
// =================================================================================================================
type PulseParams struct {
    Action string
    // Optionally add more parameters if needed
}

// pulseTool holds Looker API settings and client
type pulseTool struct {
    ApiSettings *rtl.ApiSettings
    SdkClient   *v4.LookerSDK
}

func (t *pulseTool) RunPulse(ctx context.Context, params PulseParams) (interface{}, error) {
    switch params.Action {
    case "check_db_connections":
        return t.checkDBConnections(ctx)
    case "check_dashboard_performance":
        return t.checkDashboardPerformance(ctx)
    case "check_dashboard_errors":
        return t.checkDashboardErrors(ctx)
    case "check_explore_performance":
        return t.checkExplorePerformance(ctx)
    case "check_schedule_failures":
        return t.checkScheduleFailures(ctx)
    case "check_legacy_features":
        return t.checkLegacyFeatures(ctx)
    default:
        return nil, fmt.Errorf("unknown action: %s", params.Action)
    }
}
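
// Example dispatch (a hypothetical sketch; assumes `settings` and `sdk` are
// an already-configured rtl.ApiSettings and Looker SDK client):
//
//  pt := &pulseTool{ApiSettings: settings, SdkClient: sdk}
//  result, err := pt.RunPulse(ctx, PulseParams{Action: "check_db_connections"})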

// Check DB connections and run tests
func (t *pulseTool) checkDBConnections(ctx context.Context) (interface{}, error) {
    logger, err := util.LoggerFromContext(ctx)
    if err != nil {
        return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
    }
    logger.InfoContext(ctx, "Test 1/6: Checking connections")

    reservedNames := map[string]struct{}{
        "looker__internal__analytics__replica": {},
        "looker__internal__analytics":          {},
        "looker":                               {},
        "looker__ilooker":                      {},
    }

    connections, err := t.SdkClient.AllConnections("", t.ApiSettings)
    if err != nil {
        return nil, fmt.Errorf("error fetching connections: %w", err)
    }

    var filteredConnections []v4.DBConnection
    for _, c := range connections {
        if _, reserved := reservedNames[*c.Name]; !reserved {
            filteredConnections = append(filteredConnections, c)
        }
    }
    if len(filteredConnections) == 0 {
        return nil, fmt.Errorf("no connections found")
    }

    var results []map[string]interface{}
    for _, conn := range filteredConnections {
        var errors []string
        // Test connection (simulate test_connection endpoint)
        resp, err := t.SdkClient.TestConnection(*conn.Name, nil, t.ApiSettings)
        if err != nil {
            errors = append(errors, "API JSONDecode Error")
        } else {
            for _, r := range resp {
                if *r.Status == "error" {
                    errors = append(errors, *r.Message)
                }
            }
        }

        // Run inline query for connection activity
        limit := "1"
        query := &v4.WriteQuery{
            Model:  "system__activity",
            View:   "history",
            Fields: &[]string{"history.query_run_count"},
            Filters: &map[string]any{
                "history.connection_name": *conn.Name,
                "history.created_date":    "90 days",
                "user.dev_branch_name":    "NULL",
            },
            Limit: &limit,
        }
        raw, err := lookercommon.RunInlineQuery(ctx, t.SdkClient, query, "json", t.ApiSettings)
        if err != nil {
            return nil, err
        }
        var queryRunCount interface{}
        var data []map[string]interface{}
        if err := json.Unmarshal([]byte(raw), &data); err != nil {
            return nil, fmt.Errorf("error parsing connection activity results: %w", err)
        }
        if len(data) > 0 {
            queryRunCount = data[0]["history.query_run_count"]
        }

        results = append(results, map[string]interface{}{
            "Connection":  *conn.Name,
            "Status":      "OK",
            "Errors":      errors,
            "Query Count": queryRunCount,
        })
    }
    return results, nil
}

func (t *pulseTool) checkDashboardPerformance(ctx context.Context) (interface{}, error) {
    logger, err := util.LoggerFromContext(ctx)
    if err != nil {
        return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
    }
    logger.InfoContext(ctx, "Test 2/6: Checking for dashboards with queries slower than 30 seconds in the last 7 days")

    limit := "20"
    query := &v4.WriteQuery{
        Model:  "system__activity",
        View:   "history",
        Fields: &[]string{"dashboard.title", "query.count"},
        Filters: &map[string]any{
            "history.created_date": "7 days",
            "history.real_dash_id": "-NULL",
            "history.runtime":      ">30",
            "history.status":       "complete",
        },
        Sorts: &[]string{"query.count desc"},
        Limit: &limit,
    }
    raw, err := lookercommon.RunInlineQuery(ctx, t.SdkClient, query, "json", t.ApiSettings)
    if err != nil {
        return nil, err
    }
    var dashboards []map[string]interface{}
    if err := json.Unmarshal([]byte(raw), &dashboards); err != nil {
        return nil, err
    }
    return dashboards, nil
}

func (t *pulseTool) checkDashboardErrors(ctx context.Context) (interface{}, error) {
    logger, err := util.LoggerFromContext(ctx)
    if err != nil {
        return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
    }
    logger.InfoContext(ctx, "Test 3/6: Checking for dashboards with erroring queries in the last 7 days")

    limit := "20"
    query := &v4.WriteQuery{
        Model:  "system__activity",
        View:   "history",
        Fields: &[]string{"dashboard.title", "history.query_run_count"},
        Filters: &map[string]any{
            "dashboard.title":           "-NULL",
            "history.created_date":      "7 days",
            "history.dashboard_session": "-NULL",
            "history.status":            "error",
        },
        Sorts: &[]string{"history.query_run_count desc"},
        Limit: &limit,
    }
    raw, err := lookercommon.RunInlineQuery(ctx, t.SdkClient, query, "json", t.ApiSettings)
    if err != nil {
        return nil, err
    }
    var dashboards []map[string]interface{}
    if err := json.Unmarshal([]byte(raw), &dashboards); err != nil {
        return nil, err
    }
    return dashboards, nil
}

func (t *pulseTool) checkExplorePerformance(ctx context.Context) (interface{}, error) {
    logger, err := util.LoggerFromContext(ctx)
    if err != nil {
        return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
    }
    logger.InfoContext(ctx, "Test 4/6: Checking for the slowest explores in the past 7 days")

    limit := "20"
    query := &v4.WriteQuery{
        Model:  "system__activity",
        View:   "history",
        Fields: &[]string{"query.model", "query.view", "history.average_runtime"},
        Filters: &map[string]any{
            "history.created_date": "7 days",
            "query.model":          "-NULL, -system^_^_activity",
        },
        Sorts: &[]string{"history.average_runtime desc"},
        Limit: &limit,
    }
    raw, err := lookercommon.RunInlineQuery(ctx, t.SdkClient, query, "json", t.ApiSettings)
    if err != nil {
        return nil, err
    }
    var explores []map[string]interface{}
    if err := json.Unmarshal([]byte(raw), &explores); err != nil {
        return nil, err
    }

    // Average query runtime
    query.Fields = &[]string{"history.average_runtime"}
    rawAvg, err := lookercommon.RunInlineQuery(ctx, t.SdkClient, query, "json", t.ApiSettings)
    if err != nil {
        return nil, err
    }
    var avgData []map[string]interface{}
    if err := json.Unmarshal([]byte(rawAvg), &avgData); err == nil {
        if len(avgData) > 0 {
            if avgRuntime, ok := avgData[0]["history.average_runtime"].(float64); ok {
                logger.InfoContext(ctx, fmt.Sprintf("For context, the average query runtime is %.4fs", avgRuntime))
            }
        }
    }
    return explores, nil
}

func (t *pulseTool) checkScheduleFailures(ctx context.Context) (interface{}, error) {
    logger, err := util.LoggerFromContext(ctx)
    if err != nil {
        return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
    }
    logger.InfoContext(ctx, "Test 5/6: Checking for failing schedules")

    limit := "500"
    query := &v4.WriteQuery{
        Model:  "system__activity",
        View:   "scheduled_plan",
        Fields: &[]string{"scheduled_job.name", "scheduled_job.count"},
        Filters: &map[string]any{
            "scheduled_job.created_date": "7 days",
            "scheduled_job.status":       "failure",
        },
        Sorts: &[]string{"scheduled_job.count desc"},
        Limit: &limit,
    }
    raw, err := lookercommon.RunInlineQuery(ctx, t.SdkClient, query, "json", t.ApiSettings)
    if err != nil {
        return nil, err
    }
    var schedules []map[string]interface{}
    if err := json.Unmarshal([]byte(raw), &schedules); err != nil {
        return nil, err
    }
    return schedules, nil
}
|
||||
|
||||
func (t *pulseTool) checkLegacyFeatures(ctx context.Context) (interface{}, error) {
|
||||
logger, err := util.LoggerFromContext(ctx)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
|
||||
}
|
||||
logger.InfoContext(ctx, "Test 6/6: Checking for enabled legacy features")
|
||||
|
||||
features, err := t.SdkClient.AllLegacyFeatures(t.ApiSettings)
|
||||
if err != nil {
|
||||
if strings.Contains(err.Error(), "Unsupported in Looker (Google Cloud core)") {
|
||||
return []map[string]string{{"Feature": "Unsupported in Looker (Google Cloud core)"}}, nil
|
||||
}
|
||||
logger.ErrorContext(ctx, err.Error())
|
||||
return []map[string]string{{"Feature": "Unable to pull legacy features due to SDK error"}}, nil
|
||||
}
|
||||
var legacyFeatures []map[string]string
|
||||
for _, f := range features {
|
||||
if *f.Enabled {
|
||||
legacyFeatures = append(legacyFeatures, map[string]string{"Feature": *f.Name})
|
||||
}
|
||||
}
|
||||
return legacyFeatures, nil
|
||||
}
|
||||
|
||||
// =================================================================================================================
|
||||
// END LOOKER HEALTH PULSE CORE LOGIC
|
||||
// =================================================================================================================
|
||||
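Every check in this file reduces to the same three steps: build a v4.WriteQuery against the system__activity model, run it with lookercommon.RunInlineQuery in JSON format, and unmarshal the rows. Below is a minimal sketch of that shared shape, written against the same package and imports as the code above; the helper name runActivityQuery is illustrative only and is not part of the tool.

// Illustrative only: the shared query pattern behind the pulse checks above.
func runActivityQuery(ctx context.Context, t *pulseTool, view string, fields []string, filters map[string]any, limit string) ([]map[string]interface{}, error) {
	query := &v4.WriteQuery{
		Model:   "system__activity",
		View:    view,
		Fields:  &fields,
		Filters: &filters,
		Limit:   &limit,
	}
	raw, err := lookercommon.RunInlineQuery(ctx, t.SdkClient, query, "json", t.ApiSettings)
	if err != nil {
		return nil, err
	}
	var rows []map[string]interface{}
	if err := json.Unmarshal([]byte(raw), &rows); err != nil {
		return nil, err
	}
	return rows, nil
}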
@@ -0,0 +1,113 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package lookerhealthpulse_test

import (
	"strings"
	"testing"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/server"
	"github.com/googleapis/genai-toolbox/internal/testutils"
	lhp "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthpulse"
)

func TestParseFromYamlLookerHealthPulse(t *testing.T) {
	ctx, err := testutils.ContextWithNewLogger()
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	tcs := []struct {
		desc string
		in   string
		want server.ToolConfigs
	}{
		{
			desc: "basic example",
			in: `
			tools:
				example_tool:
					kind: looker-health-pulse
					source: my-instance
					description: some description
			`,
			want: server.ToolConfigs{
				"example_tool": lhp.Config{
					Name:         "example_tool",
					Kind:         "looker-health-pulse",
					Source:       "my-instance",
					Description:  "some description",
					AuthRequired: []string{},
				},
			},
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Tools server.ToolConfigs `yaml:"tools"`
			}{}
			// Parse contents
			err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
			if err != nil {
				t.Fatalf("unable to unmarshal: %s", err)
			}
			if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
				t.Fatalf("incorrect parse: diff %v", diff)
			}
		})
	}
}

func TestFailParseFromYamlLookerHealthPulse(t *testing.T) {
	ctx, err := testutils.ContextWithNewLogger()
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	tcs := []struct {
		desc string
		in   string
		err  string
	}{
		{
			desc: "Invalid field",
			in: `
			tools:
				example_tool:
					kind: looker-health-pulse
					source: my-instance
					invalid_field: true
			`,
			err: "unable to parse tool \"example_tool\" as kind \"looker-health-pulse\": [2:1] unknown field \"invalid_field\"",
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Tools server.ToolConfigs `yaml:"tools"`
			}{}
			// Parse contents
			err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
			if err == nil {
				t.Fatalf("expect parsing to fail")
			}
			errStr := err.Error()
			if !strings.Contains(errStr, tc.err) {
				t.Fatalf("unexpected error string: got %q, want substring %q", errStr, tc.err)
			}
		})
	}
}
459
internal/tools/looker/lookerhealthvacuum/lookerhealthvacuum.go
Normal file
@@ -0,0 +1,459 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package lookerhealthvacuum

import (
	"context"
	"encoding/json"
	"fmt"
	"regexp"
	"strings"

	yaml "github.com/goccy/go-yaml"
	"github.com/googleapis/genai-toolbox/internal/sources"
	lookersrc "github.com/googleapis/genai-toolbox/internal/sources/looker"
	"github.com/googleapis/genai-toolbox/internal/tools"
	"github.com/googleapis/genai-toolbox/internal/tools/looker/lookercommon"
	"github.com/googleapis/genai-toolbox/internal/util"
	"github.com/looker-open-source/sdk-codegen/go/rtl"
	v4 "github.com/looker-open-source/sdk-codegen/go/sdk/v4"
)

// =================================================================================================================
// START MCP SERVER CORE LOGIC
// =================================================================================================================
const kind string = "looker-health-vacuum"

func init() {
	if !tools.Register(kind, newConfig) {
		panic(fmt.Sprintf("tool kind %q already registered", kind))
	}
}

func newConfig(ctx context.Context, name string, decoder *yaml.Decoder) (tools.ToolConfig, error) {
	actual := Config{Name: name}
	if err := decoder.DecodeContext(ctx, &actual); err != nil {
		return nil, err
	}
	return actual, nil
}

type Config struct {
	Name         string         `yaml:"name" validate:"required"`
	Kind         string         `yaml:"kind" validate:"required"`
	Source       string         `yaml:"source" validate:"required"`
	Description  string         `yaml:"description" validate:"required"`
	AuthRequired []string       `yaml:"authRequired"`
	Parameters   map[string]any `yaml:"parameters"`
}

var _ tools.ToolConfig = Config{}

func (cfg Config) ToolConfigKind() string {
	return kind
}

func (cfg Config) Initialize(srcs map[string]sources.Source) (tools.Tool, error) {
	rawS, ok := srcs[cfg.Source]
	if !ok {
		return nil, fmt.Errorf("no source named %q configured", cfg.Source)
	}

	s, ok := rawS.(*lookersrc.Source)
	if !ok {
		return nil, fmt.Errorf("invalid source for %q tool: source kind must be `looker`", kind)
	}

	actionParameter := tools.NewStringParameterWithRequired("action", "The vacuum action to run. Can be 'models', or 'explores'.", true)
	projectParameter := tools.NewStringParameterWithDefault("project", "", "The Looker project to vacuum (optional).")
	modelParameter := tools.NewStringParameterWithDefault("model", "", "The Looker model to vacuum (optional).")
	exploreParameter := tools.NewStringParameterWithDefault("explore", "", "The Looker explore to vacuum (optional).")
	timeframeParameter := tools.NewIntParameterWithDefault("timeframe", 90, "The timeframe in days to analyze.")
	minQueriesParameter := tools.NewIntParameterWithDefault("min_queries", 1, "The minimum number of queries for a model or explore to be considered used.")

	parameters := tools.Parameters{
		actionParameter,
		projectParameter,
		modelParameter,
		exploreParameter,
		timeframeParameter,
		minQueriesParameter,
	}

	mcpManifest := tools.GetMcpManifest(cfg.Name, cfg.Description, cfg.AuthRequired, parameters)

	return Tool{
		Name:           cfg.Name,
		Kind:           kind,
		Parameters:     parameters,
		AuthRequired:   cfg.AuthRequired,
		UseClientOAuth: s.UseClientOAuth,
		Client:         s.Client,
		ApiSettings:    s.ApiSettings,
		manifest: tools.Manifest{
			Description:  cfg.Description,
			Parameters:   parameters.Manifest(),
			AuthRequired: cfg.AuthRequired,
		},
		mcpManifest: mcpManifest,
	}, nil
}

var _ tools.Tool = Tool{}

type Tool struct {
	Name           string `yaml:"name"`
	Kind           string `yaml:"kind"`
	UseClientOAuth bool
	Client         *v4.LookerSDK
	ApiSettings    *rtl.ApiSettings
	AuthRequired   []string `yaml:"authRequired"`
	Parameters     tools.Parameters
	manifest       tools.Manifest
	mcpManifest    tools.McpManifest
}

func (t Tool) Invoke(ctx context.Context, params tools.ParamValues, accessToken tools.AccessToken) (any, error) {
	sdk, err := lookercommon.GetLookerSDK(t.UseClientOAuth, t.ApiSettings, t.Client, accessToken)
	if err != nil {
		return nil, fmt.Errorf("error getting sdk: %w", err)
	}

	paramsMap := params.AsMap()
	timeframe, _ := paramsMap["timeframe"].(int)
	if timeframe == 0 {
		timeframe = 90
	}
	minQueries, _ := paramsMap["min_queries"].(int)
	if minQueries == 0 {
		minQueries = 1
	}

	vacuumTool := &vacuumTool{
		SdkClient:  sdk,
		timeframe:  timeframe,
		minQueries: minQueries,
	}

	action, ok := paramsMap["action"].(string)
	if !ok {
		return nil, fmt.Errorf("action parameter not found")
	}

	switch action {
	case "models":
		project, _ := paramsMap["project"].(string)
		model, _ := paramsMap["model"].(string)
		return vacuumTool.models(ctx, project, model)
	case "explores":
		model, _ := paramsMap["model"].(string)
		explore, _ := paramsMap["explore"].(string)
		return vacuumTool.explores(ctx, model, explore)
	default:
		return nil, fmt.Errorf("unknown action: %s", action)
	}
}

func (t Tool) ParseParams(data map[string]any, claims map[string]map[string]any) (tools.ParamValues, error) {
	return tools.ParseParams(t.Parameters, data, claims)
}

func (t Tool) Manifest() tools.Manifest {
	return t.manifest
}

func (t Tool) McpManifest() tools.McpManifest {
	return t.mcpManifest
}

func (t Tool) Authorized(verifiedAuthServices []string) bool {
	return tools.IsAuthorized(t.AuthRequired, verifiedAuthServices)
}

func (t Tool) RequiresClientAuthorization() bool {
	return t.UseClientOAuth
}

// =================================================================================================================
// END MCP SERVER CORE LOGIC
// =================================================================================================================

// =================================================================================================================
// START LOOKER HEALTH VACUUM CORE LOGIC
// =================================================================================================================
type vacuumTool struct {
	SdkClient  *v4.LookerSDK
	timeframe  int
	minQueries int
}

func (t *vacuumTool) models(ctx context.Context, project, model string) ([]map[string]interface{}, error) {
	logger, err := util.LoggerFromContext(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
	}
	logger.InfoContext(ctx, "Vacuuming models...")

	usedModels, err := t.getUsedModels(ctx)
	if err != nil {
		return nil, err
	}

	lookmlModels, err := t.SdkClient.AllLookmlModels(v4.RequestAllLookmlModels{}, nil)
	if err != nil {
		return nil, fmt.Errorf("error fetching LookML models: %w", err)
	}

	var results []map[string]interface{}
	for _, m := range lookmlModels {
		if (project == "" || (m.ProjectName != nil && *m.ProjectName == project)) &&
			(model == "" || (m.Name != nil && *m.Name == model)) {

			queryCount := 0
			if qc, ok := usedModels[*m.Name]; ok {
				queryCount = qc
			}

			unusedExplores, err := t.getUnusedExplores(ctx, *m.Name)
			if err != nil {
				return nil, err
			}

			results = append(results, map[string]interface{}{
				"Model":             *m.Name,
				"Unused Explores":   unusedExplores,
				"Model Query Count": queryCount,
			})
		}
	}
	return results, nil
}

func (t *vacuumTool) explores(ctx context.Context, model, explore string) ([]map[string]interface{}, error) {
	logger, err := util.LoggerFromContext(ctx)
	if err != nil {
		return nil, fmt.Errorf("unable to get logger from ctx: %s", err)
	}
	logger.InfoContext(ctx, "Vacuuming explores...")

	lookmlModels, err := t.SdkClient.AllLookmlModels(v4.RequestAllLookmlModels{}, nil)
	if err != nil {
		return nil, fmt.Errorf("error fetching LookML models: %w", err)
	}

	var results []map[string]interface{}
	for _, m := range lookmlModels {
		if model != "" && (m.Name == nil || *m.Name != model) {
			continue
		}
		if m.Explores == nil {
			continue
		}

		for _, e := range *m.Explores {
			if explore != "" && (e.Name == nil || *e.Name != explore) {
				continue
			}
			if e.Name == nil {
				continue
			}

			exploreDetail, err := t.SdkClient.LookmlModelExplore(v4.RequestLookmlModelExplore{
				LookmlModelName: *m.Name,
				ExploreName:     *e.Name,
			}, nil)
			if err != nil {
				logger.ErrorContext(ctx, fmt.Sprintf("Error fetching detail for explore %s.%s: %v", *m.Name, *e.Name, err))
				continue
			}

			usedFields, err := t.getUsedExploreFields(ctx, *m.Name, *e.Name)
			if err != nil {
				logger.ErrorContext(ctx, fmt.Sprintf("Error fetching used fields for explore %s.%s: %v", *m.Name, *e.Name, err))
				continue
			}

			var allFields []string
			if exploreDetail.Fields != nil {
				for _, d := range *exploreDetail.Fields.Dimensions {
					if !*d.Hidden {
						allFields = append(allFields, *d.Name)
					}
				}
				for _, ms := range *exploreDetail.Fields.Measures {
					if !*ms.Hidden {
						allFields = append(allFields, *ms.Name)
					}
				}
			}

			var unusedFields []string
			for _, field := range allFields {
				if _, ok := usedFields[field]; !ok {
					unusedFields = append(unusedFields, field)
				}
			}

			joinStats := make(map[string]int)
			if exploreDetail.Joins != nil {
				for field, queryCount := range usedFields {
					join := strings.Split(field, ".")[0]
					joinStats[join] += queryCount
				}
				for _, join := range *exploreDetail.Joins {
					if _, ok := joinStats[*join.Name]; !ok {
						joinStats[*join.Name] = 0
					}
				}
			}

			var unusedJoins []string
			for join, count := range joinStats {
				if count == 0 {
					unusedJoins = append(unusedJoins, join)
				}
			}

			results = append(results, map[string]interface{}{
				"Model":         *m.Name,
				"Explore":       *e.Name,
				"Unused Joins":  unusedJoins,
				"Unused Fields": unusedFields,
			})
		}
	}
	return results, nil
}

func (t *vacuumTool) getUsedModels(ctx context.Context) (map[string]int, error) {
	limit := "5000"
	query := &v4.WriteQuery{
		Model:  "system__activity",
		View:   "history",
		Fields: &[]string{"history.query_run_count", "query.model"},
		Filters: &map[string]any{
			"history.created_date":    fmt.Sprintf("%d days", t.timeframe),
			"query.model":             "-system__activity, -i__looker",
			"history.query_run_count": fmt.Sprintf(">%d", t.minQueries-1),
			"user.dev_branch_name":    "NULL",
		},
		Limit: &limit,
	}
	raw, err := lookercommon.RunInlineQuery(ctx, t.SdkClient, query, "json", nil)
	if err != nil {
		return nil, err
	}

	var data []map[string]interface{}
	_ = json.Unmarshal([]byte(raw), &data)

	results := make(map[string]int)
	for _, row := range data {
		model, _ := row["query.model"].(string)
		count, _ := row["history.query_run_count"].(float64)
		results[model] = int(count)
	}
	return results, nil
}

func (t *vacuumTool) getUnusedExplores(ctx context.Context, modelName string) ([]string, error) {
	lookmlModel, err := t.SdkClient.LookmlModel(modelName, "", nil)
	if err != nil {
		return nil, fmt.Errorf("error fetching LookML model %s: %w", modelName, err)
	}

	var unusedExplores []string
	if lookmlModel.Explores != nil {
		for _, e := range *lookmlModel.Explores {
			limit := "1"
			queryCountQueryBody := &v4.WriteQuery{
				Model:  "system__activity",
				View:   "history",
				Fields: &[]string{"history.query_run_count"},
				Filters: &map[string]any{
					"query.model":             modelName,
					"query.view":              *e.Name,
					"history.created_date":    fmt.Sprintf("%d days", t.timeframe),
					"history.query_run_count": fmt.Sprintf(">%d", t.minQueries-1),
					"user.dev_branch_name":    "NULL",
				},
				Limit: &limit,
			}

			rawQueryCount, err := lookercommon.RunInlineQuery(ctx, t.SdkClient, queryCountQueryBody, "json", nil)
			if err != nil {
				// Skip this explore if its usage query fails.
				continue
			}

			var data []map[string]interface{}
			_ = json.Unmarshal([]byte(rawQueryCount), &data)
			if len(data) == 0 {
				unusedExplores = append(unusedExplores, *e.Name)
			}
		}
	}
	return unusedExplores, nil
}

func (t *vacuumTool) getUsedExploreFields(ctx context.Context, model, explore string) (map[string]int, error) {
	limit := "5000"
	query := &v4.WriteQuery{
		Model:  "system__activity",
		View:   "history",
		Fields: &[]string{"query.formatted_fields", "query.filters", "history.query_run_count"},
		Filters: &map[string]any{
			"history.created_date":   fmt.Sprintf("%d days", t.timeframe),
			"query.model":            strings.ReplaceAll(model, "_", "^_"),
			"query.view":             strings.ReplaceAll(explore, "_", "^_"),
			"query.formatted_fields": "-NULL",
			"history.workspace_id":   "production",
		},
		Limit: &limit,
	}
	raw, err := lookercommon.RunInlineQuery(ctx, t.SdkClient, query, "json", nil)
	if err != nil {
		return nil, err
	}

	var data []map[string]interface{}
	_ = json.Unmarshal([]byte(raw), &data)

	results := make(map[string]int)
	fieldRegex := regexp.MustCompile(`(\w+\.\w+)`)

	for _, row := range data {
		count, _ := row["history.query_run_count"].(float64)
		formattedFields, _ := row["query.formatted_fields"].(string)
		filters, _ := row["query.filters"].(string)

		usedFields := make(map[string]bool)

		for _, field := range fieldRegex.FindAllString(formattedFields, -1) {
			results[field] += int(count)
			usedFields[field] = true
		}

		for _, field := range fieldRegex.FindAllString(filters, -1) {
			if _, ok := usedFields[field]; !ok {
				results[field] += int(count)
			}
		}
	}
	return results, nil
}

// =================================================================================================================
// END LOOKER HEALTH VACUUM CORE LOGIC
// =================================================================================================================
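The Invoke dispatch above means a caller drives the vacuum tool entirely through JSON parameters. Below is a minimal sketch of invoking it over the HTTP endpoint pattern the integration tests in this change use; the tool name my-vacuum-tool is hypothetical and assumes a looker-health-vacuum tool configured under that name in the tools file.

package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
	"strings"
)

func main() {
	// "my-vacuum-tool" is a hypothetical name for a configured
	// looker-health-vacuum tool; the port matches the test server used below.
	body := `{"action": "models", "project": "my_project", "timeframe": 30, "min_queries": 5}`
	resp, err := http.Post("http://127.0.0.1:5000/api/tool/my-vacuum-tool/invoke", "application/json", strings.NewReader(body))
	if err != nil {
		log.Fatalf("invoke failed: %v", err)
	}
	defer resp.Body.Close()
	out, _ := io.ReadAll(resp.Body)
	fmt.Println(string(out)) // a "result" payload listing unused explores per model
}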
@@ -0,0 +1,113 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package lookerhealthvacuum_test

import (
	"strings"
	"testing"

	yaml "github.com/goccy/go-yaml"
	"github.com/google/go-cmp/cmp"
	"github.com/googleapis/genai-toolbox/internal/server"
	"github.com/googleapis/genai-toolbox/internal/testutils"
	lhv "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthvacuum"
)

func TestParseFromYamlLookerHealthVacuum(t *testing.T) {
	ctx, err := testutils.ContextWithNewLogger()
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	tcs := []struct {
		desc string
		in   string
		want server.ToolConfigs
	}{
		{
			desc: "basic example",
			in: `
			tools:
				example_tool:
					kind: looker-health-vacuum
					source: my-instance
					description: some description
			`,
			want: server.ToolConfigs{
				"example_tool": lhv.Config{
					Name:         "example_tool",
					Kind:         "looker-health-vacuum",
					Source:       "my-instance",
					Description:  "some description",
					AuthRequired: []string{},
				},
			},
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Tools server.ToolConfigs `yaml:"tools"`
			}{}
			// Parse contents
			err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
			if err != nil {
				t.Fatalf("unable to unmarshal: %s", err)
			}
			if diff := cmp.Diff(tc.want, got.Tools); diff != "" {
				t.Fatalf("incorrect parse: diff %v", diff)
			}
		})
	}
}

func TestFailParseFromYamlLookerHealthVacuum(t *testing.T) {
	ctx, err := testutils.ContextWithNewLogger()
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	tcs := []struct {
		desc string
		in   string
		err  string
	}{
		{
			desc: "Invalid field",
			in: `
			tools:
				example_tool:
					kind: looker-health-vacuum
					source: my-instance
					invalid_field: true
			`,
			err: "unable to parse tool \"example_tool\" as kind \"looker-health-vacuum\": [2:1] unknown field \"invalid_field\"",
		},
	}
	for _, tc := range tcs {
		t.Run(tc.desc, func(t *testing.T) {
			got := struct {
				Tools server.ToolConfigs `yaml:"tools"`
			}{}
			// Parse contents
			err := yaml.UnmarshalContext(ctx, testutils.FormatYaml(tc.in), &got)
			if err == nil {
				t.Fatalf("expect parsing to fail")
			}
			errStr := err.Error()
			if !strings.Contains(errStr, tc.err) {
				t.Fatalf("unexpected error string: got %q, want substring %q", errStr, tc.err)
			}
		})
	}
}
@@ -145,11 +145,10 @@ const listTablesStatement = `
                    JSON_ARRAYAGG(S.COLUMN_NAME) AS INDEX_COLUMNS_ARRAY
                FROM
                    INFORMATION_SCHEMA.STATISTICS S
                WHERE
                    S.TABLE_SCHEMA = T.TABLE_SCHEMA AND S.TABLE_NAME = T.TABLE_NAME
                GROUP BY
                    S.TABLE_SCHEMA, S.TABLE_NAME, S.INDEX_NAME
            ) AS IndexData
            WHERE IndexData.TABLE_SCHEMA = T.TABLE_SCHEMA AND IndexData.TABLE_NAME = T.TABLE_NAME
            ORDER BY IndexData.INDEX_NAME
        ),
        'triggers', (
@@ -174,7 +174,7 @@ func TestBigQueryToolEndpoints(t *testing.T) {
	ddlWant := `"Query executed successfully and returned no content."`
	dataInsightsWant := `(?s)Schema Resolved.*Retrieval Query.*SQL Generated.*Answer`
	// Partial message; the full error message is too long.
	mcpMyFailToolWant := `{"jsonrpc":"2.0","id":"invoke-fail-tool","result":{"content":[{"type":"text","text":"final query validation failed: failed to insert dry run job: googleapi: Error 400: Syntax error: Unexpected identifier \"SELEC\" at [1:1]`
	mcpMyFailToolWant := `{"jsonrpc":"2.0","id":"invoke-fail-tool","result":{"content":[{"type":"text","text":"query validation failed: failed to insert dry run job: googleapi: Error 400: Syntax error: Unexpected identifier \"SELEC\" at [1:1]`
	mcpSelect1Want := `{"jsonrpc":"2.0","id":"invoke my-auth-required-tool","result":{"content":[{"type":"text","text":"{\"f0_\":1}"}]}}`
	createColArray := `["id INT64", "name STRING", "age INT64"]`
	selectEmptyWant := `"The query returned 0 rows."`
@@ -264,11 +264,21 @@ func TestBigQueryToolWithDatasetRestriction(t *testing.T) {

	// Configure tool
	toolsConfig := map[string]any{
		"list-dataset-ids-restricted": map[string]any{
			"kind":        "bigquery-list-dataset-ids",
			"source":      "my-instance",
			"description": "Tool to list dataset ids",
		},
		"list-table-ids-restricted": map[string]any{
			"kind":        "bigquery-list-table-ids",
			"source":      "my-instance",
			"description": "Tool to list table within a dataset",
		},
		"get-table-info-restricted": map[string]any{
			"kind":        "bigquery-get-table-info",
			"source":      "my-instance",
			"description": "Tool to get table info",
		},
		"execute-sql-restricted": map[string]any{
			"kind":   "bigquery-execute-sql",
			"source": "my-instance",
@@ -310,8 +320,11 @@ func TestBigQueryToolWithDatasetRestriction(t *testing.T) {
	}

	// Run tests
	runListDatasetIdsWithRestriction(t, allowedDatasetName1, allowedDatasetName2)
	runListTableIdsWithRestriction(t, allowedDatasetName1, disallowedDatasetName, allowedTableName1, allowedForecastTableName1)
	runListTableIdsWithRestriction(t, allowedDatasetName2, disallowedDatasetName, allowedTableName2, allowedForecastTableName2)
	runGetTableInfoWithRestriction(t, allowedDatasetName1, disallowedDatasetName, allowedTableName1, disallowedTableName)
	runGetTableInfoWithRestriction(t, allowedDatasetName2, disallowedDatasetName, allowedTableName2, disallowedTableName)
	runExecuteSqlWithRestriction(t, allowedTableNameParam1, disallowedTableNameParam)
	runExecuteSqlWithRestriction(t, allowedTableNameParam2, disallowedTableNameParam)
	runConversationalAnalyticsWithRestriction(t, allowedDatasetName1, disallowedDatasetName, allowedTableName1, disallowedTableName)
@@ -320,6 +333,165 @@ func TestBigQueryToolWithDatasetRestriction(t *testing.T) {
	runForecastWithRestriction(t, allowedForecastTableFullName2, disallowedForecastTableFullName)
}
func TestBigQueryWriteModeAllowed(t *testing.T) {
	sourceConfig := getBigQueryVars(t)
	sourceConfig["writeMode"] = "allowed"

	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	defer cancel()

	datasetName := fmt.Sprintf("temp_toolbox_test_allowed_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))

	client, err := initBigQueryConnection(BigqueryProject)
	if err != nil {
		t.Fatalf("unable to create BigQuery connection: %s", err)
	}

	dataset := client.Dataset(datasetName)
	if err := dataset.Create(ctx, &bigqueryapi.DatasetMetadata{Name: datasetName}); err != nil {
		t.Fatalf("Failed to create dataset %q: %v", datasetName, err)
	}
	defer func() {
		if err := dataset.DeleteWithContents(ctx); err != nil {
			t.Logf("failed to cleanup dataset %s: %v", datasetName, err)
		}
	}()

	toolsFile := map[string]any{
		"sources": map[string]any{
			"my-instance": sourceConfig,
		},
		"tools": map[string]any{
			"my-exec-sql-tool": map[string]any{
				"kind":        "bigquery-execute-sql",
				"source":      "my-instance",
				"description": "Tool to execute sql",
			},
		},
	}

	cmd, cleanup, err := tests.StartCmd(ctx, toolsFile)
	if err != nil {
		t.Fatalf("command initialization returned an error: %s", err)
	}
	defer cleanup()

	waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
	if err != nil {
		t.Logf("toolbox command logs: \n%s", out)
		t.Fatalf("toolbox didn't start successfully: %s", err)
	}

	runBigQueryWriteModeAllowedTest(t, datasetName)
}

func TestBigQueryWriteModeBlocked(t *testing.T) {
	sourceConfig := getBigQueryVars(t)
	sourceConfig["writeMode"] = "blocked"

	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	defer cancel()

	datasetName := fmt.Sprintf("temp_toolbox_test_blocked_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
	tableName := fmt.Sprintf("param_table_blocked_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
	tableNameParam := fmt.Sprintf("`%s.%s.%s`", BigqueryProject, datasetName, tableName)

	client, err := initBigQueryConnection(BigqueryProject)
	if err != nil {
		t.Fatalf("unable to create BigQuery connection: %s", err)
	}
	createParamTableStmt, insertParamTableStmt, _, _, _, _, paramTestParams := getBigQueryParamToolInfo(tableNameParam)
	teardownTable := setupBigQueryTable(t, ctx, client, createParamTableStmt, insertParamTableStmt, datasetName, tableNameParam, paramTestParams)
	defer teardownTable(t)

	toolsFile := map[string]any{
		"sources": map[string]any{"my-instance": sourceConfig},
		"tools": map[string]any{
			"my-exec-sql-tool": map[string]any{"kind": "bigquery-execute-sql", "source": "my-instance", "description": "Tool to execute sql"},
		},
	}

	cmd, cleanup, err := tests.StartCmd(ctx, toolsFile)
	if err != nil {
		t.Fatalf("command initialization returned an error: %s", err)
	}
	defer cleanup()

	waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
	if err != nil {
		t.Logf("toolbox command logs: \n%s", out)
		t.Fatalf("toolbox didn't start successfully: %s", err)
	}

	runBigQueryWriteModeBlockedTest(t, tableNameParam, datasetName)
}

func TestBigQueryWriteModeProtected(t *testing.T) {
	sourceConfig := getBigQueryVars(t)
	sourceConfig["writeMode"] = "protected"

	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	defer cancel()

	permanentDatasetName := fmt.Sprintf("perm_dataset_protected_%s", strings.ReplaceAll(uuid.New().String(), "-", ""))
	client, err := initBigQueryConnection(BigqueryProject)
	if err != nil {
		t.Fatalf("unable to create BigQuery connection: %s", err)
	}
	dataset := client.Dataset(permanentDatasetName)
	if err := dataset.Create(ctx, &bigqueryapi.DatasetMetadata{Name: permanentDatasetName}); err != nil {
		t.Fatalf("Failed to create dataset %q: %v", permanentDatasetName, err)
	}
	defer func() {
		if err := dataset.DeleteWithContents(ctx); err != nil {
			t.Logf("failed to cleanup dataset %s: %v", permanentDatasetName, err)
		}
	}()

	toolsFile := map[string]any{
		"sources": map[string]any{"my-instance": sourceConfig},
		"tools": map[string]any{
			"my-exec-sql-tool": map[string]any{"kind": "bigquery-execute-sql", "source": "my-instance", "description": "Tool to execute sql"},
			"my-sql-tool-protected": map[string]any{
				"kind":        "bigquery-sql",
				"source":      "my-instance",
				"description": "Tool to query from the session",
				"statement":   "SELECT * FROM my_shared_temp_table",
			},
			"my-forecast-tool-protected": map[string]any{
				"kind":        "bigquery-forecast",
				"source":      "my-instance",
				"description": "Tool to forecast from session temp table",
			},
			"my-analyze-contribution-tool-protected": map[string]any{
				"kind":        "bigquery-analyze-contribution",
				"source":      "my-instance",
				"description": "Tool to analyze contribution from session temp table",
			},
		},
	}

	cmd, cleanup, err := tests.StartCmd(ctx, toolsFile)
	if err != nil {
		t.Fatalf("command initialization returned an error: %s", err)
	}
	defer cleanup()

	waitCtx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
	if err != nil {
		t.Logf("toolbox command logs: \n%s", out)
		t.Fatalf("toolbox didn't start successfully: %s", err)
	}

	runBigQueryWriteModeProtectedTest(t, permanentDatasetName)
}
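The three write-mode tests above vary a single field on the BigQuery source. For reference, here is a minimal sketch of the resulting source entry in the map style these tests use; the "bigquery" kind string and the project value are assumed placeholders, since the real values come from getBigQueryVars.

// Sketch of the source entry the write-mode tests assemble. Everything except
// writeMode comes from getBigQueryVars in the real tests; "bigquery" and
// "my-project" are assumed placeholder values here.
sourceConfig := map[string]any{
	"kind":      "bigquery",
	"project":   "my-project",
	"writeMode": "protected", // one of "allowed", "blocked", "protected"
}
_ = sourceConfig // passed as sources["my-instance"] in a tools file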
// getBigQueryParamToolInfo returns statements and param for my-tool for bigquery kind
func getBigQueryParamToolInfo(tableName string) (string, string, string, string, string, string, []bigqueryapi.QueryParameter) {
	createStatement := fmt.Sprintf(`
@@ -894,6 +1066,223 @@ func runBigQueryExecuteSqlToolInvokeTest(t *testing.T, select1Want, invokeParamW
	}
}

// runInvokeRequest sends a POST request to the given API endpoint and returns the response and parsed JSON body.
func runInvokeRequest(t *testing.T, api, body string, headers map[string]string) (*http.Response, map[string]interface{}) {
	t.Helper()
	req, err := http.NewRequest(http.MethodPost, api, bytes.NewBufferString(body))
	if err != nil {
		t.Fatalf("failed to create request: %v", err)
	}
	req.Header.Set("Content-Type", "application/json")
	for k, v := range headers {
		req.Header.Add(k, v)
	}

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		t.Fatalf("failed to send request: %v", err)
	}

	var result map[string]interface{}
	// Read the body once, then restore it so callers can read it again (e.g. for logging on failure).
	bodyBytes, err := io.ReadAll(resp.Body)
	if err != nil {
		t.Fatalf("failed to read response body: %v", err)
	}
	resp.Body.Close()                                    // Close original body
	resp.Body = io.NopCloser(bytes.NewBuffer(bodyBytes)) // Replace with a new reader

	if err := json.Unmarshal(bodyBytes, &result); err != nil {
		t.Logf("Failed to decode response body: %s", string(bodyBytes))
		t.Fatalf("failed to decode response: %v", err)
	}
	return resp, result
}
func runBigQueryWriteModeAllowedTest(t *testing.T, datasetName string) {
	t.Run("CREATE TABLE should succeed", func(t *testing.T) {
		sql := fmt.Sprintf("CREATE TABLE %s.new_table (x INT64)", datasetName)
		body := fmt.Sprintf(`{"sql": "%s"}`, sql)
		resp, result := runInvokeRequest(t, "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke", body, nil)
		defer resp.Body.Close()

		if resp.StatusCode != http.StatusOK {
			bodyBytes, _ := io.ReadAll(resp.Body)
			t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, http.StatusOK, string(bodyBytes))
		}

		resStr, ok := result["result"].(string)
		if !ok {
			t.Fatalf("expected 'result' field in response, got %v", result)
		}
		if resStr != `"Query executed successfully and returned no content."` {
			t.Errorf("unexpected result: got %q, want %q", resStr, `"Query executed successfully and returned no content."`)
		}
	})
}

func runBigQueryWriteModeBlockedTest(t *testing.T, tableNameParam, datasetName string) {
	testCases := []struct {
		name           string
		sql            string
		wantStatusCode int
		wantInError    string
		wantResult     string
	}{
		{"SELECT statement should succeed", fmt.Sprintf("SELECT * FROM %s WHERE id = 1", tableNameParam), http.StatusOK, "", `[{"id":1,"name":"Alice"}]`},
		{"INSERT statement should fail", fmt.Sprintf("INSERT INTO %s (id, name) VALUES (10, 'test')", tableNameParam), http.StatusBadRequest, "write mode is 'blocked', only SELECT statements are allowed", ""},
		{"CREATE TABLE statement should fail", fmt.Sprintf("CREATE TABLE %s.new_table (x INT64)", datasetName), http.StatusBadRequest, "write mode is 'blocked', only SELECT statements are allowed", ""},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			body := fmt.Sprintf(`{"sql": "%s"}`, tc.sql)
			resp, result := runInvokeRequest(t, "http://127.0.0.1:5000/api/tool/my-exec-sql-tool/invoke", body, nil)
			defer resp.Body.Close()

			if resp.StatusCode != tc.wantStatusCode {
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
			}

			if tc.wantInError != "" {
				errStr, ok := result["error"].(string)
				if !ok {
					t.Fatalf("expected 'error' field in response, got %v", result)
				}
				if !strings.Contains(errStr, tc.wantInError) {
					t.Fatalf("expected error message to contain %q, but got %q", tc.wantInError, errStr)
				}
			}
			if tc.wantResult != "" {
				resStr, ok := result["result"].(string)
				if !ok {
					t.Fatalf("expected 'result' field in response, got %v", result)
				}
				if resStr != tc.wantResult {
					t.Fatalf("unexpected result: got %q, want %q", resStr, tc.wantResult)
				}
			}
		})
	}
}

func runBigQueryWriteModeProtectedTest(t *testing.T, permanentDatasetName string) {
	testCases := []struct {
		name           string
		toolName       string
		requestBody    string
		wantStatusCode int
		wantInError    string
		wantResult     string
	}{
		{
			name:           "CREATE TABLE to permanent dataset should fail",
			toolName:       "my-exec-sql-tool",
			requestBody:    fmt.Sprintf(`{"sql": "CREATE TABLE %s.new_table (x INT64)"}`, permanentDatasetName),
			wantStatusCode: http.StatusBadRequest,
			wantInError:    "protected write mode only supports SELECT statements, or write operations in the anonymous dataset",
			wantResult:     "",
		},
		{
			name:           "CREATE TEMP TABLE should succeed",
			toolName:       "my-exec-sql-tool",
			requestBody:    `{"sql": "CREATE TEMP TABLE my_shared_temp_table (x INT64)"}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `"Query executed successfully and returned no content."`,
		},
		{
			name:           "INSERT into TEMP TABLE should succeed",
			toolName:       "my-exec-sql-tool",
			requestBody:    `{"sql": "INSERT INTO my_shared_temp_table (x) VALUES (42)"}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `"Query executed successfully and returned no content."`,
		},
		{
			name:           "SELECT from TEMP TABLE with exec-sql should succeed",
			toolName:       "my-exec-sql-tool",
			requestBody:    `{"sql": "SELECT * FROM my_shared_temp_table"}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `[{"x":42}]`,
		},
		{
			name:           "SELECT from TEMP TABLE with sql-tool should succeed",
			toolName:       "my-sql-tool-protected",
			requestBody:    `{}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `[{"x":42}]`,
		},
		{
			name:           "CREATE TEMP TABLE for forecast should succeed",
			toolName:       "my-exec-sql-tool",
			requestBody:    `{"sql": "CREATE TEMP TABLE forecast_temp_table (ts TIMESTAMP, data FLOAT64) AS SELECT TIMESTAMP('2025-01-01T00:00:00Z') AS ts, 10.0 AS data UNION ALL SELECT TIMESTAMP('2025-01-01T01:00:00Z'), 11.0 UNION ALL SELECT TIMESTAMP('2025-01-01T02:00:00Z'), 12.0 UNION ALL SELECT TIMESTAMP('2025-01-01T03:00:00Z'), 13.0"}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `"Query executed successfully and returned no content."`,
		},
		{
			name:           "Forecast from TEMP TABLE should succeed",
			toolName:       "my-forecast-tool-protected",
			requestBody:    `{"history_data": "SELECT * FROM forecast_temp_table", "timestamp_col": "ts", "data_col": "data", "horizon": 1}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `"forecast_timestamp"`,
		},
		{
			name:           "CREATE TEMP TABLE for contribution analysis should succeed",
			toolName:       "my-exec-sql-tool",
			requestBody:    `{"sql": "CREATE TEMP TABLE contribution_temp_table (dim1 STRING, is_test BOOL, metric FLOAT64) AS SELECT 'a' as dim1, true as is_test, 100.0 as metric UNION ALL SELECT 'b', false, 120.0"}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `"Query executed successfully and returned no content."`,
		},
		{
			name:           "Analyze contribution from TEMP TABLE should succeed",
			toolName:       "my-analyze-contribution-tool-protected",
			requestBody:    `{"input_data": "SELECT * FROM contribution_temp_table", "contribution_metric": "SUM(metric)", "is_test_col": "is_test", "dimension_id_cols": ["dim1"]}`,
			wantStatusCode: http.StatusOK,
			wantInError:    "",
			wantResult:     `"relative_difference"`,
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			api := fmt.Sprintf("http://127.0.0.1:5000/api/tool/%s/invoke", tc.toolName)
			resp, result := runInvokeRequest(t, api, tc.requestBody, nil)
			defer resp.Body.Close()

			if resp.StatusCode != tc.wantStatusCode {
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
			}

			if tc.wantInError != "" {
				errStr, ok := result["error"].(string)
				if !ok {
					t.Fatalf("expected 'error' field in response, got %v", result)
				}
				if !strings.Contains(errStr, tc.wantInError) {
					t.Fatalf("expected error message to contain %q, but got %q", tc.wantInError, errStr)
				}
			}

			if tc.wantResult != "" {
				resStr, ok := result["result"].(string)
				if !ok {
					t.Fatalf("expected 'result' field in response, got %v", result)
				}
				if !strings.Contains(resStr, tc.wantResult) {
					t.Fatalf("expected %q to contain %q, but it did not", resStr, tc.wantResult)
				}
			}
		})
	}
}
func runBigQueryExecuteSqlToolInvokeDryRunTest(t *testing.T, datasetName string) {
	// Get ID token
	idToken, err := tests.GetGoogleIdToken(tests.ClientId)
@@ -2080,6 +2469,43 @@ func runBigQueryConversationalAnalyticsInvokeTest(t *testing.T, datasetName, tab
	}
}

func runListDatasetIdsWithRestriction(t *testing.T, allowedDatasetName1, allowedDatasetName2 string) {
	testCases := []struct {
		name           string
		wantStatusCode int
		wantResult     string
	}{
		{
			name:           "invoke list-dataset-ids with restriction",
			wantStatusCode: http.StatusOK,
			wantResult:     fmt.Sprintf(`["%s.%s","%s.%s"]`, BigqueryProject, allowedDatasetName1, BigqueryProject, allowedDatasetName2),
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			body := bytes.NewBuffer([]byte(`{}`))
			resp, bodyBytes := tests.RunRequest(t, http.MethodPost, "http://127.0.0.1:5000/api/tool/list-dataset-ids-restricted/invoke", body, nil)

			if resp.StatusCode != tc.wantStatusCode {
				t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
			}

			var respBody map[string]interface{}
			if err := json.Unmarshal(bodyBytes, &respBody); err != nil {
				t.Fatalf("error parsing response body: %v", err)
			}
			got, ok := respBody["result"].(string)
			if !ok {
				t.Fatalf("unable to find result in response body")
			}
			if got != tc.wantResult {
				t.Errorf("unexpected result: got %q, want %q", got, tc.wantResult)
			}
		})
	}
}
func runListTableIdsWithRestriction(t *testing.T, allowedDatasetName, disallowedDatasetName string, allowedTableNames ...string) {
	sort.Strings(allowedTableNames)
	var quotedNames []string
@@ -2163,6 +2589,58 @@ func runListTableIdsWithRestriction(t *testing.T, allowedDatasetName, disallowed
	}
}

func runGetTableInfoWithRestriction(t *testing.T, allowedDatasetName, disallowedDatasetName, allowedTableName, disallowedTableName string) {
	testCases := []struct {
		name           string
		dataset        string
		table          string
		wantStatusCode int
		wantInError    string
	}{
		{
			name:           "invoke on allowed table",
			dataset:        allowedDatasetName,
			table:          allowedTableName,
			wantStatusCode: http.StatusOK,
		},
		{
			name:           "invoke on disallowed table",
			dataset:        disallowedDatasetName,
			table:          disallowedTableName,
			wantStatusCode: http.StatusBadRequest,
			wantInError:    fmt.Sprintf("access denied to dataset '%s'", disallowedDatasetName),
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			body := bytes.NewBuffer([]byte(fmt.Sprintf(`{"dataset":"%s", "table":"%s"}`, tc.dataset, tc.table)))
			req, err := http.NewRequest(http.MethodPost, "http://127.0.0.1:5000/api/tool/get-table-info-restricted/invoke", body)
			if err != nil {
				t.Fatalf("unable to create request: %s", err)
			}
			req.Header.Add("Content-type", "application/json")
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				t.Fatalf("unable to send request: %s", err)
			}
			defer resp.Body.Close()

			if resp.StatusCode != tc.wantStatusCode {
				bodyBytes, _ := io.ReadAll(resp.Body)
				t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
			}

			if tc.wantInError != "" {
				bodyBytes, _ := io.ReadAll(resp.Body)
				if !strings.Contains(string(bodyBytes), tc.wantInError) {
					t.Errorf("unexpected error message: got %q, want to contain %q", string(bodyBytes), tc.wantInError)
				}
			}
		})
	}
}

func runExecuteSqlWithRestriction(t *testing.T, allowedTableFullName, disallowedTableFullName string) {
	allowedTableParts := strings.Split(strings.Trim(allowedTableFullName, "`"), ".")
	if len(allowedTableParts) != 3 {
@@ -123,6 +123,9 @@ func TestCloudSQLMSSQLToolEndpoints(t *testing.T) {
		t.Fatalf("unable to create Cloud SQL connection pool: %s", err)
	}

	// cleanup test environment
	tests.CleanupMSSQLTables(t, ctx, db)

	// create table name with UUID
	tableNameParam := "param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
	tableNameAuth := "auth_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")

@@ -110,6 +110,9 @@ func TestCloudSQLMySQLToolEndpoints(t *testing.T) {
		t.Fatalf("unable to create Cloud SQL connection pool: %s", err)
	}

	// cleanup test environment
	tests.CleanupMySQLTables(t, ctx, pool)

	// create table name with UUID
	tableNameParam := "param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
	tableNameAuth := "auth_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
@@ -833,7 +833,7 @@ func CleanupPostgresTables(t *testing.T, ctx context.Context, pool *pgxpool.Pool
	}

	dropQuery := fmt.Sprintf("DROP TABLE IF EXISTS %s CASCADE;", strings.Join(tablesToDrop, ", "))

	if _, err := pool.Exec(ctx, dropQuery); err != nil {
		t.Fatalf("Failed to drop all tables in 'public' schema: %v", err)
	}
@@ -871,7 +871,7 @@ func CleanupMySQLTables(t *testing.T, ctx context.Context, pool *sql.DB) {
	}

	dropQuery := fmt.Sprintf("DROP TABLE IF EXISTS %s;", strings.Join(tablesToDrop, ", "))

	if _, err := pool.ExecContext(ctx, dropQuery); err != nil {
		// Try to re-enable checks even if drop fails
		if _, err := pool.ExecContext(ctx, "SET FOREIGN_KEY_CHECKS = 1;"); err != nil {
138
tests/dataform/dataform_integration_test.go
Normal file
@@ -0,0 +1,138 @@
// Copyright 2025 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package dataformcompilelocal

import (
	"context"
	"fmt"
	"net/http"
	"os"
	"os/exec"
	"path/filepath"
	"regexp"
	"strings"
	"testing"
	"time"

	"github.com/googleapis/genai-toolbox/internal/testutils"
	"github.com/googleapis/genai-toolbox/tests"
)

// setupTestProject creates a minimal dataform project using the 'dataform init' CLI.
// It returns the path to the directory and a cleanup function.
func setupTestProject(t *testing.T) (string, func()) {
	tmpDir, err := os.MkdirTemp("", "dataform-project-*")
	if err != nil {
		t.Fatalf("Failed to create temp dir: %v", err)
	}
	cleanup := func() {
		os.RemoveAll(tmpDir)
	}

	cmd := exec.Command("dataform", "init", tmpDir, "test-project-id", "US")
	if output, err := cmd.CombinedOutput(); err != nil {
		cleanup()
		t.Fatalf("Failed to run 'dataform init': %v\nOutput: %s", err, string(output))
	}

	definitionsDir := filepath.Join(tmpDir, "definitions")
	exampleSQLX := `config { type: "table" } SELECT 1 AS test_col`
	err = os.WriteFile(filepath.Join(definitionsDir, "example.sqlx"), []byte(exampleSQLX), 0644)
	if err != nil {
		cleanup()
		t.Fatalf("Failed to write example.sqlx: %v", err)
	}

	return tmpDir, cleanup
}

func TestDataformCompileTool(t *testing.T) {
	if _, err := exec.LookPath("dataform"); err != nil {
		t.Skip("dataform CLI not found in $PATH, skipping integration test")
	}

	projectDir, cleanupProject := setupTestProject(t)
	defer cleanupProject()

	toolsFile := map[string]any{
		"tools": map[string]any{
			"my-dataform-compiler": map[string]any{
				"kind":        "dataform-compile-local",
				"description": "Tool to compile dataform projects",
			},
		},
	}

	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Minute)
	defer cancel()

	cmd, cleanupServer, err := tests.StartCmd(ctx, toolsFile)
	if err != nil {
		t.Fatalf("command initialization returned an error: %s", err)
	}
	defer cleanupServer()

	waitCtx, cancelWait := context.WithTimeout(ctx, 30*time.Second)
	defer cancelWait()
	out, err := testutils.WaitForString(waitCtx, regexp.MustCompile(`Server ready to serve`), cmd.Out)
	if err != nil {
		t.Logf("toolbox command logs: \n%s", out)
		t.Fatalf("toolbox didn't start successfully: %s", err)
	}

	nonExistentDir := filepath.Join(os.TempDir(), "non-existent-dir")

	testCases := []struct {
		name       string
		reqBody    string
		wantStatus int
		wantBody   string // Substring to check for in the response
	}{
		{
			name:       "success case",
			reqBody:    fmt.Sprintf(`{"project_dir":"%s"}`, projectDir),
			wantStatus: http.StatusOK,
			wantBody:   "test_col",
		},
		{
			name:       "missing parameter",
			reqBody:    `{}`,
			wantStatus: http.StatusBadRequest,
			wantBody:   `parameter \"project_dir\" is required`,
		},
		{
			name:       "non-existent directory",
			reqBody:    fmt.Sprintf(`{"project_dir":"%s"}`, nonExistentDir),
			wantStatus: http.StatusBadRequest,
			wantBody:   "error executing dataform compile",
		},
	}

	api := "http://127.0.0.1:5000/api/tool/my-dataform-compiler/invoke"

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			resp, bodyBytes := tests.RunRequest(t, http.MethodPost, api, strings.NewReader(tc.reqBody), nil)

			if resp.StatusCode != tc.wantStatus {
				t.Fatalf("unexpected status: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatus, string(bodyBytes))
			}

			if tc.wantBody != "" && !strings.Contains(string(bodyBytes), tc.wantBody) {
				t.Fatalf("expected body to contain %q, got: %s", tc.wantBody, string(bodyBytes))
			}
		})
	}
}
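The tool exercised above shells out to the dataform CLI, which the CI config installs via npm. A rough sketch of reproducing the compile step by hand follows; the --json flag is an assumption about the @dataform/cli interface, not something this test asserts.

package main

import (
	"fmt"
	"log"
	"os/exec"
)

func main() {
	// Approximates what dataform-compile-local does under the hood: shell out
	// to the dataform CLI inside the project directory. The --json flag is an
	// assumption about the CLI, and the path is a placeholder.
	cmd := exec.Command("dataform", "compile", "--json")
	cmd.Dir = "/path/to/dataform/project"
	out, err := cmd.CombinedOutput()
	if err != nil {
		log.Fatalf("dataform compile failed: %v\n%s", err, out)
	}
	fmt.Println(string(out)) // compiled graph, including the example table's test_col
}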
@@ -15,9 +15,14 @@
|
||||
package looker
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"os"
|
||||
"regexp"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
@@ -33,6 +38,8 @@ var (
|
||||
LookerVerifySsl = os.Getenv("LOOKER_VERIFY_SSL")
|
||||
LookerClientId = os.Getenv("LOOKER_CLIENT_ID")
|
||||
LookerClientSecret = os.Getenv("LOOKER_CLIENT_SECRET")
|
||||
LookerProject = os.Getenv("LOOKER_PROJECT")
|
||||
LookerLocation = os.Getenv("LOOKER_LOCATION")
|
||||
)
|
||||
|
||||
func getLookerVars(t *testing.T) map[string]any {
|
||||
@@ -45,6 +52,10 @@ func getLookerVars(t *testing.T) map[string]any {
|
||||
t.Fatal("'LOOKER_CLIENT_ID' not set")
|
||||
case LookerClientSecret:
|
||||
t.Fatal("'LOOKER_CLIENT_SECRET' not set")
|
||||
case LookerProject:
|
||||
t.Fatal("'LOOKER_PROJECT' not set")
|
||||
case LookerLocation:
|
||||
t.Fatal("'LOOKER_LOCATION' not set")
|
||||
}
|
||||
|
||||
return map[string]any{
|
||||
@@ -53,12 +64,14 @@ func getLookerVars(t *testing.T) map[string]any {
|
||||
"verify_ssl": (LookerVerifySsl == "true"),
|
||||
"client_id": LookerClientId,
|
||||
"client_secret": LookerClientSecret,
|
||||
"project": LookerProject,
|
||||
"location": LookerLocation,
|
||||
}
|
||||
}
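For local runs, the two new variables must be present alongside the existing Looker credentials or getLookerVars will fail the test early. A hypothetical helper along these lines could set them up; the values are placeholders, not real identifiers:

func setLookerEnvForLocalRun() {
	os.Setenv("LOOKER_PROJECT", "my-project-number") // placeholder project identifier
	os.Setenv("LOOKER_LOCATION", "us")               // assumed region value
}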

func TestLooker(t *testing.T) {
	sourceConfig := getLookerVars(t)
	ctx, cancel := context.WithTimeout(context.Background(), time.Minute)
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
	defer cancel()

	testLogger, err := log.NewStdLogger(os.Stdout, os.Stderr, "info")
@@ -70,6 +83,7 @@ func TestLooker(t *testing.T) {
	var args []string

	// Write config into a file and pass it to command

	toolsFile := map[string]any{
		"sources": map[string]any{
			"my-instance": sourceConfig,
@@ -130,6 +144,26 @@ func TestLooker(t *testing.T) {
				"source":      "my-instance",
				"description": "Simple tool to test end to end functionality.",
			},
			"conversational_analytics": map[string]any{
				"kind":        "looker-conversational-analytics",
				"source":      "my-instance",
				"description": "Simple tool to test end to end functionality.",
			},
			"health_pulse": map[string]any{
				"kind":        "looker-health-pulse",
				"source":      "my-instance",
				"description": "Checks the health of a Looker instance by running a series of checks on the system.",
			},
			"health_analyze": map[string]any{
				"kind":        "looker-health-analyze",
				"source":      "my-instance",
				"description": "Provides analysis of a Looker instance's projects, models, or explores.",
			},
			"health_vacuum": map[string]any{
				"kind":        "looker-health-vacuum",
				"source":      "my-instance",
				"description": "Vacuums unused content from a Looker instance.",
			},
		},
	}

@@ -618,6 +652,160 @@ func TestLooker(t *testing.T) {
		},
	)

	tests.RunToolGetTestByName(t, "conversational_analytics",
		map[string]any{
			"conversational_analytics": map[string]any{
				"description":  "Simple tool to test end to end functionality.",
				"authRequired": []any{},
				"parameters": []any{
					map[string]any{
						"authSources": []any{},
						"description": "The user's question, potentially including conversation history and system instructions for context.",
						"name":        "user_query_with_context",
						"required":    true,
						"type":        "string",
					},
					map[string]any{
						"authSources": []any{},
						"description": "An Array of at least one and up to 5 explore references like [{'model': 'MODEL_NAME', 'explore': 'EXPLORE_NAME'}]",
						"items": map[string]any{
							"additionalProperties": true,
							"authSources":          []any{},
							"name":                 "explore_reference",
							"description":          "An explore reference like {'model': 'MODEL_NAME', 'explore': 'EXPLORE_NAME'}",
							"required":             true,
							"type":                 "object",
						},
						"name":     "explore_references",
						"required": true,
						"type":     "array",
					},
				},
			},
		},
	)
	tests.RunToolGetTestByName(t, "health_pulse",
		map[string]any{
			"health_pulse": map[string]any{
				"description":  "Checks the health of a Looker instance by running a series of checks on the system.",
				"authRequired": []any{},
				"parameters": []any{
					map[string]any{
						"authSources": []any{},
						"description": "The health check to run. Can be either: `check_db_connections`, `check_dashboard_performance`,`check_dashboard_errors`,`check_explore_performance`,`check_schedule_failures`, or `check_legacy_features`",
						"name":        "action",
						"required":    true,
						"type":        "string",
					},
				},
			},
		},
	)
	tests.RunToolGetTestByName(t, "health_analyze",
		map[string]any{
			"health_analyze": map[string]any{
				"description":  "Provides analysis of a Looker instance's projects, models, or explores.",
				"authRequired": []any{},
				"parameters": []any{
					map[string]any{
						"authSources": []any{},
						"description": "The analysis to run. Can be 'projects', 'models', or 'explores'.",
						"name":        "action",
						"required":    true,
						"type":        "string",
					},
					map[string]any{
						"authSources": []any{},
						"description": "The Looker project to analyze (optional).",
						"name":        "project",
						"required":    false,
						"type":        "string",
					},
					map[string]any{
						"authSources": []any{},
						"description": "The Looker model to analyze (optional).",
						"name":        "model",
						"required":    false,
						"type":        "string",
					},
					map[string]any{
						"authSources": []any{},
						"description": "The Looker explore to analyze (optional).",
						"name":        "explore",
						"required":    false,
						"type":        "string",
					},
					map[string]any{
						"authSources": []any{},
						"description": "The timeframe in days to analyze.",
						"name":        "timeframe",
						"required":    false,
						"type":        "integer",
					},
					map[string]any{
						"authSources": []any{},
						"description": "The minimum number of queries for a model or explore to be considered used.",
						"name":        "min_queries",
						"required":    false,
						"type":        "integer",
					},
				},
			},
		},
	)
	tests.RunToolGetTestByName(t, "health_vacuum",
		map[string]any{
			"health_vacuum": map[string]any{
				"description":  "Vacuums unused content from a Looker instance.",
				"authRequired": []any{},
				"parameters": []any{
					map[string]any{
						"authSources": []any{},
						"description": "The vacuum action to run. Can be 'models', or 'explores'.",
						"name":        "action",
						"required":    true,
						"type":        "string",
					},
					map[string]any{
						"authSources": []any{},
						"description": "The Looker project to vacuum (optional).",
						"name":        "project",
						"required":    false,
						"type":        "string",
					},
					map[string]any{
						"authSources": []any{},
						"description": "The Looker model to vacuum (optional).",
						"name":        "model",
						"required":    false,
						"type":        "string",
					},
					map[string]any{
						"authSources": []any{},
						"description": "The Looker explore to vacuum (optional).",
						"name":        "explore",
						"required":    false,
						"type":        "string",
					},
					map[string]any{
						"authSources": []any{},
						"description": "The timeframe in days to analyze.",
						"name":        "timeframe",
						"required":    false,
						"type":        "integer",
					},
					map[string]any{
						"authSources": []any{},
						"description": "The minimum number of queries for a model or explore to be considered used.",
						"name":        "min_queries",
						"required":    false,
						"type":        "integer",
					},
				},
			},
		},
	)

	wantResult := "{\"label\":\"System Activity\",\"name\":\"system__activity\",\"project_name\":\"system__activity\"}"
	tests.RunToolInvokeSimpleTest(t, "get_models", wantResult)

@@ -651,4 +839,87 @@ func TestLooker(t *testing.T) {

	wantResult = "null"
	tests.RunToolInvokeParametersTest(t, "get_dashboards", []byte(`{"title": "FOO", "desc": "BAR"}`), wantResult)

	runConversationalAnalytics(t, "system__activity", "content_usage")

	wantResult = "\"Connection\":\"thelook\""
	tests.RunToolInvokeParametersTest(t, "health_pulse", []byte(`{"action": "check_db_connections"}`), wantResult)

	wantResult = "[]"
	tests.RunToolInvokeParametersTest(t, "health_pulse", []byte(`{"action": "check_schedule_failures"}`), wantResult)

	wantResult = "[{\"Feature\":\"Unsupported in Looker (Google Cloud core)\"}]"
	tests.RunToolInvokeParametersTest(t, "health_pulse", []byte(`{"action": "check_legacy_features"}`), wantResult)

	wantResult = "\"Project\":\"the_look\""
	tests.RunToolInvokeParametersTest(t, "health_analyze", []byte(`{"action": "projects"}`), wantResult)

	wantResult = "\"Model\":\"the_look\""
	tests.RunToolInvokeParametersTest(t, "health_analyze", []byte(`{"action": "explores", "project": "the_look", "model": "the_look", "explore": "inventory_items"}`), wantResult)

	wantResult = "\"Model\":\"the_look\""
	tests.RunToolInvokeParametersTest(t, "health_vacuum", []byte(`{"action": "models"}`), wantResult)
}
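The health checks above go through tests.RunToolInvokeParametersTest, whose body is outside this diff. A rough sketch of the contract it appears to implement follows, assuming the shared /api/tool/<name>/invoke endpoint, a JSON body of parameters, and the JSON "result" wrapper that runConversationalAnalytics below also parses; the helper shown here is illustrative, not the harness's actual code (it assumes the usual net/http, encoding/json, io, bytes, and strings imports):

func runToolInvokeParametersSketch(t *testing.T, toolName string, params []byte, want string) {
	t.Helper()

	url := fmt.Sprintf("http://127.0.0.1:5000/api/tool/%s/invoke", toolName)
	resp, err := http.Post(url, "application/json", bytes.NewBuffer(params))
	if err != nil {
		t.Fatalf("request to %s failed: %v", url, err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		t.Fatalf("failed to read response body: %v", err)
	}
	if resp.StatusCode != http.StatusOK {
		t.Fatalf("unexpected status %d: %s", resp.StatusCode, string(body))
	}

	// The invoke endpoint wraps tool output in a JSON "result" field.
	var parsed struct {
		Result string `json:"result"`
	}
	if err := json.Unmarshal(body, &parsed); err != nil {
		t.Fatalf("error parsing response body: %v", err)
	}
	if !strings.Contains(parsed.Result, want) {
		t.Errorf("result %q does not contain %q", parsed.Result, want)
	}
}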

func runConversationalAnalytics(t *testing.T, modelName, exploreName string) {
	exploreRefsJSON := fmt.Sprintf(`[{"model":"%s","explore":"%s"}]`, modelName, exploreName)

	var refs []map[string]any
	if err := json.Unmarshal([]byte(exploreRefsJSON), &refs); err != nil {
		t.Fatalf("failed to unmarshal explore refs: %v", err)
	}

	testCases := []struct {
		name           string
		exploreRefs    []map[string]any
		wantStatusCode int
		wantInResult   string
		wantInError    string
	}{
		{
			name:           "invoke conversational analytics with explore",
			exploreRefs:    refs,
			wantStatusCode: http.StatusOK,
			wantInResult:   `Answer`,
		},
	}

	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			requestBodyMap := map[string]any{
				"user_query_with_context": "What is in the explore?",
				"explore_references":      tc.exploreRefs,
			}
			bodyBytes, err := json.Marshal(requestBodyMap)
			if err != nil {
				t.Fatalf("failed to marshal request body: %v", err)
			}
			url := "http://127.0.0.1:5000/api/tool/conversational_analytics/invoke"
			resp, bodyBytes := tests.RunRequest(t, http.MethodPost, url, bytes.NewBuffer(bodyBytes), nil)

			if resp.StatusCode != tc.wantStatusCode {
				t.Fatalf("unexpected status code: got %d, want %d. Body: %s", resp.StatusCode, tc.wantStatusCode, string(bodyBytes))
			}

			if tc.wantInResult != "" {
				var respBody map[string]interface{}
				if err := json.Unmarshal(bodyBytes, &respBody); err != nil {
					t.Fatalf("error parsing response body: %v", err)
				}
				got, ok := respBody["result"].(string)
				if !ok {
					t.Fatalf("unable to find result in response body")
				}
				if !strings.Contains(got, tc.wantInResult) {
					t.Errorf("unexpected result: got %q, want to contain %q", got, tc.wantInResult)
				}
			}

			if tc.wantInError != "" {
				if !strings.Contains(string(bodyBytes), tc.wantInError) {
					t.Errorf("unexpected error message: got %q, want to contain %q", string(bodyBytes), tc.wantInError)
				}
			}
		})
	}
}

@@ -97,6 +97,9 @@ func TestMSSQLToolEndpoints(t *testing.T) {
		t.Fatalf("unable to create SQL Server connection pool: %s", err)
	}

	// cleanup test environment
	tests.CleanupMSSQLTables(t, ctx, pool)

	// create table name with UUID
	tableNameParam := "param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
	tableNameAuth := "auth_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")

@@ -87,6 +87,9 @@ func TestMySQLToolEndpoints(t *testing.T) {
		t.Fatalf("unable to create MySQL connection pool: %s", err)
	}

	// cleanup test environment
	tests.CleanupMySQLTables(t, ctx, pool)

	// create table name with UUID
	tableNameParam := "param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
	tableNameAuth := "auth_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")

@@ -137,6 +137,9 @@ func TestPostgres(t *testing.T) {
		t.Fatalf("unable to create postgres connection pool: %s", err)
	}

	// cleanup test environment
	tests.CleanupPostgresTables(t, ctx, pool)

	// create table name with UUID
	tableNameParam := "param_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
	tableNameAuth := "auth_table_" + strings.ReplaceAll(uuid.New().String(), "-", "")
@@ -237,7 +240,24 @@ func runPostgresListTablesTest(t *testing.T, tableNameParam, tableNameAuth strin
		requestBody    io.Reader
		wantStatusCode int
		want           string
		isAllTables    bool
	}{
		{
			name:           "invoke list_tables all tables detailed output",
			api:            "http://127.0.0.1:5000/api/tool/list_tables/invoke",
			requestBody:    bytes.NewBuffer([]byte(`{"table_names": ""}`)),
			wantStatusCode: http.StatusOK,
			want:           fmt.Sprintf("[%s,%s]", getDetailedWant(tableNameAuth, authTableColumns), getDetailedWant(tableNameParam, paramTableColumns)),
			isAllTables:    true,
		},
		{
			name:           "invoke list_tables all tables simple output",
			api:            "http://127.0.0.1:5000/api/tool/list_tables/invoke",
			requestBody:    bytes.NewBuffer([]byte(`{"table_names": "", "output_format": "simple"}`)),
			wantStatusCode: http.StatusOK,
			want:           fmt.Sprintf("[%s,%s]", getSimpleWant(tableNameAuth), getSimpleWant(tableNameParam)),
			isAllTables:    true,
		},
		{
			name: "invoke list_tables detailed output",
			api:  "http://127.0.0.1:5000/api/tool/list_tables/invoke",
@@ -334,6 +354,19 @@ func runPostgresListTablesTest(t *testing.T, tableNameParam, tableNameAuth strin
		t.Fatalf("failed to unmarshal expected want string: %v", err)
	}

	// Checking only the default public schema where the test tables are created to avoid brittle tests.
	if tc.isAllTables {
		var filteredGot []any
		for _, item := range got {
			if tableMap, ok := item.(map[string]interface{}); ok {
				if schema, ok := tableMap["schema_name"]; ok && schema == "public" {
					filteredGot = append(filteredGot, item)
				}
			}
		}
		got = filteredGot
	}

	sort.SliceStable(got, func(i, j int) bool {
		return fmt.Sprintf("%v", got[i]) < fmt.Sprintf("%v", got[j])
	})
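The same filter-then-sort pattern recurs below for MSSQL with the "dbo" schema (the MySQL variant works on a typed objectDetails slice instead of maps). A generic helper along these lines could collapse the two map-based copies; this is a sketch, not code from the repository:

func filterBySchema(items []any, schema string) []any {
	var filtered []any
	for _, item := range items {
		// Keep only entries whose "schema_name" matches the schema under test.
		if tableMap, ok := item.(map[string]any); ok {
			if s, ok := tableMap["schema_name"]; ok && s == schema {
				filtered = append(filtered, item)
			}
		}
	}
	return filtered
}

Usage in the Postgres case above would then reduce to got = filterBySchema(got, "public"), and to filterBySchema(got, "dbo") in the MSSQL case.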

@@ -91,7 +91,7 @@ func initSpannerClients(ctx context.Context, project, instance, dbname string) (

func TestSpannerToolEndpoints(t *testing.T) {
	sourceConfig := getSpannerVars(t)
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Minute)
	defer cancel()

	var args []string

@@ -1184,7 +1184,15 @@ func RunMySQLListTablesTest(t *testing.T, databaseName, tableNameParam, tableNam
		wantStatusCode int
		want           any
		isSimple       bool
		isAllTables    bool
	}{
		{
			name:           "invoke list_tables for all tables detailed output",
			requestBody:    bytes.NewBufferString(`{"table_names":""}`),
			wantStatusCode: http.StatusOK,
			want:           []objectDetails{authTableWant, paramTableWant},
			isAllTables:    true,
		},
		{
			name:        "invoke list_tables detailed output",
			requestBody: bytes.NewBufferString(fmt.Sprintf(`{"table_names": "%s"}`, tableNameAuth)),
@@ -1293,6 +1301,23 @@ func RunMySQLListTablesTest(t *testing.T, databaseName, tableNameParam, tableNam
		cmpopts.SortSlices(func(a, b map[string]any) bool { return a["name"].(string) < b["name"].(string) }),
	}

	// Checking only the current database where the test tables are created to avoid brittle tests.
	if tc.isAllTables {
		var filteredGot []objectDetails
		if got != nil {
			for _, item := range got.([]objectDetails) {
				if item.SchemaName == databaseName {
					filteredGot = append(filteredGot, item)
				}
			}
		}
		if len(filteredGot) == 0 {
			got = nil
		} else {
			got = filteredGot
		}
	}

	if diff := cmp.Diff(tc.want, got, opts...); diff != "" {
		t.Errorf("Unexpected result: got %#v, want: %#v", got, tc.want)
	}
@@ -1858,7 +1883,24 @@ func RunMSSQLListTablesTest(t *testing.T, tableNameParam, tableNameAuth string)
		requestBody    string
		wantStatusCode int
		want           string
		isAllTables    bool
	}{
		{
			name:           "invoke list_tables for all tables detailed output",
			api:            "http://127.0.0.1:5000/api/tool/list_tables/invoke",
			requestBody:    `{"table_names": ""}`,
			wantStatusCode: http.StatusOK,
			want:           fmt.Sprintf("[%s,%s]", getDetailedWant(tableNameAuth, authTableColumns), getDetailedWant(tableNameParam, paramTableColumns)),
			isAllTables:    true,
		},
		{
			name:           "invoke list_tables for all tables simple output",
			api:            "http://127.0.0.1:5000/api/tool/list_tables/invoke",
			requestBody:    `{"table_names": "", "output_format": "simple"}`,
			wantStatusCode: http.StatusOK,
			want:           fmt.Sprintf("[%s,%s]", getSimpleWant(tableNameAuth), getSimpleWant(tableNameParam)),
			isAllTables:    true,
		},
		{
			name: "invoke list_tables detailed output",
			api:  "http://127.0.0.1:5000/api/tool/list_tables/invoke",
@@ -1968,6 +2010,19 @@ func RunMSSQLListTablesTest(t *testing.T, tableNameParam, tableNameAuth string)
		itemMap["object_details"] = detailsMap
	}

	// Checking only the default dbo schema where the test tables are created to avoid brittle tests.
	if tc.isAllTables {
		var filteredGot []any
		for _, item := range got {
			if tableMap, ok := item.(map[string]interface{}); ok {
				if schema, ok := tableMap["schema_name"]; ok && schema == "dbo" {
					filteredGot = append(filteredGot, item)
				}
			}
		}
		got = filteredGot
	}

	sort.SliceStable(got, func(i, j int) bool {
		return fmt.Sprintf("%v", got[i]) < fmt.Sprintf("%v", got[j])
	})