Compare commits

...

9 Commits

Author SHA1 Message Date
Yuan Teoh
6048756e17 Merge branch 'main' into httplog 2026-02-13 11:07:19 -08:00
Averi Kitsch
a678886ee3 docs: update prebuilt tools ref (#2459)
## Description

> Should include a concise description of the changes (bug or feature), its
> impact, along with a summary of the solution

## PR Checklist

> Thank you for opening a Pull Request! Before submitting your PR, there are a
> few things you can do to make sure it goes smoothly:

- [ ] Make sure you reviewed
  [CONTRIBUTING.md](https://github.com/googleapis/genai-toolbox/blob/main/CONTRIBUTING.md)
- [ ] Make sure to open an issue as a
  [bug/issue](https://github.com/googleapis/genai-toolbox/issues/new/choose)
  before writing your code! That way we can discuss the change, evaluate
  designs, and agree on the general idea
- [ ] Ensure the tests and linter pass
- [ ] Code coverage does not decrease (if any source code was changed)
- [ ] Appropriate docs were updated (if necessary)
- [ ] Make sure to add `!` if this involves a breaking change

🛠️ Fixes #<issue_number_goes_here>
2026-02-13 18:57:55 +00:00
Averi Kitsch
6602abd059 docs: update diagram (#2461)
## Description

> Should include a concise description of the changes (bug or feature), its
> impact, along with a summary of the solution

## PR Checklist

> Thank you for opening a Pull Request! Before submitting your PR, there are a
> few things you can do to make sure it goes smoothly:

- [ ] Make sure you reviewed
  [CONTRIBUTING.md](https://github.com/googleapis/genai-toolbox/blob/main/CONTRIBUTING.md)
- [ ] Make sure to open an issue as a
  [bug/issue](https://github.com/googleapis/genai-toolbox/issues/new/choose)
  before writing your code! That way we can discuss the change, evaluate
  designs, and agree on the general idea
- [ ] Ensure the tests and linter pass
- [ ] Code coverage does not decrease (if any source code was changed)
- [ ] Appropriate docs were updated (if necessary)
- [ ] Make sure to add `!` if this involves a breaking change

🛠️ Fixes #<issue_number_goes_here>
2026-02-13 10:25:34 -08:00
Binh Tran
62b830987d fix: Deflake alloydb omni (#2431)
## Description

Improve logic to check that database is up.

**IMPORTANT** DO NOT MERGE until I have reverted
f7d7d9e708

## PR Checklist

- [x] Make sure you reviewed

[CONTRIBUTING.md](https://github.com/googleapis/genai-toolbox/blob/main/CONTRIBUTING.md)
- [x] https://github.com/googleapis/genai-toolbox/issues/2422
- [ ] Ensure the tests and linter pass
- [ ] Code coverage does not decrease (if any source code was changed)
- [ ] Appropriate docs were updated (if necessary)
- [ ] Make sure to add `!` if this involves a breaking change

🛠️ Fixes #2422

Co-authored-by: Averi Kitsch <akitsch@google.com>
2026-02-13 09:55:53 -08:00
Yuan Teoh
195767bdcd refactor: refactor subcommands and move tests to its own package (#2439)
This PR refactors the command structure to decouple subcommands from the
root command, improving modularity and testability.

**Key Changes:**
- Moved `internal/cli` to `cmd/internal`. Because it now lives in an `internal` folder, packages outside of `cmd` cannot import it.
- Encapsulated I/O: Introduced a new IOStreams struct to standardize in,
out, and errOut handling.
- Shared Dependencies: Extracted shared fields (including IOStreams,
Logger, ServerConfig, and various Tools paths) from the root `Command`
into a new `ToolboxOptions` struct. This also includes moving
`cmd/options.go` to be part of `ToolboxOptions`.
- Logic Migration: Moved setup logic, such as `Setup()` and `LoadConfig()`, into `ToolboxOptions`, removing the need for subcommands to import `rootCmd` (see the wiring sketch after this entry).
- Package Reorganization:
  - Relocated `PersistentFlags` and the tools-file handling to the `cmd/internal` package, removing dependencies on the base command and allowing subcommands to consume these utilities independently.
  - Moved all side-effect registration to the `cmd/internal` package, enabling other packages to import it safely for unit tests.

**Testing Improvements:**
- Subcommand packages can now be tested in isolation without relying on
the base command package.
- Added `TestSubcommandWiring()` to the base command tests to verify
proper subcommand registration.
2026-02-13 02:28:58 +00:00
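For context, here is a minimal sketch of how a subcommand is wired against the shared `ToolboxOptions` after this refactor, pieced together from the test helpers later in this diff. The `main` scaffolding and the `cmd/invoke` import path are assumptions, not part of the PR.

```go
// Sketch of the post-refactor wiring, based on the helpers visible in the
// diff below; package layout and import paths here are illustrative.
package main

import (
	"os"

	"github.com/googleapis/genai-toolbox/cmd/internal"
	"github.com/googleapis/genai-toolbox/cmd/invoke" // assumed path for the invoke subcommand
	"github.com/spf13/cobra"
)

func main() {
	rootCmd := &cobra.Command{Use: "toolbox"}

	// Shared dependencies live in ToolboxOptions instead of the root Command.
	opts := internal.NewToolboxOptions(internal.WithIOStreams(os.Stdout, os.Stderr))

	// Persistent flags (tools-file, log-level, telemetry, ...) bind directly to opts.
	internal.PersistentFlags(rootCmd, opts)

	// Subcommands receive opts; the old RootCommand interface is gone.
	rootCmd.AddCommand(invoke.NewCommand(opts))

	if err := rootCmd.Execute(); err != nil {
		os.Exit(1)
	}
}
```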
Yuan Teoh
d55666504d make a local copy of the schema instead of modifying it directly 2026-02-10 16:36:48 -08:00
Yuan Teoh
df0229c882 Update internal/log/log.go
Co-authored-by: gemini-code-assist[bot] <176961590+gemini-code-assist[bot]@users.noreply.github.com>
2026-02-10 16:29:01 -08:00
Yuan Teoh
d4959093c8 fix panic 2026-02-10 16:07:44 -08:00
Yuan Teoh
8ffafd7937 chore(deps): update httplog to v3 2026-02-10 10:56:47 -08:00
24 changed files with 3474 additions and 3185 deletions

257
cmd/internal/imports.go Normal file
View File

@@ -0,0 +1,257 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package internal
import (
// Import prompt packages for side effect of registration
_ "github.com/googleapis/genai-toolbox/internal/prompts/custom"
// Import tool packages for side effect of registration
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreatecluster"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreateinstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbcreateuser"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetcluster"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetinstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbgetuser"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistclusters"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistinstances"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydblistusers"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydb/alloydbwaitforoperation"
_ "github.com/googleapis/genai-toolbox/internal/tools/alloydbainl"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryanalyzecontribution"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryconversationalanalytics"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigqueryforecast"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygetdatasetinfo"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerygettableinfo"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylistdatasetids"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerylisttableids"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysearchcatalog"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigtable"
_ "github.com/googleapis/genai-toolbox/internal/tools/cassandra/cassandracql"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouseexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselistdatabases"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhouselisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/clickhouse/clickhousesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudgda"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirfetchpage"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatienteverything"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarefhirpatientsearch"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdataset"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdicomstore"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetdicomstoremetrics"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirresource"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirstore"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaregetfhirstoremetrics"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarelistdicomstores"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcarelistfhirstores"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcareretrieverendereddicominstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicominstances"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomseries"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudhealthcare/cloudhealthcaresearchdicomstudies"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminlistlognames"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminlistresourcetypes"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudloggingadmin/cloudloggingadminquerylogs"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudmonitoring"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcloneinstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatebackup"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreatedatabase"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlcreateusers"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlgetinstances"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistdatabases"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqllistinstances"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlrestorebackup"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsql/cloudsqlwaitforoperation"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmssql/cloudsqlmssqlcreateinstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlmysql/cloudsqlmysqlcreateinstance"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlpg/cloudsqlpgcreateinstances"
_ "github.com/googleapis/genai-toolbox/internal/tools/cloudsqlpg/cloudsqlpgupgradeprecheck"
_ "github.com/googleapis/genai-toolbox/internal/tools/cockroachdb/cockroachdbexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/cockroachdb/cockroachdblistschemas"
_ "github.com/googleapis/genai-toolbox/internal/tools/cockroachdb/cockroachdblisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/cockroachdb/cockroachdbsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/couchbase"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataform/dataformcompilelocal"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexlookupentry"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchaspecttypes"
_ "github.com/googleapis/genai-toolbox/internal/tools/dataplex/dataplexsearchentries"
_ "github.com/googleapis/genai-toolbox/internal/tools/dgraph"
_ "github.com/googleapis/genai-toolbox/internal/tools/elasticsearch/elasticsearchesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/firebird/firebirdexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/firebird/firebirdsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreadddocuments"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoredeletedocuments"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetdocuments"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoregetrules"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorelistcollections"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequery"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorequerycollection"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestoreupdatedocument"
_ "github.com/googleapis/genai-toolbox/internal/tools/firestore/firestorevalidaterules"
_ "github.com/googleapis/genai-toolbox/internal/tools/http"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardelement"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookeradddashboardfilter"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerconversationalanalytics"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookercreateprojectfile"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdeleteprojectfile"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerdevmode"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergenerateembedurl"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiondatabases"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnections"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectionschemas"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiontablecolumns"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetconnectiontables"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdashboards"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetdimensions"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetexplores"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetfilters"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetlooks"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmeasures"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetmodels"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetparameters"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojectfile"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojectfiles"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookergetprojects"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthanalyze"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthpulse"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerhealthvacuum"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakedashboard"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookermakelook"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquery"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerquerysql"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerqueryurl"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrundashboard"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerrunlook"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookerupdateprojectfile"
_ "github.com/googleapis/genai-toolbox/internal/tools/looker/lookervalidateproject"
_ "github.com/googleapis/genai-toolbox/internal/tools/mindsdb/mindsdbexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mindsdb/mindsdbsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbaggregate"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeletemany"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbdeleteone"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfind"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbfindone"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertmany"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbinsertone"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdatemany"
_ "github.com/googleapis/genai-toolbox/internal/tools/mongodb/mongodbupdateone"
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqllisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/mssql/mssqlsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlgetqueryplan"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllistactivequeries"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttablefragmentation"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqllisttablesmissinguniqueindexes"
_ "github.com/googleapis/genai-toolbox/internal/tools/mysql/mysqlsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jcypher"
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jexecutecypher"
_ "github.com/googleapis/genai-toolbox/internal/tools/neo4j/neo4jschema"
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbaseexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/oceanbase/oceanbasesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/oracle/oracleexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/oracle/oraclesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresdatabaseoverview"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresgetcolumncardinality"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistactivequeries"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistavailableextensions"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistdatabasestats"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistindexes"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistinstalledextensions"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistlocks"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpgsettings"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistpublicationtables"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistquerystats"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistroles"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistschemas"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistsequences"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresliststoredprocedure"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablespaces"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttablestats"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslisttriggers"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslistviews"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgreslongrunningtransactions"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgresreplicationstats"
_ "github.com/googleapis/genai-toolbox/internal/tools/postgres/postgressql"
_ "github.com/googleapis/genai-toolbox/internal/tools/redis"
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcancelbatch"
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatepysparkbatch"
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkcreatesparkbatch"
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparkgetbatch"
_ "github.com/googleapis/genai-toolbox/internal/tools/serverlessspark/serverlesssparklistbatches"
_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoreexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/singlestore/singlestoresql"
_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakeexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/snowflake/snowflakesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlistgraphs"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannerlisttables"
_ "github.com/googleapis/genai-toolbox/internal/tools/spanner/spannersql"
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqliteexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/tidb/tidbsql"
_ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinoexecutesql"
_ "github.com/googleapis/genai-toolbox/internal/tools/trino/trinosql"
_ "github.com/googleapis/genai-toolbox/internal/tools/utility/wait"
_ "github.com/googleapis/genai-toolbox/internal/tools/valkey"
_ "github.com/googleapis/genai-toolbox/internal/tools/yugabytedbsql"
_ "github.com/googleapis/genai-toolbox/internal/sources/alloydbadmin"
_ "github.com/googleapis/genai-toolbox/internal/sources/alloydbpg"
_ "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
_ "github.com/googleapis/genai-toolbox/internal/sources/bigtable"
_ "github.com/googleapis/genai-toolbox/internal/sources/cassandra"
_ "github.com/googleapis/genai-toolbox/internal/sources/clickhouse"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudgda"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudhealthcare"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudloggingadmin"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudmonitoring"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqladmin"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmssql"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlmysql"
_ "github.com/googleapis/genai-toolbox/internal/sources/cloudsqlpg"
_ "github.com/googleapis/genai-toolbox/internal/sources/cockroachdb"
_ "github.com/googleapis/genai-toolbox/internal/sources/couchbase"
_ "github.com/googleapis/genai-toolbox/internal/sources/dataplex"
_ "github.com/googleapis/genai-toolbox/internal/sources/dgraph"
_ "github.com/googleapis/genai-toolbox/internal/sources/elasticsearch"
_ "github.com/googleapis/genai-toolbox/internal/sources/firebird"
_ "github.com/googleapis/genai-toolbox/internal/sources/firestore"
_ "github.com/googleapis/genai-toolbox/internal/sources/http"
_ "github.com/googleapis/genai-toolbox/internal/sources/looker"
_ "github.com/googleapis/genai-toolbox/internal/sources/mindsdb"
_ "github.com/googleapis/genai-toolbox/internal/sources/mongodb"
_ "github.com/googleapis/genai-toolbox/internal/sources/mssql"
_ "github.com/googleapis/genai-toolbox/internal/sources/mysql"
_ "github.com/googleapis/genai-toolbox/internal/sources/neo4j"
_ "github.com/googleapis/genai-toolbox/internal/sources/oceanbase"
_ "github.com/googleapis/genai-toolbox/internal/sources/oracle"
_ "github.com/googleapis/genai-toolbox/internal/sources/postgres"
_ "github.com/googleapis/genai-toolbox/internal/sources/redis"
_ "github.com/googleapis/genai-toolbox/internal/sources/serverlessspark"
_ "github.com/googleapis/genai-toolbox/internal/sources/singlestore"
_ "github.com/googleapis/genai-toolbox/internal/sources/snowflake"
_ "github.com/googleapis/genai-toolbox/internal/sources/spanner"
_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
_ "github.com/googleapis/genai-toolbox/internal/sources/tidb"
_ "github.com/googleapis/genai-toolbox/internal/sources/trino"
_ "github.com/googleapis/genai-toolbox/internal/sources/valkey"
_ "github.com/googleapis/genai-toolbox/internal/sources/yugabytedb"
)
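The commit message above notes that all side-effect registration now lives in `cmd/internal`, so a package under `cmd/` can pull in every registration with a single blank import. A minimal, hypothetical illustration (the consuming package is an assumption, not part of this diff):

```go
// Hypothetical test file under cmd/ (not from this diff): the blank import
// registers every source, tool, and prompt kind listed in imports.go, so
// configurations referencing them can be parsed during tests.
package mysubcmd_test

import _ "github.com/googleapis/genai-toolbox/cmd/internal"
```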

View File

@@ -18,37 +18,15 @@ import (
"context"
"encoding/json"
"fmt"
"io"
"github.com/googleapis/genai-toolbox/internal/log"
"github.com/googleapis/genai-toolbox/cmd/internal"
"github.com/googleapis/genai-toolbox/internal/server"
"github.com/googleapis/genai-toolbox/internal/server/resources"
"github.com/googleapis/genai-toolbox/internal/util/parameters"
"github.com/spf13/cobra"
)
// RootCommand defines the interface for required by invoke subcommand.
// This allows subcommands to access shared resources and functionality without
// direct coupling to the root command's implementation.
type RootCommand interface {
// Config returns a copy of the current server configuration.
Config() server.ServerConfig
// Out returns the writer used for standard output.
Out() io.Writer
// LoadConfig loads and merges the configuration from files, folders, and prebuilts.
LoadConfig(ctx context.Context) error
// Setup initializes the runtime environment, including logging and telemetry.
// It returns the updated context and a shutdown function to be called when finished.
Setup(ctx context.Context) (context.Context, func(context.Context) error, error)
// Logger returns the logger instance.
Logger() log.Logger
}
func NewCommand(rootCmd RootCommand) *cobra.Command {
func NewCommand(opts *internal.ToolboxOptions) *cobra.Command {
cmd := &cobra.Command{
Use: "invoke <tool-name> [params]",
Short: "Execute a tool directly",
@@ -58,17 +36,17 @@ Example:
toolbox invoke my-tool '{"param1": "value1"}'`,
Args: cobra.MinimumNArgs(1),
RunE: func(c *cobra.Command, args []string) error {
return runInvoke(c, args, rootCmd)
return runInvoke(c, args, opts)
},
}
return cmd
}
func runInvoke(cmd *cobra.Command, args []string, rootCmd RootCommand) error {
func runInvoke(cmd *cobra.Command, args []string, opts *internal.ToolboxOptions) error {
ctx, cancel := context.WithCancel(cmd.Context())
defer cancel()
ctx, shutdown, err := rootCmd.Setup(ctx)
ctx, shutdown, err := opts.Setup(ctx)
if err != nil {
return err
}
@@ -76,16 +54,16 @@ func runInvoke(cmd *cobra.Command, args []string, rootCmd RootCommand) error {
_ = shutdown(ctx)
}()
// Load and merge tool configurations
if err := rootCmd.LoadConfig(ctx); err != nil {
_, err = opts.LoadConfig(ctx)
if err != nil {
return err
}
// Initialize Resources
sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, rootCmd.Config())
sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, opts.Cfg)
if err != nil {
errMsg := fmt.Errorf("failed to initialize resources: %w", err)
rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
@@ -96,7 +74,7 @@ func runInvoke(cmd *cobra.Command, args []string, rootCmd RootCommand) error {
tool, ok := resourceMgr.GetTool(toolName)
if !ok {
errMsg := fmt.Errorf("tool %q not found", toolName)
rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
@@ -109,7 +87,7 @@ func runInvoke(cmd *cobra.Command, args []string, rootCmd RootCommand) error {
if paramsInput != "" {
if err := json.Unmarshal([]byte(paramsInput), &params); err != nil {
errMsg := fmt.Errorf("params must be a valid JSON string: %w", err)
rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
}
@@ -117,14 +95,14 @@ func runInvoke(cmd *cobra.Command, args []string, rootCmd RootCommand) error {
parsedParams, err := parameters.ParseParams(tool.GetParameters(), params, nil)
if err != nil {
errMsg := fmt.Errorf("invalid parameters: %w", err)
rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
parsedParams, err = tool.EmbedParams(ctx, parsedParams, resourceMgr.GetEmbeddingModelMap())
if err != nil {
errMsg := fmt.Errorf("error embedding parameters: %w", err)
rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
@@ -132,19 +110,19 @@ func runInvoke(cmd *cobra.Command, args []string, rootCmd RootCommand) error {
requiresAuth, err := tool.RequiresClientAuthorization(resourceMgr)
if err != nil {
errMsg := fmt.Errorf("failed to check auth requirements: %w", err)
rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
if requiresAuth {
errMsg := fmt.Errorf("client authorization is not supported")
rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
result, err := tool.Invoke(ctx, resourceMgr, parsedParams, "")
if err != nil {
errMsg := fmt.Errorf("tool execution failed: %w", err)
rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
@@ -152,10 +130,10 @@ func runInvoke(cmd *cobra.Command, args []string, rootCmd RootCommand) error {
output, err := json.MarshalIndent(result, "", " ")
if err != nil {
errMsg := fmt.Errorf("failed to marshal result: %w", err)
rootCmd.Logger().ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
fmt.Fprintln(rootCmd.Out(), string(output))
fmt.Fprintln(opts.IOStreams.Out, string(output))
return nil
}

View File

@@ -12,16 +12,38 @@
// See the License for the specific language governing permissions and
// limitations under the License.
package cmd
package invoke
import (
"context"
"bytes"
"os"
"path/filepath"
"strings"
"testing"
"github.com/googleapis/genai-toolbox/cmd/internal"
_ "github.com/googleapis/genai-toolbox/internal/sources/bigquery"
_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
_ "github.com/googleapis/genai-toolbox/internal/tools/bigquery/bigquerysql"
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql"
"github.com/spf13/cobra"
)
func invokeCommand(args []string) (string, error) {
parentCmd := &cobra.Command{Use: "toolbox"}
buf := new(bytes.Buffer)
opts := internal.NewToolboxOptions(internal.WithIOStreams(buf, buf))
internal.PersistentFlags(parentCmd, opts)
cmd := NewCommand(opts)
parentCmd.AddCommand(cmd)
parentCmd.SetArgs(args)
err := parentCmd.Execute()
return buf.String(), err
}
func TestInvokeTool(t *testing.T) {
// Create a temporary tools file
tmpDir := t.TempDir()
@@ -86,7 +108,7 @@ tools:
for _, tc := range tcs {
t.Run(tc.desc, func(t *testing.T) {
_, got, err := invokeCommandWithContext(context.Background(), tc.args)
got, err := invokeCommand(tc.args)
if (err != nil) != tc.wantErr {
t.Fatalf("got error %v, wantErr %v", err, tc.wantErr)
}
@@ -121,7 +143,7 @@ tools:
}
args := []string{"invoke", "bq-tool", "--tools-file", toolsFilePath}
_, _, err := invokeCommandWithContext(context.Background(), args)
_, err := invokeCommand(args)
if err == nil {
t.Fatal("expected error for tool requiring client auth, but got nil")
}

251
cmd/internal/options.go Normal file
View File

@@ -0,0 +1,251 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package internal
import (
"context"
"fmt"
"io"
"os"
"slices"
"strings"
"github.com/googleapis/genai-toolbox/internal/log"
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
"github.com/googleapis/genai-toolbox/internal/server"
"github.com/googleapis/genai-toolbox/internal/telemetry"
"github.com/googleapis/genai-toolbox/internal/util"
)
type IOStreams struct {
In io.Reader
Out io.Writer
ErrOut io.Writer
}
// ToolboxOptions holds dependencies shared by all commands.
type ToolboxOptions struct {
IOStreams IOStreams
Logger log.Logger
Cfg server.ServerConfig
ToolsFile string
ToolsFiles []string
ToolsFolder string
PrebuiltConfigs []string
}
// Option defines a function that modifies the ToolboxOptions struct.
type Option func(*ToolboxOptions)
// NewToolboxOptions creates a new instance with defaults, then applies any
// provided options.
func NewToolboxOptions(opts ...Option) *ToolboxOptions {
o := &ToolboxOptions{
IOStreams: IOStreams{
In: os.Stdin,
Out: os.Stdout,
ErrOut: os.Stderr,
},
}
for _, opt := range opts {
opt(o)
}
return o
}
// Apply allows you to update an EXISTING ToolboxOptions instance.
// This is useful for "late binding".
func (o *ToolboxOptions) Apply(opts ...Option) {
for _, opt := range opts {
opt(o)
}
}
// WithIOStreams updates the IO streams.
func WithIOStreams(out, err io.Writer) Option {
return func(o *ToolboxOptions) {
o.IOStreams.Out = out
o.IOStreams.ErrOut = err
}
}
// Setup creates the logger and telemetry instrumentation.
func (opts *ToolboxOptions) Setup(ctx context.Context) (context.Context, func(context.Context) error, error) {
// If stdio, set logger's out stream (usually DEBUG and INFO logs) to
// errStream
loggerOut := opts.IOStreams.Out
if opts.Cfg.Stdio {
loggerOut = opts.IOStreams.ErrOut
}
// Handle logger separately from config
logger, err := log.NewLogger(opts.Cfg.LoggingFormat.String(), opts.Cfg.LogLevel.String(), loggerOut, opts.IOStreams.ErrOut)
if err != nil {
return ctx, nil, fmt.Errorf("unable to initialize logger: %w", err)
}
ctx = util.WithLogger(ctx, logger)
opts.Logger = logger
// Set up OpenTelemetry
otelShutdown, err := telemetry.SetupOTel(ctx, opts.Cfg.Version, opts.Cfg.TelemetryOTLP, opts.Cfg.TelemetryGCP, opts.Cfg.TelemetryServiceName)
if err != nil {
errMsg := fmt.Errorf("error setting up OpenTelemetry: %w", err)
logger.ErrorContext(ctx, errMsg.Error())
return ctx, nil, errMsg
}
shutdownFunc := func(ctx context.Context) error {
err := otelShutdown(ctx)
if err != nil {
errMsg := fmt.Errorf("error shutting down OpenTelemetry: %w", err)
logger.ErrorContext(ctx, errMsg.Error())
return err
}
return nil
}
instrumentation, err := telemetry.CreateTelemetryInstrumentation(opts.Cfg.Version)
if err != nil {
errMsg := fmt.Errorf("unable to create telemetry instrumentation: %w", err)
logger.ErrorContext(ctx, errMsg.Error())
return ctx, shutdownFunc, errMsg
}
ctx = util.WithInstrumentation(ctx, instrumentation)
return ctx, shutdownFunc, nil
}
// LoadConfig checks and merges the files that should be loaded into the server
func (opts *ToolboxOptions) LoadConfig(ctx context.Context) (bool, error) {
// Determine if Custom Files should be loaded
// Check for explicit custom flags
isCustomConfigured := opts.ToolsFile != "" || len(opts.ToolsFiles) > 0 || opts.ToolsFolder != ""
// Determine if default 'tools.yaml' should be used (No prebuilt AND No custom flags)
useDefaultToolsFile := len(opts.PrebuiltConfigs) == 0 && !isCustomConfigured
if useDefaultToolsFile {
opts.ToolsFile = "tools.yaml"
isCustomConfigured = true
}
logger, err := util.LoggerFromContext(ctx)
if err != nil {
return isCustomConfigured, err
}
var allToolsFiles []ToolsFile
// Load Prebuilt Configuration
if len(opts.PrebuiltConfigs) > 0 {
slices.Sort(opts.PrebuiltConfigs)
sourcesList := strings.Join(opts.PrebuiltConfigs, ", ")
logMsg := fmt.Sprintf("Using prebuilt tool configurations for: %s", sourcesList)
logger.InfoContext(ctx, logMsg)
for _, configName := range opts.PrebuiltConfigs {
buf, err := prebuiltconfigs.Get(configName)
if err != nil {
logger.ErrorContext(ctx, err.Error())
return isCustomConfigured, err
}
// Parse into ToolsFile struct
parsed, err := parseToolsFile(ctx, buf)
if err != nil {
errMsg := fmt.Errorf("unable to parse prebuilt tool configuration for '%s': %w", configName, err)
logger.ErrorContext(ctx, errMsg.Error())
return isCustomConfigured, errMsg
}
allToolsFiles = append(allToolsFiles, parsed)
}
}
// Load Custom Configurations
if isCustomConfigured {
// Enforce exclusivity among custom flags (tools-file vs tools-files vs tools-folder)
if (opts.ToolsFile != "" && len(opts.ToolsFiles) > 0) ||
(opts.ToolsFile != "" && opts.ToolsFolder != "") ||
(len(opts.ToolsFiles) > 0 && opts.ToolsFolder != "") {
errMsg := fmt.Errorf("--tools-file, --tools-files, and --tools-folder flags cannot be used simultaneously")
logger.ErrorContext(ctx, errMsg.Error())
return isCustomConfigured, errMsg
}
var customTools ToolsFile
var err error
if len(opts.ToolsFiles) > 0 {
// Use tools-files
logger.InfoContext(ctx, fmt.Sprintf("Loading and merging %d tool configuration files", len(opts.ToolsFiles)))
customTools, err = LoadAndMergeToolsFiles(ctx, opts.ToolsFiles)
} else if opts.ToolsFolder != "" {
// Use tools-folder
logger.InfoContext(ctx, fmt.Sprintf("Loading and merging all YAML files from directory: %s", opts.ToolsFolder))
customTools, err = LoadAndMergeToolsFolder(ctx, opts.ToolsFolder)
} else {
// Use single file (tools-file or default `tools.yaml`)
buf, readFileErr := os.ReadFile(opts.ToolsFile)
if readFileErr != nil {
errMsg := fmt.Errorf("unable to read tool file at %q: %w", opts.ToolsFile, readFileErr)
logger.ErrorContext(ctx, errMsg.Error())
return isCustomConfigured, errMsg
}
customTools, err = parseToolsFile(ctx, buf)
if err != nil {
err = fmt.Errorf("unable to parse tool file at %q: %w", opts.ToolsFile, err)
}
}
if err != nil {
logger.ErrorContext(ctx, err.Error())
return isCustomConfigured, err
}
allToolsFiles = append(allToolsFiles, customTools)
}
// Modify version string based on loaded configurations
if len(opts.PrebuiltConfigs) > 0 {
tag := "prebuilt"
if isCustomConfigured {
tag = "custom"
}
// prebuiltConfigs is already sorted above
for _, configName := range opts.PrebuiltConfigs {
opts.Cfg.Version += fmt.Sprintf("+%s.%s", tag, configName)
}
}
// Merge Everything
// This will error if custom tools collide with prebuilt tools
finalToolsFile, err := mergeToolsFiles(allToolsFiles...)
if err != nil {
logger.ErrorContext(ctx, err.Error())
return isCustomConfigured, err
}
opts.Cfg.SourceConfigs = finalToolsFile.Sources
opts.Cfg.AuthServiceConfigs = finalToolsFile.AuthServices
opts.Cfg.EmbeddingModelConfigs = finalToolsFile.EmbeddingModels
opts.Cfg.ToolConfigs = finalToolsFile.Tools
opts.Cfg.ToolsetConfigs = finalToolsFile.Toolsets
opts.Cfg.PromptConfigs = finalToolsFile.Prompts
return isCustomConfigured, nil
}

View File

@@ -12,57 +12,38 @@
// See the License for the specific language governing permissions and
// limitations under the License.
package cmd
package internal
import (
"errors"
"io"
"testing"
"github.com/spf13/cobra"
)
func TestCommandOptions(t *testing.T) {
func TestToolboxOptions(t *testing.T) {
w := io.Discard
tcs := []struct {
desc string
isValid func(*Command) error
isValid func(*ToolboxOptions) error
option Option
}{
{
desc: "with logger",
isValid: func(c *Command) error {
if c.outStream != w || c.errStream != w {
isValid: func(o *ToolboxOptions) error {
if o.IOStreams.Out != w || o.IOStreams.ErrOut != w {
return errors.New("loggers do not match")
}
return nil
},
option: WithStreams(w, w),
option: WithIOStreams(w, w),
},
}
for _, tc := range tcs {
t.Run(tc.desc, func(t *testing.T) {
got, err := invokeProxyWithOption(tc.option)
if err != nil {
t.Fatal(err)
}
got := NewToolboxOptions(tc.option)
if err := tc.isValid(got); err != nil {
t.Errorf("option did not initialize command correctly: %v", err)
}
})
}
}
func invokeProxyWithOption(o Option) (*Command, error) {
c := NewCommand(o)
// Keep the test output quiet
c.SilenceUsage = true
c.SilenceErrors = true
// Disable execute behavior
c.RunE = func(*cobra.Command, []string) error {
return nil
}
err := c.Execute()
return c, err
}

View File

@@ -0,0 +1,46 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package internal
import (
"fmt"
"strings"
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
"github.com/spf13/cobra"
)
// PersistentFlags sets up flags that are available for all commands and
// subcommands
// It is also used to set up persistent flags during subcommand unit tests
func PersistentFlags(parentCmd *cobra.Command, opts *ToolboxOptions) {
persistentFlags := parentCmd.PersistentFlags()
persistentFlags.StringVar(&opts.ToolsFile, "tools-file", "", "File path specifying the tool configuration. Cannot be used with --tools-files, or --tools-folder.")
persistentFlags.StringSliceVar(&opts.ToolsFiles, "tools-files", []string{}, "Multiple file paths specifying tool configurations. Files will be merged. Cannot be used with --tools-file, or --tools-folder.")
persistentFlags.StringVar(&opts.ToolsFolder, "tools-folder", "", "Directory path containing YAML tool configuration files. All .yaml and .yml files in the directory will be loaded and merged. Cannot be used with --tools-file, or --tools-files.")
persistentFlags.Var(&opts.Cfg.LogLevel, "log-level", "Specify the minimum level logged. Allowed: 'DEBUG', 'INFO', 'WARN', 'ERROR'.")
persistentFlags.Var(&opts.Cfg.LoggingFormat, "logging-format", "Specify logging format to use. Allowed: 'standard' or 'JSON'.")
persistentFlags.BoolVar(&opts.Cfg.TelemetryGCP, "telemetry-gcp", false, "Enable exporting directly to Google Cloud Monitoring.")
persistentFlags.StringVar(&opts.Cfg.TelemetryOTLP, "telemetry-otlp", "", "Enable exporting using OpenTelemetry Protocol (OTLP) to the specified endpoint (e.g. 'http://127.0.0.1:4318')")
persistentFlags.StringVar(&opts.Cfg.TelemetryServiceName, "telemetry-service-name", "toolbox", "Sets the value of the service.name resource attribute for telemetry data.")
// Fetch prebuilt tools sources to customize the help description
prebuiltHelp := fmt.Sprintf(
"Use a prebuilt tool configuration by source type. Allowed: '%s'. Can be specified multiple times.",
strings.Join(prebuiltconfigs.GetPrebuiltSources(), "', '"),
)
persistentFlags.StringSliceVar(&opts.PrebuiltConfigs, "prebuilt", []string{}, prebuiltHelp)
persistentFlags.StringSliceVar(&opts.Cfg.UserAgentMetadata, "user-agent-metadata", []string{}, "Appends additional metadata to the User-Agent.")
}

View File

@@ -22,7 +22,7 @@ import (
"path/filepath"
"sort"
"github.com/googleapis/genai-toolbox/internal/log"
"github.com/googleapis/genai-toolbox/cmd/internal"
"github.com/googleapis/genai-toolbox/internal/server"
"github.com/googleapis/genai-toolbox/internal/server/resources"
"github.com/googleapis/genai-toolbox/internal/tools"
@@ -30,28 +30,9 @@ import (
"github.com/spf13/cobra"
)
// RootCommand defines the interface for required by skills-generate subcommand.
// This allows subcommands to access shared resources and functionality without
// direct coupling to the root command's implementation.
type RootCommand interface {
// Config returns a copy of the current server configuration.
Config() server.ServerConfig
// LoadConfig loads and merges the configuration from files, folders, and prebuilts.
LoadConfig(ctx context.Context) error
// Setup initializes the runtime environment, including logging and telemetry.
// It returns the updated context and a shutdown function to be called when finished.
Setup(ctx context.Context) (context.Context, func(context.Context) error, error)
// Logger returns the logger instance.
Logger() log.Logger
}
// Command is the command for generating skills.
type Command struct {
// skillsCmd is the command for generating skills.
type skillsCmd struct {
*cobra.Command
rootCmd RootCommand
name string
description string
toolset string
@@ -59,15 +40,13 @@ type Command struct {
}
// NewCommand creates a new Command.
func NewCommand(rootCmd RootCommand) *cobra.Command {
cmd := &Command{
rootCmd: rootCmd,
}
func NewCommand(opts *internal.ToolboxOptions) *cobra.Command {
cmd := &skillsCmd{}
cmd.Command = &cobra.Command{
Use: "skills-generate",
Short: "Generate skills from tool configurations",
RunE: func(c *cobra.Command, args []string) error {
return cmd.run(c)
return run(cmd, opts)
},
}
@@ -81,11 +60,11 @@ func NewCommand(rootCmd RootCommand) *cobra.Command {
return cmd.Command
}
func (c *Command) run(cmd *cobra.Command) error {
func run(cmd *skillsCmd, opts *internal.ToolboxOptions) error {
ctx, cancel := context.WithCancel(cmd.Context())
defer cancel()
ctx, shutdown, err := c.rootCmd.Setup(ctx)
ctx, shutdown, err := opts.Setup(ctx)
if err != nil {
return err
}
@@ -93,39 +72,37 @@ func (c *Command) run(cmd *cobra.Command) error {
_ = shutdown(ctx)
}()
logger := c.rootCmd.Logger()
// Load and merge tool configurations
if err := c.rootCmd.LoadConfig(ctx); err != nil {
_, err = opts.LoadConfig(ctx)
if err != nil {
return err
}
if err := os.MkdirAll(c.outputDir, 0755); err != nil {
if err := os.MkdirAll(cmd.outputDir, 0755); err != nil {
errMsg := fmt.Errorf("error creating output directory: %w", err)
logger.ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
logger.InfoContext(ctx, fmt.Sprintf("Generating skill '%s'...", c.name))
opts.Logger.InfoContext(ctx, fmt.Sprintf("Generating skill '%s'...", cmd.name))
// Initialize toolbox and collect tools
allTools, err := c.collectTools(ctx)
allTools, err := cmd.collectTools(ctx, opts)
if err != nil {
errMsg := fmt.Errorf("error collecting tools: %w", err)
logger.ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
if len(allTools) == 0 {
logger.InfoContext(ctx, "No tools found to generate.")
opts.Logger.InfoContext(ctx, "No tools found to generate.")
return nil
}
// Generate the combined skill directory
skillPath := filepath.Join(c.outputDir, c.name)
skillPath := filepath.Join(cmd.outputDir, cmd.name)
if err := os.MkdirAll(skillPath, 0755); err != nil {
errMsg := fmt.Errorf("error creating skill directory: %w", err)
logger.ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
@@ -133,7 +110,7 @@ func (c *Command) run(cmd *cobra.Command) error {
assetsPath := filepath.Join(skillPath, "assets")
if err := os.MkdirAll(assetsPath, 0755); err != nil {
errMsg := fmt.Errorf("error creating assets dir: %w", err)
logger.ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
@@ -141,7 +118,7 @@ func (c *Command) run(cmd *cobra.Command) error {
scriptsPath := filepath.Join(skillPath, "scripts")
if err := os.MkdirAll(scriptsPath, 0755); err != nil {
errMsg := fmt.Errorf("error creating scripts dir: %w", err)
logger.ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
@@ -154,10 +131,10 @@ func (c *Command) run(cmd *cobra.Command) error {
for _, toolName := range toolNames {
// Generate YAML config in asset directory
minimizedContent, err := generateToolConfigYAML(c.rootCmd.Config(), toolName)
minimizedContent, err := generateToolConfigYAML(opts.Cfg, toolName)
if err != nil {
errMsg := fmt.Errorf("error generating filtered config for %s: %w", toolName, err)
logger.ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
@@ -166,7 +143,7 @@ func (c *Command) run(cmd *cobra.Command) error {
destPath := filepath.Join(assetsPath, specificToolsFileName)
if err := os.WriteFile(destPath, minimizedContent, 0644); err != nil {
errMsg := fmt.Errorf("error writing filtered config for %s: %w", toolName, err)
logger.ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
}
@@ -175,40 +152,40 @@ func (c *Command) run(cmd *cobra.Command) error {
scriptContent, err := generateScriptContent(toolName, specificToolsFileName)
if err != nil {
errMsg := fmt.Errorf("error generating script content for %s: %w", toolName, err)
logger.ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
scriptFilename := filepath.Join(scriptsPath, fmt.Sprintf("%s.js", toolName))
if err := os.WriteFile(scriptFilename, []byte(scriptContent), 0755); err != nil {
errMsg := fmt.Errorf("error writing script %s: %w", scriptFilename, err)
logger.ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
}
// Generate SKILL.md
skillContent, err := generateSkillMarkdown(c.name, c.description, allTools)
skillContent, err := generateSkillMarkdown(cmd.name, cmd.description, allTools)
if err != nil {
errMsg := fmt.Errorf("error generating SKILL.md content: %w", err)
logger.ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
skillMdPath := filepath.Join(skillPath, "SKILL.md")
if err := os.WriteFile(skillMdPath, []byte(skillContent), 0644); err != nil {
errMsg := fmt.Errorf("error writing SKILL.md: %w", err)
logger.ErrorContext(ctx, errMsg.Error())
opts.Logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
logger.InfoContext(ctx, fmt.Sprintf("Successfully generated skill '%s' with %d tools.", c.name, len(allTools)))
opts.Logger.InfoContext(ctx, fmt.Sprintf("Successfully generated skill '%s' with %d tools.", cmd.name, len(allTools)))
return nil
}
func (c *Command) collectTools(ctx context.Context) (map[string]tools.Tool, error) {
func (c *skillsCmd) collectTools(ctx context.Context, opts *internal.ToolboxOptions) (map[string]tools.Tool, error) {
// Initialize Resources
sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, c.rootCmd.Config())
sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, opts.Cfg)
if err != nil {
return nil, fmt.Errorf("failed to initialize resources: %w", err)
}

View File

@@ -12,17 +12,36 @@
// See the License for the specific language governing permissions and
// limitations under the License.
package cmd
package skills
import (
"context"
"bytes"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/googleapis/genai-toolbox/cmd/internal"
_ "github.com/googleapis/genai-toolbox/internal/sources/sqlite"
_ "github.com/googleapis/genai-toolbox/internal/tools/sqlite/sqlitesql"
"github.com/spf13/cobra"
)
func invokeCommand(args []string) (string, error) {
parentCmd := &cobra.Command{Use: "toolbox"}
buf := new(bytes.Buffer)
opts := internal.NewToolboxOptions(internal.WithIOStreams(buf, buf))
internal.PersistentFlags(parentCmd, opts)
cmd := NewCommand(opts)
parentCmd.AddCommand(cmd)
parentCmd.SetArgs(args)
err := parentCmd.Execute()
return buf.String(), err
}
func TestGenerateSkill(t *testing.T) {
// Create a temporary directory for tests
tmpDir := t.TempDir()
@@ -55,10 +74,7 @@ tools:
"--description", "hello tool",
}
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
_, got, err := invokeCommandWithContext(ctx, args)
got, err := invokeCommand(args)
if err != nil {
t.Fatalf("command failed: %v\nOutput: %s", err, got)
}
@@ -136,7 +152,7 @@ func TestGenerateSkill_NoConfig(t *testing.T) {
"--description", "test",
}
_, _, err := invokeCommandWithContext(context.Background(), args)
_, err := invokeCommand(args)
if err == nil {
t.Fatal("expected command to fail when no configuration is provided and tools.yaml is missing")
}
@@ -170,7 +186,7 @@ func TestGenerateSkill_MissingArguments(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
_, got, err := invokeCommandWithContext(context.Background(), tt.args)
got, err := invokeCommand(tt.args)
if err == nil {
t.Fatalf("expected command to fail due to missing arguments, but it succeeded\nOutput: %s", got)
}

349
cmd/internal/tools_file.go Normal file
View File

@@ -0,0 +1,349 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package internal
import (
"bytes"
"context"
"fmt"
"io"
"os"
"path/filepath"
"regexp"
"slices"
"strings"
"github.com/goccy/go-yaml"
"github.com/googleapis/genai-toolbox/internal/server"
)
type ToolsFile struct {
Sources server.SourceConfigs `yaml:"sources"`
AuthServices server.AuthServiceConfigs `yaml:"authServices"`
EmbeddingModels server.EmbeddingModelConfigs `yaml:"embeddingModels"`
Tools server.ToolConfigs `yaml:"tools"`
Toolsets server.ToolsetConfigs `yaml:"toolsets"`
Prompts server.PromptConfigs `yaml:"prompts"`
}
// parseEnv replaces environment variables ${ENV_NAME} with their values.
// It also supports the ${ENV_NAME:default_value} form.
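// Example (illustrative, not from the committed file): with HOST and PORT
// both unset,
//
//	parseEnv("addr: ${HOST:localhost}:${PORT}")
//
// returns "addr: localhost:" together with an error noting that PORT is not set.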
func parseEnv(input string) (string, error) {
re := regexp.MustCompile(`\$\{(\w+)(:([^}]*))?\}`)
var err error
output := re.ReplaceAllStringFunc(input, func(match string) string {
parts := re.FindStringSubmatch(match)
// extract the variable name
variableName := parts[1]
if value, found := os.LookupEnv(variableName); found {
return value
}
if len(parts) >= 4 && parts[2] != "" {
return parts[3]
}
err = fmt.Errorf("environment variable not found: %q", variableName)
return ""
})
return output, err
}
// parseToolsFile parses the provided yaml into appropriate configs.
func parseToolsFile(ctx context.Context, raw []byte) (ToolsFile, error) {
var toolsFile ToolsFile
// Replace environment variables if found
output, err := parseEnv(string(raw))
if err != nil {
return toolsFile, fmt.Errorf("error parsing environment variables: %s", err)
}
raw = []byte(output)
raw, err = convertToolsFile(raw)
if err != nil {
return toolsFile, fmt.Errorf("error converting tools file: %s", err)
}
// Parse contents
toolsFile.Sources, toolsFile.AuthServices, toolsFile.EmbeddingModels, toolsFile.Tools, toolsFile.Toolsets, toolsFile.Prompts, err = server.UnmarshalResourceConfig(ctx, raw)
if err != nil {
return toolsFile, err
}
return toolsFile, nil
}
func convertToolsFile(raw []byte) ([]byte, error) {
var input yaml.MapSlice
decoder := yaml.NewDecoder(bytes.NewReader(raw), yaml.UseOrderedMap())
// convert to tools file v2
var buf bytes.Buffer
encoder := yaml.NewEncoder(&buf)
v1keys := []string{"sources", "authSources", "authServices", "embeddingModels", "tools", "toolsets", "prompts"}
for {
if err := decoder.Decode(&input); err != nil {
if err == io.EOF {
break
}
return nil, err
}
for _, item := range input {
key, ok := item.Key.(string)
if !ok {
return nil, fmt.Errorf("unexpected non-string key in input: %v", item.Key)
}
// check if the key is config file v1's key
if slices.Contains(v1keys, key) {
// check if value conversion to yaml.MapSlice successfully
// fields such as "tools" in toolsets might pass the first check but
// fail to convert to MapSlice
if slice, ok := item.Value.(yaml.MapSlice); ok {
// Deprecated: convert authSources to authServices
if key == "authSources" {
key = "authServices"
}
transformed, err := transformDocs(key, slice)
if err != nil {
return nil, err
}
// encode per-doc
for _, doc := range transformed {
if err := encoder.Encode(doc); err != nil {
return nil, err
}
}
} else {
// invalid input will be ignored
// we don't want to throw error here since the config could
// be valid but with a different order such as:
// ---
// tools:
// - tool_a
// kind: toolsets
// ---
continue
}
} else {
// this doc is already v2, encode to buf
if err := encoder.Encode(input); err != nil {
return nil, err
}
break
}
}
}
return buf.Bytes(), nil
}
// transformDocs transforms the configuration file from v1 format to v2.
// yaml.MapSlice preserves key order within a map.
func transformDocs(kind string, input yaml.MapSlice) ([]yaml.MapSlice, error) {
var transformed []yaml.MapSlice
for _, entry := range input {
entryName, ok := entry.Key.(string)
if !ok {
return nil, fmt.Errorf("unexpected non-string key for entry in '%s': %v", kind, entry.Key)
}
entryBody := ProcessValue(entry.Value, kind == "toolsets")
currentTransformed := yaml.MapSlice{
{Key: "kind", Value: kind},
{Key: "name", Value: entryName},
}
// Merge the transformed body into our result
if bodySlice, ok := entryBody.(yaml.MapSlice); ok {
currentTransformed = append(currentTransformed, bodySlice...)
} else {
return nil, fmt.Errorf("unable to convert entryBody to MapSlice")
}
transformed = append(transformed, currentTransformed)
}
return transformed, nil
}
// ProcessValue recursively looks for MapSlices to rename 'kind' -> 'type'
func ProcessValue(v any, isToolset bool) any {
switch val := v.(type) {
case yaml.MapSlice:
// creating a new MapSlice is safer for recursive transformation
newVal := make(yaml.MapSlice, len(val))
for i, item := range val {
// Perform renaming
if item.Key == "kind" {
item.Key = "type"
}
// Recursive call for nested values (e.g., nested objects or lists)
item.Value = ProcessValue(item.Value, false)
newVal[i] = item
}
return newVal
case []any:
// Process lists: If it's a toolset top-level list, wrap it.
if isToolset {
return yaml.MapSlice{{Key: "tools", Value: val}}
}
// Otherwise, recurse into list items (to catch nested objects)
newVal := make([]any, len(val))
for i := range val {
newVal[i] = ProcessValue(val[i], false)
}
return newVal
default:
return val
}
}
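// For example, a hypothetical v1 file such as
//
//	sources:
//	  my-pg:
//	    kind: postgres
//	    host: localhost
//	toolsets:
//	  default:
//	    - my-tool
//
// is rewritten by convertToolsFile into roughly the following v2 documents,
// one per entry, with nested 'kind' fields renamed to 'type' and toolset
// lists wrapped under a 'tools' key:
//
//	kind: sources
//	name: my-pg
//	type: postgres
//	host: localhost
//	---
//	kind: toolsets
//	name: default
//	tools:
//	  - my-tool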
// mergeToolsFiles merges multiple ToolsFile structs into one.
// Detects and reports conflicts across sources, authServices, embeddingModels, tools, toolsets, and prompts.
// All resource names must be unique across all files.
func mergeToolsFiles(files ...ToolsFile) (ToolsFile, error) {
merged := ToolsFile{
Sources: make(server.SourceConfigs),
AuthServices: make(server.AuthServiceConfigs),
EmbeddingModels: make(server.EmbeddingModelConfigs),
Tools: make(server.ToolConfigs),
Toolsets: make(server.ToolsetConfigs),
Prompts: make(server.PromptConfigs),
}
var conflicts []string
for fileIndex, file := range files {
// Check for conflicts and merge sources
for name, source := range file.Sources {
if _, exists := merged.Sources[name]; exists {
conflicts = append(conflicts, fmt.Sprintf("source '%s' (file #%d)", name, fileIndex+1))
} else {
merged.Sources[name] = source
}
}
// Check for conflicts and merge authServices
for name, authService := range file.AuthServices {
if _, exists := merged.AuthServices[name]; exists {
conflicts = append(conflicts, fmt.Sprintf("authService '%s' (file #%d)", name, fileIndex+1))
} else {
merged.AuthServices[name] = authService
}
}
// Check for conflicts and merge embeddingModels
for name, em := range file.EmbeddingModels {
if _, exists := merged.EmbeddingModels[name]; exists {
conflicts = append(conflicts, fmt.Sprintf("embedding model '%s' (file #%d)", name, fileIndex+1))
} else {
merged.EmbeddingModels[name] = em
}
}
// Check for conflicts and merge tools
for name, tool := range file.Tools {
if _, exists := merged.Tools[name]; exists {
conflicts = append(conflicts, fmt.Sprintf("tool '%s' (file #%d)", name, fileIndex+1))
} else {
merged.Tools[name] = tool
}
}
// Check for conflicts and merge toolsets
for name, toolset := range file.Toolsets {
if _, exists := merged.Toolsets[name]; exists {
conflicts = append(conflicts, fmt.Sprintf("toolset '%s' (file #%d)", name, fileIndex+1))
} else {
merged.Toolsets[name] = toolset
}
}
// Check for conflicts and merge prompts
for name, prompt := range file.Prompts {
if _, exists := merged.Prompts[name]; exists {
conflicts = append(conflicts, fmt.Sprintf("prompt '%s' (file #%d)", name, fileIndex+1))
} else {
merged.Prompts[name] = prompt
}
}
}
// If conflicts were detected, return an error
if len(conflicts) > 0 {
return ToolsFile{}, fmt.Errorf("resource conflicts detected:\n - %s\n\nPlease ensure each source, authService, tool, toolset and prompt has a unique name across all files", strings.Join(conflicts, "\n - "))
}
return merged, nil
}
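// For example, if a second file re-defines a source named "my-pg" and a tool
// named "search_flights" that the first file already declared (names here are
// hypothetical), the returned error lists every clash:
//
//	resource conflicts detected:
//	 - source 'my-pg' (file #2)
//	 - tool 'search_flights' (file #2)
//
// followed by the reminder that names must be unique across all files.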
// LoadAndMergeToolsFiles loads multiple YAML files and merges them
func LoadAndMergeToolsFiles(ctx context.Context, filePaths []string) (ToolsFile, error) {
var toolsFiles []ToolsFile
for _, filePath := range filePaths {
buf, err := os.ReadFile(filePath)
if err != nil {
return ToolsFile{}, fmt.Errorf("unable to read tool file at %q: %w", filePath, err)
}
toolsFile, err := parseToolsFile(ctx, buf)
if err != nil {
return ToolsFile{}, fmt.Errorf("unable to parse tool file at %q: %w", filePath, err)
}
toolsFiles = append(toolsFiles, toolsFile)
}
mergedFile, err := mergeToolsFiles(toolsFiles...)
if err != nil {
return ToolsFile{}, fmt.Errorf("unable to merge tools files: %w", err)
}
return mergedFile, nil
}
// LoadAndMergeToolsFolder loads all YAML files from a directory and merges them
func LoadAndMergeToolsFolder(ctx context.Context, folderPath string) (ToolsFile, error) {
// Check if directory exists
info, err := os.Stat(folderPath)
if err != nil {
return ToolsFile{}, fmt.Errorf("unable to access tools folder at %q: %w", folderPath, err)
}
if !info.IsDir() {
return ToolsFile{}, fmt.Errorf("path %q is not a directory", folderPath)
}
// Find all YAML files in the directory
pattern := filepath.Join(folderPath, "*.yaml")
yamlFiles, err := filepath.Glob(pattern)
if err != nil {
return ToolsFile{}, fmt.Errorf("error finding YAML files in %q: %w", folderPath, err)
}
// Also find .yml files
ymlPattern := filepath.Join(folderPath, "*.yml")
ymlFiles, err := filepath.Glob(ymlPattern)
if err != nil {
return ToolsFile{}, fmt.Errorf("error finding YML files in %q: %w", folderPath, err)
}
// Combine both file lists
allFiles := append(yamlFiles, ymlFiles...)
if len(allFiles) == 0 {
return ToolsFile{}, fmt.Errorf("no YAML files found in directory %q", folderPath)
}
// Use existing LoadAndMergeToolsFiles function
return LoadAndMergeToolsFiles(ctx, allFiles)
}
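A minimal sketch of how a caller in the same package might combine these helpers at startup; the function name and the folder-versus-files precedence here are assumptions for illustration, not the actual command wiring:

// loadConfig is a hypothetical helper: use a tools folder when one is
// configured, otherwise merge the explicitly listed tools files.
func loadConfig(ctx context.Context, files []string, folder string) (ToolsFile, error) {
	if folder != "" {
		return LoadAndMergeToolsFolder(ctx, folder)
	}
	return LoadAndMergeToolsFiles(ctx, files)
}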

File diff suppressed because it is too large


@@ -1,30 +0,0 @@
// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cmd
import (
"io"
)
// Option is a function that configures a Command.
type Option func(*Command)
// WithStreams overrides the default writer.
func WithStreams(out, err io.Writer) Option {
return func(c *Command) {
c.outStream = out
c.errStream = err
}
}

File diff suppressed because it is too large

File diff suppressed because it is too large

Binary file not shown (image changed: 241 KiB before, 271 KiB after).


@@ -44,6 +44,12 @@ See [Usage Examples](../reference/cli.md#examples).
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
* `list_active_queries`: Lists ongoing queries.
* `list_available_extensions`: Discover all PostgreSQL extensions available for installation.
* `list_installed_extensions`: List all installed PostgreSQL extensions.
* `long_running_transactions`: Identifies and lists database transactions that exceed a specified time limit.
* `list_locks`: Identifies all locks held by active processes.
* `replication_stats`: Lists each replica's process ID and sync state.
* `list_autovacuum_configurations`: Lists autovacuum configurations in the
database.
* `list_memory_configurations`: Lists memory-related configurations in the
@@ -59,12 +65,16 @@ See [Usage Examples](../reference/cli.md#examples).
* `list_triggers`: Lists triggers in the database.
* `list_indexes`: List available user indexes in a PostgreSQL database.
* `list_sequences`: List sequences in a PostgreSQL database.
* `list_query_stats`: Lists query statistics.
* `get_column_cardinality`: Gets column cardinality.
* `list_table_stats`: Lists table statistics.
* `list_publication_tables`: List publication tables in a PostgreSQL database.
* `list_tablespaces`: Lists tablespaces in the database.
* `list_pg_settings`: List configuration parameters for the PostgreSQL server.
* `list_database_stats`: Lists the key performance and activity statistics for
each database in the AlloyDB instance.
* `list_roles`: Lists all the user-created roles in PostgreSQL database.
* `list_stored_procedure`: Lists stored procedures.
## AlloyDB Postgres Admin
@@ -113,6 +123,12 @@ See [Usage Examples](../reference/cli.md#examples).
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
* `list_active_queries`: Lists ongoing queries.
* `list_available_extensions`: Discover all PostgreSQL extensions available for installation.
* `list_installed_extensions`: List all installed PostgreSQL extensions.
* `long_running_transactions`: Identifies and lists database transactions that exceed a specified time limit.
* `list_locks`: Identifies all locks held by active processes.
* `replication_stats`: Lists each replica's process ID and sync state.
* `list_autovacuum_configurations`: Lists autovacuum configurations in the
database.
* `list_columnar_configurations`: List AlloyDB Omni columnar-related configurations.
@@ -130,12 +146,16 @@ See [Usage Examples](../reference/cli.md#examples).
* `list_triggers`: Lists triggers in the database.
* `list_indexes`: List available user indexes in a PostgreSQL database.
* `list_sequences`: List sequences in a PostgreSQL database.
* `list_query_stats`: Lists query statistics.
* `get_column_cardinality`: Gets column cardinality.
* `list_table_stats`: Lists table statistics.
* `list_publication_tables`: List publication tables in a PostgreSQL database.
* `list_tablespaces`: Lists tablespaces in the database.
* `list_pg_settings`: List configuration parameters for the PostgreSQL server.
* `list_database_stats`: Lists the key performance and activity statistics for
each database in the AlloyDB instance.
* `list_roles`: Lists all the user-created roles in PostgreSQL database.
* `list_stored_procedure`: Lists stored procedures.
## BigQuery
@@ -173,6 +193,21 @@ See [Usage Examples](../reference/cli.md#examples).
* `list_table_ids`: Lists tables.
* `search_catalog`: Search for entries based on the provided query.
## ClickHouse
* `--prebuilt` value: `clickhouse`
* **Environment Variables:**
* `CLICKHOUSE_HOST`: The hostname or IP address of the ClickHouse server.
* `CLICKHOUSE_PORT`: The port number of the ClickHouse server.
* `CLICKHOUSE_USER`: The database username.
* `CLICKHOUSE_PASSWORD`: The password for the database user.
* `CLICKHOUSE_DATABASE`: The name of the database to connect to.
* `CLICKHOUSE_PROTOCOL`: The protocol to use (e.g., http).
* **Tools:**
* `execute_sql`: Use this tool to execute SQL.
* `list_databases`: Use this tool to list all databases in ClickHouse.
* `list_tables`: Use this tool to list all tables in a specific ClickHouse database.
## Cloud SQL for MySQL
* `--prebuilt` value: `cloud-sql-mysql`
@@ -270,6 +305,12 @@ See [Usage Examples](../reference/cli.md#examples).
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
* `list_active_queries`: Lists ongoing queries.
* `list_available_extensions`: Discover all PostgreSQL extensions available for installation.
* `list_installed_extensions`: List all installed PostgreSQL extensions.
* `long_running_transactions`: Identifies and lists database transactions that exceed a specified time limit.
* `list_locks`: Identifies all locks held by active processes.
* `replication_stats`: Lists each replica's process ID and sync state.
* `list_autovacuum_configurations`: Lists autovacuum configurations in the
database.
* `list_memory_configurations`: Lists memory-related configurations in the
@@ -285,12 +326,16 @@ See [Usage Examples](../reference/cli.md#examples).
* `list_triggers`: Lists triggers in the database.
* `list_indexes`: List available user indexes in a PostgreSQL database.
* `list_sequences`: List sequences in a PostgreSQL database.
* `list_query_stats`: Lists query statistics.
* `get_column_cardinality`: Gets column cardinality.
* `list_table_stats`: Lists table statistics.
* `list_publication_tables`: List publication tables in a PostgreSQL database.
* `list_tablespaces`: Lists tablespaces in the database.
* `list_pg_settings`: List configuration parameters for the PostgreSQL server.
* `list_database_stats`: Lists the key performance and activity statistics for
each database in the PostgreSQL instance.
* `list_roles`: Lists all the user-created roles in PostgreSQL database.
* `list_stored_procedure`: Lists stored procedures.
## Cloud SQL for PostgreSQL Observability
@@ -336,6 +381,7 @@ See [Usage Examples](../reference/cli.md#examples).
* `create_user`: Creates a new user in a Cloud SQL instance.
* `wait_for_operation`: Waits for a Cloud SQL operation to complete.
* `clone_instance`: Creates a clone for an existing Cloud SQL for PostgreSQL instance.
* `postgres_upgrade_precheck`: Performs a precheck for a major version upgrade of a Cloud SQL for PostgreSQL instance.
* `create_backup`: Creates a backup on a Cloud SQL instance.
* `restore_backup`: Restores a backup of a Cloud SQL instance.
@@ -420,6 +466,15 @@ See [Usage Examples](../reference/cli.md#examples).
* `search_aspect_types`: Finds aspect types relevant to the
query.
## Elasticsearch
* `--prebuilt` value: `elasticsearch`
* **Environment Variables:**
* `ELASTICSEARCH_HOST`: The hostname or IP address of the Elasticsearch server.
* `ELASTICSEARCH_APIKEY`: The API key for authentication.
* **Tools:**
* `execute_esql_query`: Use this tool to execute ES|QL queries.
## Firestore
* `--prebuilt` value: `firestore`
@@ -537,6 +592,19 @@ See [Usage Examples](../reference/cli.md#examples).
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
## MindsDB
* `--prebuilt` value: `mindsdb`
* **Environment Variables:**
* `MINDSDB_HOST`: The hostname or IP address of the MindsDB server.
* `MINDSDB_PORT`: The port number of the MindsDB server.
* `MINDSDB_DATABASE`: The name of the database to connect to.
* `MINDSDB_USER`: The database username.
* `MINDSDB_PASS`: The password for the database user.
* **Tools:**
* `mindsdb-execute-sql`: Execute SQL queries directly on MindsDB database.
* `mindsdb-sql`: Execute parameterized SQL queries on MindsDB database.
## MySQL
* `--prebuilt` value: `mysql`
@@ -592,6 +660,12 @@ See [Usage Examples](../reference/cli.md#examples).
* **Tools:**
* `execute_sql`: Executes a SQL query.
* `list_tables`: Lists tables in the database.
* `list_active_queries`: Lists ongoing queries.
* `list_available_extensions`: Discover all PostgreSQL extensions available for installation.
* `list_installed_extensions`: List all installed PostgreSQL extensions.
* `long_running_transactions`: Identifies and lists database transactions that exceed a specified time limit.
* `list_locks`: Identifies all locks held by active processes.
* `replication_stats`: Lists each replica's process ID and sync state.
* `list_autovacuum_configurations`: Lists autovacuum configurations in the
database.
* `list_memory_configurations`: Lists memory-related configurations in the
@@ -607,12 +681,16 @@ See [Usage Examples](../reference/cli.md#examples).
* `list_triggers`: Lists triggers in the database.
* `list_indexes`: List available user indexes in a PostgreSQL database.
* `list_sequences`: List sequences in a PostgreSQL database.
* `list_query_stats`: Lists query statistics.
* `get_column_cardinality`: Gets column cardinality.
* `list_table_stats`: Lists table statistics.
* `list_publication_tables`: List publication tables in a PostgreSQL database.
* `list_tablespaces`: Lists tablespaces in the database.
* `list_pg_settings`: List configuration parameters for the PostgreSQL server.
* `list_database_stats`: Lists the key performance and activity statistics for
each database in the PostgreSQL server.
* `list_roles`: Lists all the user-created roles in PostgreSQL database.
* `list_stored_procedure`: Lists stored procedures.
## Google Cloud Serverless for Apache Spark
@@ -627,6 +705,38 @@ See [Usage Examples](../reference/cli.md#examples).
view serverless batches.
* **Tools:**
* `list_batches`: Lists Spark batches.
* `get_batch`: Gets information about a Spark batch.
* `cancel_batch`: Cancels a Spark batch.
* `create_pyspark_batch`: Creates a PySpark batch.
* `create_spark_batch`: Creates a Spark batch.
## SingleStore
* `--prebuilt` value: `singlestore`
* **Environment Variables:**
* `SINGLESTORE_HOST`: The hostname or IP address of the SingleStore server.
* `SINGLESTORE_PORT`: The port number of the SingleStore server.
* `SINGLESTORE_DATABASE`: The name of the database to connect to.
* `SINGLESTORE_USER`: The database username.
* `SINGLESTORE_PASSWORD`: The password for the database user.
* **Tools:**
* `execute_sql`: Use this tool to execute SQL.
* `list_tables`: Lists detailed schema information for user-created tables.
## Snowflake
* `--prebuilt` value: `snowflake`
* **Environment Variables:**
* `SNOWFLAKE_ACCOUNT`: The Snowflake account.
* `SNOWFLAKE_USER`: The database username.
* `SNOWFLAKE_PASSWORD`: The password for the database user.
* `SNOWFLAKE_DATABASE`: The name of the database to connect to.
* `SNOWFLAKE_SCHEMA`: The schema name.
* `SNOWFLAKE_WAREHOUSE`: The warehouse name.
* `SNOWFLAKE_ROLE`: The role name.
* **Tools:**
* `execute_sql`: Use this tool to execute SQL.
* `list_tables`: Lists detailed schema information for user-created tables.
## Spanner (GoogleSQL dialect)

go.mod (2 changed lines)

@@ -29,7 +29,7 @@ require (
github.com/fsnotify/fsnotify v1.9.0
github.com/go-chi/chi/v5 v5.2.3
github.com/go-chi/cors v1.2.2
github.com/go-chi/httplog/v2 v2.1.1
github.com/go-chi/httplog/v3 v3.3.0
github.com/go-chi/render v1.0.3
github.com/go-goquery/goquery v1.0.1
github.com/go-playground/validator/v10 v10.28.0

go.sum (4 changed lines)

@@ -902,8 +902,8 @@ github.com/go-chi/chi/v5 v5.2.3 h1:WQIt9uxdsAbgIYgid+BpYc+liqQZGMHRaUwp0JUcvdE=
github.com/go-chi/chi/v5 v5.2.3/go.mod h1:L2yAIGWB3H+phAw1NxKwWM+7eUH/lU8pOMm5hHcoops=
github.com/go-chi/cors v1.2.2 h1:Jmey33TE+b+rB7fT8MUy1u0I4L+NARQlK6LhzKPSyQE=
github.com/go-chi/cors v1.2.2/go.mod h1:sSbTewc+6wYHBBCW7ytsFSn836hqM7JxpglAy2Vzc58=
github.com/go-chi/httplog/v2 v2.1.1 h1:ojojiu4PIaoeJ/qAO4GWUxJqvYUTobeo7zmuHQJAxRk=
github.com/go-chi/httplog/v2 v2.1.1/go.mod h1:/XXdxicJsp4BA5fapgIC3VuTD+z0Z/VzukoB3VDc1YE=
github.com/go-chi/httplog/v3 v3.3.0 h1:Gr6Y7nSzbpyCyRwKPOVKjDH3BH6TH5uvRNDsTZWDpvU=
github.com/go-chi/httplog/v3 v3.3.0/go.mod h1:N/J1l5l1fozUrqIVuT8Z/HzNeSy8TF2EFyokPLe6y2w=
github.com/go-chi/render v1.0.3 h1:AsXqd2a1/INaIfUSKq3G5uA8weYx20FOsM7uSoCyyt4=
github.com/go-chi/render v1.0.3/go.mod h1:/gr3hVkmYR0YlEy3LxCuVRFzEu9Ruok+gFqbIofjao0=
github.com/go-faster/city v1.0.1 h1:4WAxSZ3V2Ws4QRDrscLEDcibJY8uf41H6AhXDrNDcGw=


@@ -59,25 +59,35 @@ func NewStdLogger(outW, errW io.Writer, logLevel string) (Logger, error) {
}
// DebugContext logs debug messages
func (sl *StdLogger) DebugContext(ctx context.Context, msg string, keysAndValues ...interface{}) {
func (sl *StdLogger) DebugContext(ctx context.Context, msg string, keysAndValues ...any) {
sl.outLogger.DebugContext(ctx, msg, keysAndValues...)
}
// InfoContext logs info messages
func (sl *StdLogger) InfoContext(ctx context.Context, msg string, keysAndValues ...interface{}) {
func (sl *StdLogger) InfoContext(ctx context.Context, msg string, keysAndValues ...any) {
sl.outLogger.InfoContext(ctx, msg, keysAndValues...)
}
// WarnContext logs warning messages
func (sl *StdLogger) WarnContext(ctx context.Context, msg string, keysAndValues ...interface{}) {
func (sl *StdLogger) WarnContext(ctx context.Context, msg string, keysAndValues ...any) {
sl.errLogger.WarnContext(ctx, msg, keysAndValues...)
}
// ErrorContext logs error messages
func (sl *StdLogger) ErrorContext(ctx context.Context, msg string, keysAndValues ...interface{}) {
func (sl *StdLogger) ErrorContext(ctx context.Context, msg string, keysAndValues ...any) {
sl.errLogger.ErrorContext(ctx, msg, keysAndValues...)
}
// SlogLogger returns a single standard *slog.Logger that routes
// records to the outLogger or errLogger based on the log level.
func (sl *StdLogger) SlogLogger() *slog.Logger {
splitHandler := &SplitHandler{
OutHandler: sl.outLogger.Handler(),
ErrHandler: sl.errLogger.Handler(),
}
return slog.New(splitHandler)
}
const (
Debug = "DEBUG"
Info = "INFO"
@@ -177,21 +187,64 @@ func NewStructuredLogger(outW, errW io.Writer, logLevel string) (Logger, error)
}
// DebugContext logs debug messages
func (sl *StructuredLogger) DebugContext(ctx context.Context, msg string, keysAndValues ...interface{}) {
func (sl *StructuredLogger) DebugContext(ctx context.Context, msg string, keysAndValues ...any) {
sl.outLogger.DebugContext(ctx, msg, keysAndValues...)
}
// InfoContext logs info messages
func (sl *StructuredLogger) InfoContext(ctx context.Context, msg string, keysAndValues ...interface{}) {
func (sl *StructuredLogger) InfoContext(ctx context.Context, msg string, keysAndValues ...any) {
sl.outLogger.InfoContext(ctx, msg, keysAndValues...)
}
// WarnContext logs warning messages
func (sl *StructuredLogger) WarnContext(ctx context.Context, msg string, keysAndValues ...interface{}) {
func (sl *StructuredLogger) WarnContext(ctx context.Context, msg string, keysAndValues ...any) {
sl.errLogger.WarnContext(ctx, msg, keysAndValues...)
}
// ErrorContext logs error messages
func (sl *StructuredLogger) ErrorContext(ctx context.Context, msg string, keysAndValues ...interface{}) {
func (sl *StructuredLogger) ErrorContext(ctx context.Context, msg string, keysAndValues ...any) {
sl.errLogger.ErrorContext(ctx, msg, keysAndValues...)
}
// SlogLogger returns a single standard *slog.Logger that routes
// records to the outLogger or errLogger based on the log level.
func (sl *StructuredLogger) SlogLogger() *slog.Logger {
splitHandler := &SplitHandler{
OutHandler: sl.outLogger.Handler(),
ErrHandler: sl.errLogger.Handler(),
}
return slog.New(splitHandler)
}
type SplitHandler struct {
OutHandler slog.Handler
ErrHandler slog.Handler
}
func (h *SplitHandler) Enabled(ctx context.Context, level slog.Level) bool {
if level >= slog.LevelWarn {
return h.ErrHandler.Enabled(ctx, level)
}
return h.OutHandler.Enabled(ctx, level)
}
func (h *SplitHandler) Handle(ctx context.Context, r slog.Record) error {
if r.Level >= slog.LevelWarn {
return h.ErrHandler.Handle(ctx, r)
}
return h.OutHandler.Handle(ctx, r)
}
func (h *SplitHandler) WithAttrs(attrs []slog.Attr) slog.Handler {
return &SplitHandler{
OutHandler: h.OutHandler.WithAttrs(attrs),
ErrHandler: h.ErrHandler.WithAttrs(attrs),
}
}
func (h *SplitHandler) WithGroup(name string) slog.Handler {
return &SplitHandler{
OutHandler: h.OutHandler.WithGroup(name),
ErrHandler: h.ErrHandler.WithGroup(name),
}
}
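SplitHandler is what lets a single *slog.Logger serve both streams: records at WARN and above go to the error handler, everything else to the out handler. A minimal sketch of the same routing (assuming log/slog and os are imported; only the SplitHandler type comes from the code above):

	out := slog.NewTextHandler(os.Stdout, &slog.HandlerOptions{Level: slog.LevelDebug})
	errH := slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{Level: slog.LevelWarn})
	logger := slog.New(&SplitHandler{OutHandler: out, ErrHandler: errH})
	logger.Info("server listening", "port", 5000)    // written to stdout
	logger.Warn("slow request", "duration_ms", 1200) // routed to stderr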


@@ -16,16 +16,20 @@ package log
import (
"context"
"log/slog"
)
// Logger is the interface used throughout the project for logging.
type Logger interface {
// DebugContext is for reporting additional information about internal operations.
DebugContext(ctx context.Context, format string, args ...interface{})
DebugContext(ctx context.Context, format string, args ...any)
// InfoContext is for reporting informational messages.
InfoContext(ctx context.Context, format string, args ...interface{})
InfoContext(ctx context.Context, format string, args ...any)
// WarnContext is for reporting warning messages.
WarnContext(ctx context.Context, format string, args ...interface{})
WarnContext(ctx context.Context, format string, args ...any)
// ErrorContext is for reporting errors.
ErrorContext(ctx context.Context, format string, args ...interface{})
ErrorContext(ctx context.Context, format string, args ...any)
// Single standard slog.Logger that routes records to the outLogger or
// errLogger based on log levels
SlogLogger() *slog.Logger
}


@@ -28,7 +28,7 @@ import (
"github.com/go-chi/chi/v5"
"github.com/go-chi/chi/v5/middleware"
"github.com/go-chi/cors"
"github.com/go-chi/httplog/v2"
"github.com/go-chi/httplog/v3"
"github.com/googleapis/genai-toolbox/internal/auth"
"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
"github.com/googleapis/genai-toolbox/internal/log"
@@ -347,31 +347,16 @@ func NewServer(ctx context.Context, cfg ServerConfig) (*Server, error) {
if err != nil {
return nil, fmt.Errorf("unable to initialize http log: %w", err)
}
var httpOpts httplog.Options
switch cfg.LoggingFormat.String() {
case "json":
httpOpts = httplog.Options{
JSON: true,
LogLevel: logLevel,
Concise: true,
RequestHeaders: false,
MessageFieldName: "message",
SourceFieldName: "logging.googleapis.com/sourceLocation",
TimeFieldName: "timestamp",
LevelFieldName: "severity",
}
case "standard":
httpOpts = httplog.Options{
LogLevel: logLevel,
Concise: true,
RequestHeaders: false,
MessageFieldName: "message",
}
default:
return nil, fmt.Errorf("invalid Logging format: %q", cfg.LoggingFormat.String())
schema := *httplog.SchemaGCP
schema.Level = cfg.LogLevel.String()
schema.Concise(true)
httpOpts := &httplog.Options{
Level: logLevel,
Schema: &schema,
}
httpLogger := httplog.NewLogger("httplog", httpOpts)
r.Use(httplog.RequestLogger(httpLogger))
logger := l.SlogLogger()
r.Use(httplog.RequestLogger(logger, httpOpts))
sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := InitializeConfigs(ctx, cfg)
if err != nil {


@@ -36,15 +36,17 @@ var (
AlloyDBDatabase = "postgres"
)
// Copied over from postgres.go
func initPostgresConnectionPool(host, port, user, pass, dbname string) (*pgxpool.Pool, error) {
// urlExample := "postgres:dd//username:password@localhost:5432/database_name"
url := &url.URL{
func buildPostgresURL(host, port, user, pass, dbname string) *url.URL {
return &url.URL{
Scheme: "postgres",
User: url.UserPassword(user, pass),
Host: fmt.Sprintf("%s:%s", host, port),
Path: dbname,
}
}
func initPostgresConnectionPool(host, port, user, pass, dbname string) (*pgxpool.Pool, error) {
url := buildPostgresURL(host, port, user, pass, dbname)
pool, err := pgxpool.New(context.Background(), url.String())
if err != nil {
return nil, fmt.Errorf("Unable to create connection pool: %w", err)
@@ -63,7 +65,8 @@ func setupAlloyDBContainer(ctx context.Context, t *testing.T) (string, string, f
"POSTGRES_PASSWORD": AlloyDBPass,
},
WaitingFor: wait.ForAll(
wait.ForLog("Post Startup: Successfully reinstalled extensions"),
wait.ForLog("database system was shut down at"),
wait.ForLog("database system is ready to accept connections"),
wait.ForExposedPort(),
),
}


@@ -21,6 +21,7 @@ import (
"os"
yaml "github.com/goccy/go-yaml"
"github.com/spf13/cobra"
"github.com/googleapis/genai-toolbox/cmd"
)
@@ -50,7 +51,7 @@ func tmpFileWithCleanup(content []byte) (string, func(), error) {
type CmdExec struct {
Out io.ReadCloser
cmd *cmd.Command
cmd *cobra.Command
cancel context.CancelFunc
closers []io.Closer
done chan bool // closed once the cmd is completed
@@ -77,7 +78,7 @@ func StartCmd(ctx context.Context, toolsFile map[string]any, args ...string) (*C
return nil, nil, fmt.Errorf("unable to open stdout pipe: %w", err)
}
c := cmd.NewCommand(cmd.WithStreams(pw, pw))
c := cmd.GenerateCommand(pw, pw)
c.SetArgs(args)
t := &CmdExec{