Compare commits


1 Commit

Author SHA1 Message Date
Haoyu Wang
fd60b849b1 feat(cli/skills): add support for generating agent skills from toolset 2026-01-29 21:55:57 -05:00
20 changed files with 1149 additions and 98 deletions

View File

@@ -35,6 +35,7 @@ import (
yaml "github.com/goccy/go-yaml"
"github.com/googleapis/genai-toolbox/internal/auth"
"github.com/googleapis/genai-toolbox/internal/cli/invoke"
"github.com/googleapis/genai-toolbox/internal/cli/skills"
"github.com/googleapis/genai-toolbox/internal/embeddingmodels"
"github.com/googleapis/genai-toolbox/internal/log"
"github.com/googleapis/genai-toolbox/internal/prebuiltconfigs"
@@ -401,6 +402,8 @@ func NewCommand(opts ...Option) *Command {
// Register subcommands for tool invocation
baseCmd.AddCommand(invoke.NewCommand(cmd))
// Register subcommands for skill generation
baseCmd.AddCommand(skills.NewCommand(cmd))
return cmd
}

cmd/skill_generate_test.go Normal file (166 lines added)
View File

@@ -0,0 +1,166 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cmd
import (
"context"
"os"
"path/filepath"
"strings"
"testing"
"time"
)
func TestGenerateSkill(t *testing.T) {
// Create a temporary directory for tests
tmpDir := t.TempDir()
outputDir := filepath.Join(tmpDir, "skills")
// Create a tools.yaml file with a sqlite tool
toolsFileContent := `
sources:
  my-sqlite:
    kind: sqlite
    database: test.db
tools:
  hello-sqlite:
    kind: sqlite-sql
    source: my-sqlite
    description: "hello tool"
    statement: "SELECT 'hello' as greeting"
`
toolsFilePath := filepath.Join(tmpDir, "tools.yaml")
if err := os.WriteFile(toolsFilePath, []byte(toolsFileContent), 0644); err != nil {
t.Fatalf("failed to write tools file: %v", err)
}
args := []string{
"skills-generate",
"--tools-file", toolsFilePath,
"--output-dir", outputDir,
"--name", "hello-sqlite",
"--description", "hello tool",
}
// Use a longer timeout because skills-generate initializes the full toolbox configuration (including source connections)
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
_, got, err := invokeCommandWithContext(ctx, args)
if err != nil {
t.Fatalf("command failed: %v\nOutput: %s", err, got)
}
// Verify generated directory structure
skillPath := filepath.Join(outputDir, "hello-sqlite")
if _, err := os.Stat(skillPath); os.IsNotExist(err) {
t.Fatalf("skill directory not created: %s", skillPath)
}
// Check SKILL.md
skillMarkdown := filepath.Join(skillPath, "SKILL.md")
content, err := os.ReadFile(skillMarkdown)
if err != nil {
t.Fatalf("failed to read SKILL.md: %v", err)
}
if !strings.Contains(string(content), "name: hello-sqlite") {
t.Errorf("SKILL.md does not contain expected name")
}
if !strings.Contains(string(content), "description: hello tool") {
t.Errorf("SKILL.md does not contain expected description")
}
// Check script file
scriptFilename := "hello-sqlite.js"
scriptPath := filepath.Join(skillPath, "scripts", scriptFilename)
if _, err := os.Stat(scriptPath); os.IsNotExist(err) {
t.Fatalf("script file not created: %s", scriptPath)
}
scriptContent, err := os.ReadFile(scriptPath)
if err != nil {
t.Fatalf("failed to read script file: %v", err)
}
if !strings.Contains(string(scriptContent), "hello-sqlite") {
t.Errorf("script file does not contain expected tool name")
}
// Check assets
assetPath := filepath.Join(skillPath, "assets", "hello-sqlite.yaml")
if _, err := os.Stat(assetPath); os.IsNotExist(err) {
t.Fatalf("asset file not created: %s", assetPath)
}
assetContent, err := os.ReadFile(assetPath)
if err != nil {
t.Fatalf("failed to read asset file: %v", err)
}
if !strings.Contains(string(assetContent), "hello-sqlite") {
t.Errorf("asset file does not contain expected tool name")
}
}
func TestGenerateSkill_NoConfig(t *testing.T) {
tmpDir := t.TempDir()
outputDir := filepath.Join(tmpDir, "skills")
args := []string{
"skills-generate",
"--output-dir", outputDir,
"--name", "test",
"--description", "test",
}
_, _, err := invokeCommandWithContext(context.Background(), args)
if err == nil {
t.Fatal("expected command to fail when no configuration is provided and tools.yaml is missing")
}
// Should not have created the directory if no config was processed
if _, err := os.Stat(outputDir); !os.IsNotExist(err) {
t.Errorf("output directory should not have been created")
}
}
func TestGenerateSkill_MissingArguments(t *testing.T) {
tmpDir := t.TempDir()
toolsFilePath := filepath.Join(tmpDir, "tools.yaml")
if err := os.WriteFile(toolsFilePath, []byte("tools: {}"), 0644); err != nil {
t.Fatalf("failed to write tools file: %v", err)
}
tests := []struct {
name string
args []string
}{
{
name: "missing name",
args: []string{"skills-generate", "--tools-file", toolsFilePath, "--description", "test"},
},
{
name: "missing description",
args: []string{"skills-generate", "--tools-file", toolsFilePath, "--name", "test"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
_, got, err := invokeCommandWithContext(context.Background(), tt.args)
if err == nil {
t.Fatalf("expected command to fail due to missing arguments, but it succeeded\nOutput: %s", got)
}
})
}
}

View File

@@ -23,14 +23,6 @@ To connect to the database to explore and query data, search the MCP store for t
In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt alloydb-postgres-admin```.
You'll now be able to see all enabled tools in the "Tools" tab.
> [!NOTE]

View File

@@ -27,13 +27,6 @@ For AlloyDB infrastructure management, search the MCP store for the AlloyDB for
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt alloydb-postgres```.
2. Add the required inputs for your [cluster](https://docs.cloud.google.com/alloydb/docs/cluster-list) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.

View File

@@ -21,13 +21,6 @@ An editor configured to use the BigQuery MCP server can use its AI capabilities
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt bigquery```.
2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.

View File

@@ -23,14 +23,6 @@ To connect to the database to explore and query data, search the MCP store for t
In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mssql-admin```.
You'll now be able to see all enabled tools in the "Tools" tab.
> [!NOTE]

View File

@@ -24,13 +24,6 @@ For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mssql```.
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/sqlserver/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.

View File

@@ -23,14 +23,6 @@ To connect to the database to explore and query data, search the MCP store for t
In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mysql-admin```.
You'll now be able to see all enabled tools in the "Tools" tab.
> [!NOTE]

View File

@@ -26,13 +26,6 @@ For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-mysql```.
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/mysql/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.

View File

@@ -23,14 +23,6 @@ To connect to the database to explore and query data, search the MCP store for t
In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-postgres-admin```.
You'll now be able to see all enabled tools in the "Tools" tab.
> [!NOTE]

View File

@@ -26,13 +26,6 @@ For Cloud SQL infrastructure management, search the MCP store for the Cloud SQL
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt cloud-sql-postgres```.
2. Add the required inputs for your [instance](https://cloud.google.com/sql/docs/postgres/instance-info) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.

View File

@@ -20,13 +20,6 @@ An editor configured to use the Dataplex MCP server can use its AI capabilities
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt dataplex```.
2. Add the required inputs in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.

View File

@@ -21,13 +21,6 @@ An editor configured to use the Looker MCP server can use its AI capabilities to
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt looker```.
2. Add the required inputs for your [instance](https://docs.cloud.google.com/looker/docs/set-up-and-administer-looker) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.

View File

@@ -21,13 +21,6 @@ An editor configured to use the Cloud Spanner MCP server can use its AI capabili
## Install & Configuration
1. In the Antigravity MCP Store, click the "Install" button.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest --prebuilt spanner```.
2. Add the required inputs for your [instance](https://docs.cloud.google.com/spanner/docs/instances) in the configuration pop-up, then click "Save". You can update this configuration at any time in the "Configure" tab.

View File

@@ -12,17 +12,10 @@ The MCP Toolbox for Databases Server gives AI-powered development tools the abil
## Install & Configuration
1. In the Antigravity MCP Store, click the **Install** button. A configuration window will appear.
> [!NOTE]
> On first use, the installation process automatically downloads and uses
> [MCP Toolbox](https://www.npmjs.com/package/@toolbox-sdk/server)
> `>=0.26.0`. To update MCP Toolbox, use:
> ```npm i -g @toolbox-sdk/server@latest```
> To always run the latest version, update the MCP server configuration to use:
> ```npx -y @toolbox-sdk/server@latest```.
3. Create your [`tools.yaml` configuration file](https://googleapis.github.io/genai-toolbox/getting-started/configure/).
2. Create your [`tools.yaml` configuration file](https://googleapis.github.io/genai-toolbox/getting-started/configure/).
4. In the configuration window, enter the full absolute path to your `tools.yaml` file and click **Save**.
3. In the configuration window, enter the full absolute path to your `tools.yaml` file and click **Save**.
> [!NOTE]
> If you encounter issues with Windows Defender blocking the execution, you may need to configure an allowlist. See [Configure exclusions for Microsoft Defender Antivirus](https://learn.microsoft.com/en-us/microsoft-365/security/defender-endpoint/configure-exclusions-microsoft-defender-antivirus?view=o365-worldwide) for more details.

View File

@@ -0,0 +1,107 @@
---
title: "Generate a Skill"
type: docs
weight: 10
description: >
How to generate a skill from a toolset configuration.
---
The `skills-generate` command allows you to convert a **toolset** into a **skill**. A toolset is a collection of tools, and the generated skill will contain metadata and execution scripts for all tools within that toolset.
## Before you begin
1. Make sure you have the `toolbox` executable in your PATH.
2. Make sure you have [Node.js](https://nodejs.org/) installed on your system.
## Generating a Skill from a Toolset
A skill package consists of a `SKILL.md` file and a set of Node.js scripts. Each tool defined in your toolset maps to a corresponding script in the generated skill.
### Command Signature
The `skills-generate` command follows this signature:
```bash
toolbox [--tools-file <path> | --prebuilt <name>] skills-generate \
  --name <skill-name> \
  --toolset <toolset-name> \
  --description <description> \
  --output-dir <output-directory>
```
### Example: Custom Tools File
1. Create a `tools.yaml` file with a toolset and some tools:
```yaml
tools:
  tool_a:
    description: "First tool"
    run:
      command: "echo 'Tool A'"
  tool_b:
    description: "Second tool"
    run:
      command: "echo 'Tool B'"
toolsets:
  my_toolset:
    tools:
      - tool_a
      - tool_b
```
2. Generate the skill:
```bash
toolbox --tools-file tools.yaml skills-generate \
  --name "my-multi-tool-skill" \
  --toolset "my_toolset" \
  --description "A skill containing multiple tools" \
  --output-dir "generated-skills"
```
3. The generated skill directory structure:
```text
generated-skills/
└── my-multi-tool-skill/
    ├── SKILL.md
    ├── assets/
    │   ├── tool_a.yaml
    │   └── tool_b.yaml
    └── scripts/
        ├── tool_a.js
        └── tool_b.js
```
In this example, the skill contains two Node.js scripts (`tool_a.js` and `tool_b.js`), each mapping to a tool in the original toolset.
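For reference, the generated `SKILL.md` for this example starts with YAML front matter followed by one section per tool. Abridged, it looks roughly like the following; each tool section also includes a usage command for its script and, when the tool declares parameters, a JSON schema describing them:

```markdown
---
name: my-multi-tool-skill
description: A skill containing multiple tools
---
Here is a list of scripts which can be used.

# tool_a
First tool
...

# tool_b
Second tool
...
```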
### Example: Prebuilt Configuration
You can also generate skills from prebuilt toolsets:
```bash
toolbox --prebuilt alloydb-postgres-admin skills-generate \
  --name "alloydb-postgres-admin" \
  --description "skill for performing administrative operations on alloydb"
```
## Output Directory
By default, skills are generated in the `skills` directory. You can specify a different output directory using the `--output-dir` flag.
## Shared Node.js Scripts
The `skills-generate` command generates shared Node.js scripts (`.js`) that work across different platforms (Linux, macOS, Windows). This ensures that the generated skills are portable.
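Concretely, each generated script is a thin wrapper that shells out to the `toolbox` CLI. Stripped of its error handling, the wrapper generated for `tool_a` from the earlier example would look roughly like this (a sketch of the generated output, not the template itself; prebuilt-based skills pass `--prebuilt` flags instead of `--tools-file`):

```js
#!/usr/bin/env node
// Sketch of a generated wrapper script (abridged).
const { spawn } = require('child_process');
const path = require('path');

// Point toolbox at the filtered configuration bundled under assets/.
const configArgs = ['--tools-file', path.join(__dirname, '..', 'assets', 'tool_a.yaml')];

// Forward any CLI arguments (a JSON parameter string) to `toolbox invoke`.
const command = process.platform === 'win32' ? 'toolbox.exe' : 'toolbox';
const child = spawn(command, [...configArgs, 'invoke', 'tool_a', ...process.argv.slice(2)], { stdio: 'inherit' });
child.on('close', (code) => process.exit(code));
```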
## Installing the Generated Skill in Gemini CLI
Once you have generated a skill, you can install it into the Gemini CLI using the `gemini skills install` command.
### Installation Command
Provide the path to the directory containing the generated skill:
```bash
gemini skills install /path/to/generated-skills/my-multi-tool-skill
```
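You can also exercise a generated script directly with Node.js before installing the skill, which is a quick way to verify the wrapper and its bundled configuration. The path below is from the earlier example; pass the tool's parameters, if any, as a single JSON string argument:

```bash
# Runs `toolbox invoke tool_a` via the generated wrapper; `toolbox` must be on your PATH.
node generated-skills/my-multi-tool-skill/scripts/tool_a.js
```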

View File

@@ -32,7 +32,8 @@ description: >
## Sub Commands
### `invoke`
<details>
<summary><code>invoke</code></summary>
Executes a tool directly with the provided parameters. This is useful for testing tool configurations and parameters without needing a full client setup.
@@ -45,6 +46,30 @@ toolbox invoke <tool-name> [params]
- `<tool-name>`: The name of the tool to execute (as defined in your configuration).
- `[params]`: (Optional) A JSON string containing the parameters for the tool.
</details>
<details>
<summary><code>skills-generate</code></summary>
Generates a skill package from a specified toolset. Each tool in the toolset will have a corresponding Node.js execution script in the generated skill.
**Syntax:**
```bash
toolbox [--tools-file <path> | --prebuilt <name>] skills-generate --name <name> --toolset <toolset> --description <description> --output-dir <output>
```
**Flags:**
- `--name`: (Required) Name of the generated skill.
- `--toolset`: (Optional) Name of the toolset to convert into a skill. If omitted, all configured tools are included.
- `--description`: (Required) Description of the generated skill.
- `--output-dir`: Directory to output generated skills (default: "skills").
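**Example:**
A representative invocation (the file, toolset, and skill names here are illustrative):
```bash
toolbox --tools-file tools.yaml skills-generate \
  --name "my-skill" \
  --toolset "my_toolset" \
  --description "Skill generated from my_toolset" \
  --output-dir "skills"
```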
For more detailed instructions, see [Generate a Skill](../how-to/generate_skill.md).
</details>
## Examples
### Transport Configuration

View File

@@ -0,0 +1,298 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package skills
import (
"context"
_ "embed"
"fmt"
"os"
"path/filepath"
"github.com/googleapis/genai-toolbox/internal/log"
"github.com/googleapis/genai-toolbox/internal/server"
"github.com/googleapis/genai-toolbox/internal/server/resources"
"github.com/spf13/cobra"
)
// RootCommand defines the interface required by the skills subcommand.
// This allows subcommands to access shared resources and functionality without
// direct coupling to the root command's implementation.
type RootCommand interface {
// Config returns a copy of the current server configuration.
Config() server.ServerConfig
// LoadConfig loads and merges the configuration from files, folders, and prebuilts.
LoadConfig(ctx context.Context) error
// Setup initializes the runtime environment, including logging and telemetry.
// It returns the updated context and a shutdown function to be called when finished.
Setup(ctx context.Context) (context.Context, func(context.Context) error, error)
// Logger returns the logger instance.
Logger() log.Logger
}
// Command is the command for generating skills.
type Command struct {
*cobra.Command
rootCmd RootCommand
name string
description string
toolset string
outputDir string
}
// Parameter represents a parameter of a tool.
type Parameter struct {
Name string `json:"name"`
Description string `json:"description"`
Type string `json:"type"`
Default interface{} `json:"default"`
Required bool `json:"required"`
}
// Tool represents a tool.
type Tool struct {
Name string `json:"name"`
Description string `json:"description"`
Parameters []Parameter `json:"parameters"`
}
// Config represents the structure of the tools.yaml file.
type Config struct {
Sources map[string]interface{} `yaml:"sources,omitempty"`
Tools map[string]map[string]interface{} `yaml:"tools"`
}
// serverConfig holds the configuration used to start the toolbox server.
type serverConfig struct {
prebuiltConfigs []string
toolsFile string
}
// NewCommand creates a new Command.
func NewCommand(rootCmd RootCommand) *cobra.Command {
cmd := &Command{
rootCmd: rootCmd,
}
cmd.Command = &cobra.Command{
Use: "skills-generate",
Short: "Generate skills from tool configurations",
RunE: func(c *cobra.Command, args []string) error {
return cmd.run(c)
},
}
cmd.Flags().StringVar(&cmd.outputDir, "output-dir", "skills", "Directory to output generated skills")
cmd.Flags().StringVar(&cmd.toolset, "toolset", "", "Name of the toolset to convert into a skill. If provided, only tools in this toolset are generated.")
cmd.Flags().StringVar(&cmd.name, "name", "", "Name of the generated skill.")
cmd.Flags().StringVar(&cmd.description, "description", "", "Description of the generated skill")
cmd.MarkFlagRequired("name")
cmd.MarkFlagRequired("description")
return cmd.Command
}
func (c *Command) run(cmd *cobra.Command) error {
ctx, cancel := context.WithCancel(cmd.Context())
defer cancel()
ctx, shutdown, err := c.rootCmd.Setup(ctx)
if err != nil {
return err
}
defer func() {
_ = shutdown(ctx)
}()
logger := c.rootCmd.Logger()
toolsFile, err := cmd.Flags().GetString("tools-file")
if err != nil {
errMsg := fmt.Errorf("error getting tools-file flag: %w", err)
logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
prebuiltConfigs, err := cmd.Flags().GetStringSlice("prebuilt")
if err != nil {
errMsg := fmt.Errorf("error getting prebuilt flag: %w", err)
logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
// Load and merge tool configurations
if err := c.rootCmd.LoadConfig(ctx); err != nil {
return err
}
if len(prebuiltConfigs) == 0 && toolsFile == "" {
logger.InfoContext(ctx, "No configurations found to process. Use --tools-file or --prebuilt.")
return nil
}
if err := os.MkdirAll(c.outputDir, 0755); err != nil {
errMsg := fmt.Errorf("error creating output directory: %w", err)
logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
logger.InfoContext(ctx, fmt.Sprintf("Generating skill '%s'...", c.name))
config := serverConfig{
prebuiltConfigs: prebuiltConfigs,
toolsFile: toolsFile,
}
// Initialize toolbox and collect tools
allTools, err := c.collectTools(ctx)
if err != nil {
errMsg := fmt.Errorf("error collecting tools: %w", err)
logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
if len(allTools) == 0 {
logger.InfoContext(ctx, "No tools found to generate.")
return nil
}
// Generate the combined skill
skillPath := filepath.Join(c.outputDir, c.name)
if err := os.MkdirAll(skillPath, 0755); err != nil {
errMsg := fmt.Errorf("error creating skill directory: %w", err)
logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
// Generate assets directory if needed
assetsPath := filepath.Join(skillPath, "assets")
if toolsFile != "" {
if err := os.MkdirAll(assetsPath, 0755); err != nil {
errMsg := fmt.Errorf("error creating assets dir: %w", err)
logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
}
// Generate scripts
scriptsPath := filepath.Join(skillPath, "scripts")
if err := os.MkdirAll(scriptsPath, 0755); err != nil {
errMsg := fmt.Errorf("error creating scripts dir: %w", err)
logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
for _, tool := range allTools {
specificToolsFileName := ""
if toolsFile != "" {
minimizedContent, err := generateFilteredConfig(toolsFile, tool.Name)
if err != nil {
logger.ErrorContext(ctx, fmt.Sprintf("Error generating filtered config for %s: %v", tool.Name, err))
}
if minimizedContent != nil {
specificToolsFileName = fmt.Sprintf("%s.yaml", tool.Name)
destPath := filepath.Join(assetsPath, specificToolsFileName)
if err := os.WriteFile(destPath, minimizedContent, 0644); err != nil {
logger.ErrorContext(ctx, fmt.Sprintf("Error writing filtered config for %s: %v", tool.Name, err))
}
}
}
scriptContent, err := generateShellScriptContent(tool.Name, config, specificToolsFileName)
if err != nil {
logger.ErrorContext(ctx, fmt.Sprintf("Error generating script content for %s: %v", tool.Name, err))
} else {
scriptFilename := filepath.Join(scriptsPath, fmt.Sprintf("%s.js", tool.Name))
if err := os.WriteFile(scriptFilename, []byte(scriptContent), 0755); err != nil {
logger.ErrorContext(ctx, fmt.Sprintf("Error writing script %s: %v", scriptFilename, err))
}
}
}
// Generate SKILL.md
skillContent, err := generateSkillMarkdown(c.name, c.description, allTools)
if err != nil {
errMsg := fmt.Errorf("error generating SKILL.md content: %w", err)
logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
skillMdPath := filepath.Join(skillPath, "SKILL.md")
if err := os.WriteFile(skillMdPath, []byte(skillContent), 0644); err != nil {
errMsg := fmt.Errorf("error writing SKILL.md: %w", err)
logger.ErrorContext(ctx, errMsg.Error())
return errMsg
}
logger.InfoContext(ctx, fmt.Sprintf("Successfully generated skill '%s' with %d tools.", c.name, len(allTools)))
return nil
}
func (c *Command) collectTools(ctx context.Context) (map[string]Tool, error) {
// Initialize Resources
sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap, err := server.InitializeConfigs(ctx, c.rootCmd.Config())
if err != nil {
return nil, fmt.Errorf("failed to initialize resources: %w", err)
}
resourceMgr := resources.NewResourceManager(sourcesMap, authServicesMap, embeddingModelsMap, toolsMap, toolsetsMap, promptsMap, promptsetsMap)
result := make(map[string]Tool)
var toolsToProcess []string
if c.toolset != "" {
ts, ok := resourceMgr.GetToolset(c.toolset)
if !ok {
return nil, fmt.Errorf("toolset %q not found", c.toolset)
}
toolsToProcess = ts.ToolNames
} else {
// All tools
for name := range toolsMap {
toolsToProcess = append(toolsToProcess, name)
}
}
for _, toolName := range toolsToProcess {
t, ok := resourceMgr.GetTool(toolName)
if !ok {
// This should only happen if the toolset refers to a non-existent tool, but it's good to check.
continue
}
params := []Parameter{}
for _, p := range t.GetParameters() {
manifest := p.Manifest()
params = append(params, Parameter{
Name: p.GetName(),
Description: manifest.Description, // Use description from manifest
Type: p.GetType(),
Default: p.GetDefault(),
Required: p.GetRequired(),
})
}
manifest := t.Manifest()
result[toolName] = Tool{
Name: toolName,
Description: manifest.Description,
Parameters: params,
}
}
return result, nil
}

View File

@@ -0,0 +1,247 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package skills
import (
"encoding/json"
"fmt"
"os"
"sort"
"strings"
"text/template"
"github.com/goccy/go-yaml"
)
const skillTemplate = `---
name: {{.SkillName}}
description: {{.SkillDescription}}
---
Here is a list of scripts which can be used.
{{range .Tools}}
# {{.Name}}
{{.Description}}
{{.ParametersSchema}}
## Usage
{{.Usage}}
---
{{end}}
`
type toolTemplateData struct {
Name string
Description string
ParametersSchema string
Usage string
}
type skillTemplateData struct {
SkillName string
SkillDescription string
Tools []toolTemplateData
}
func generateSkillMarkdown(skillName, skillDescription string, toolsMap map[string]Tool) (string, error) {
var toolsData []toolTemplateData
// Order tools based on name
var tools []Tool
for _, tool := range toolsMap {
tools = append(tools, tool)
}
sort.Slice(tools, func(i, j int) bool {
return tools[i].Name < tools[j].Name
})
for _, tool := range tools {
parametersSchema, err := formatParameters(tool.Parameters)
if err != nil {
return "", err
}
usage := fmt.Sprintf("```bash\nnode scripts/%s.js '{\"<param_name>\": \"<param_value>\"}'\n```", tool.Name)
toolsData = append(toolsData, toolTemplateData{
Name: tool.Name,
Description: tool.Description,
ParametersSchema: parametersSchema,
Usage: usage,
})
}
data := skillTemplateData{
SkillName: skillName,
SkillDescription: skillDescription,
Tools: toolsData,
}
tmpl, err := template.New("markdown").Parse(skillTemplate)
if err != nil {
return "", fmt.Errorf("error parsing markdown template: %w", err)
}
var buf strings.Builder
if err := tmpl.Execute(&buf, data); err != nil {
return "", fmt.Errorf("error executing markdown template: %w", err)
}
return buf.String(), nil
}
const nodeScriptTemplate = `#!/usr/bin/env node
const { spawn } = require('child_process');
const path = require('path');
const toolName = "{{.Name}}";
const prebuiltNames = {{.PrebuiltNamesJSON}};
const toolsFileName = "{{.ToolsFileName}}";
let configArgs = [];
if (prebuiltNames.length > 0) {
prebuiltNames.forEach(name => {
configArgs.push("--prebuilt", name);
});
}
if (toolsFileName) {
configArgs.push("--tools-file", path.join(__dirname, "..", "assets", toolsFileName));
}
const args = process.argv.slice(2);
const toolboxArgs = [...configArgs, "invoke", toolName, ...args];
const command = process.platform === 'win32' ? 'toolbox.exe' : 'toolbox';
const child = spawn(command, toolboxArgs, { stdio: 'inherit' });
child.on('close', (code) => {
process.exit(code);
});
child.on('error', (err) => {
console.error("Error executing toolbox:", err);
process.exit(1);
});
`
type scriptData struct {
Name string
PrebuiltNamesJSON string
ToolsFileName string
}
func generateShellScriptContent(name string, config serverConfig, toolsFileName string) (string, error) {
prebuiltJSON, _ := json.Marshal(config.prebuiltConfigs)
data := scriptData{
Name: name,
PrebuiltNamesJSON: string(prebuiltJSON),
ToolsFileName: toolsFileName,
}
tmpl, err := template.New("script").Parse(nodeScriptTemplate)
if err != nil {
return "", fmt.Errorf("error parsing script template: %w", err)
}
var buf strings.Builder
if err := tmpl.Execute(&buf, data); err != nil {
return "", fmt.Errorf("error executing script template: %w", err)
}
return buf.String(), nil
}
func formatParameters(params []Parameter) (string, error) {
if len(params) == 0 {
return "", nil
}
properties := make(map[string]interface{})
var required []string
for _, p := range params {
paramMap := map[string]interface{}{
"type": p.Type,
"description": p.Description,
}
if p.Default != nil {
paramMap["default"] = p.Default
}
properties[p.Name] = paramMap
if p.Required {
required = append(required, p.Name)
}
}
schema := map[string]interface{}{
"type": "object",
"properties": properties,
}
if len(required) > 0 {
schema["required"] = required
}
schemaJSON, err := json.MarshalIndent(schema, "", " ")
if err != nil {
return "", fmt.Errorf("error generating parameters schema: %w", err)
}
return fmt.Sprintf("## Parameters\n\n```json\n%s\n```", string(schemaJSON)), nil
}
func generateFilteredConfig(toolsFile, toolName string) ([]byte, error) {
data, err := os.ReadFile(toolsFile)
if err != nil {
return nil, fmt.Errorf("error reading %s: %w", toolsFile, err)
}
var cfg Config
if err := yaml.Unmarshal(data, &cfg); err != nil {
return nil, fmt.Errorf("error parsing YAML from %s: %w", toolsFile, err)
}
if _, ok := cfg.Tools[toolName]; !ok {
return nil, nil // Tool not found in this file
}
filteredCfg := Config{
Tools: map[string]map[string]interface{}{
toolName: cfg.Tools[toolName],
},
}
// Add relevant source if exists
if src, ok := cfg.Tools[toolName]["source"].(string); ok && src != "" {
if sourceData, exists := cfg.Sources[src]; exists {
if filteredCfg.Sources == nil {
filteredCfg.Sources = make(map[string]interface{})
}
filteredCfg.Sources[src] = sourceData
}
}
filteredData, err := yaml.Marshal(filteredCfg)
if err != nil {
return nil, fmt.Errorf("error marshaling filtered tools for %s: %w", toolName, err)
}
return filteredData, nil
}

View File

@@ -0,0 +1,300 @@
// Copyright 2026 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package skills
import (
"os"
"path/filepath"
"strings"
"testing"
"github.com/goccy/go-yaml"
"github.com/google/go-cmp/cmp"
)
func TestFormatParameters(t *testing.T) {
tests := []struct {
name string
params []Parameter
wantContains []string
wantErr bool
}{
{
name: "empty parameters",
params: []Parameter{},
wantContains: []string{""},
},
{
name: "single required string parameter",
params: []Parameter{
{
Name: "param1",
Description: "A test parameter",
Type: "string",
Required: true,
},
},
wantContains: []string{
"## Parameters",
"```json",
`"type": "object"`,
`"properties": {`,
`"param1": {`,
`"type": "string"`,
`"description": "A test parameter"`,
`"required": [`,
`"param1"`,
},
},
{
name: "mixed parameters with defaults",
params: []Parameter{
{
Name: "param1",
Description: "Param 1",
Type: "string",
Required: true,
},
{
Name: "param2",
Description: "Param 2",
Type: "integer",
Default: 42,
Required: false,
},
},
wantContains: []string{
`"param1": {`,
`"param2": {`,
`"default": 42`,
`"required": [`,
`"param1"`,
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := formatParameters(tt.params)
if (err != nil) != tt.wantErr {
t.Errorf("formatParameters() error = %v, wantErr %v", err, tt.wantErr)
return
}
if tt.wantErr {
return
}
if len(tt.params) == 0 {
if got != "" {
t.Errorf("formatParameters() = %v, want empty string", got)
}
return
}
for _, want := range tt.wantContains {
if !strings.Contains(got, want) {
t.Errorf("formatParameters() result missing expected string: %s\nGot:\n%s", want, got)
}
}
})
}
}
func TestGenerateSkillMarkdown(t *testing.T) {
tools := map[string]Tool{
"tool1": {
Name: "tool1",
Description: "First tool",
Parameters: []Parameter{
{Name: "p1", Type: "string", Description: "d1", Required: true},
},
},
}
got, err := generateSkillMarkdown("MySkill", "My Description", tools)
if err != nil {
t.Fatalf("generateSkillMarkdown() error = %v", err)
}
expectedSubstrings := []string{
"name: MySkill",
"description: My Description",
"# tool1",
"First tool",
"## Parameters",
"node scripts/tool1.js '{\"<param_name>\": \"<param_value>\"}'",
}
for _, s := range expectedSubstrings {
if !strings.Contains(got, s) {
t.Errorf("generateSkillMarkdown() missing substring %q", s)
}
}
}
func TestGenerateShellScriptContent(t *testing.T) {
tests := []struct {
name string
toolName string
config serverConfig
toolsFileName string
wantContains []string
}{
{
name: "basic script",
toolName: "test-tool",
config: serverConfig{
prebuiltConfigs: []string{},
},
toolsFileName: "",
wantContains: []string{
`const toolName = "test-tool";`,
`const prebuiltNames = [];`,
`const toolsFileName = "";`,
`const toolboxArgs = [...configArgs, "invoke", toolName, ...args];`,
},
},
{
name: "script with prebuilts and tools file",
toolName: "complex-tool",
config: serverConfig{
prebuiltConfigs: []string{"pre1", "pre2"},
},
toolsFileName: "tools.yaml",
wantContains: []string{
`const toolName = "complex-tool";`,
`const prebuiltNames = ["pre1","pre2"];`,
`const toolsFileName = "tools.yaml";`,
`configArgs.push("--prebuilt", name);`,
`configArgs.push("--tools-file", path.join(__dirname, "..", "assets", toolsFileName));`,
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, err := generateShellScriptContent(tt.toolName, tt.config, tt.toolsFileName)
if err != nil {
t.Fatalf("generateShellScriptContent() error = %v", err)
}
for _, s := range tt.wantContains {
if !strings.Contains(got, s) {
t.Errorf("generateShellScriptContent() missing substring %q\nGot:\n%s", s, got)
}
}
})
}
}
func TestGenerateFilteredConfig(t *testing.T) {
// Setup temporary directory and file
tmpDir := t.TempDir()
toolsFile := filepath.Join(tmpDir, "tools.yaml")
configContent := `
sources:
  src1:
    type: "postgres"
    connection_string: "conn1"
  src2:
    type: "mysql"
    connection_string: "conn2"
tools:
  tool1:
    source: "src1"
    query: "SELECT 1"
  tool2:
    source: "src2"
    query: "SELECT 2"
  tool3:
    type: "http" # No source
`
if err := os.WriteFile(toolsFile, []byte(configContent), 0644); err != nil {
t.Fatalf("Failed to create temp tools file: %v", err)
}
tests := []struct {
name string
toolName string
wantCfg Config
wantErr bool
wantNil bool
}{
{
name: "tool with source",
toolName: "tool1",
wantCfg: Config{
Sources: map[string]interface{}{
"src1": map[string]interface{}{
"type": "postgres",
"connection_string": "conn1",
},
},
Tools: map[string]map[string]interface{}{
"tool1": {
"source": "src1",
"query": "SELECT 1",
},
},
},
},
{
name: "tool without source",
toolName: "tool3",
wantCfg: Config{
Tools: map[string]map[string]interface{}{
"tool3": {
"type": "http",
},
},
},
},
{
name: "non-existent tool",
toolName: "missing-tool",
wantNil: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
gotBytes, err := generateFilteredConfig(toolsFile, tt.toolName)
if (err != nil) != tt.wantErr {
t.Errorf("generateFilteredConfig() error = %v, wantErr %v", err, tt.wantErr)
return
}
if tt.wantErr {
return
}
if tt.wantNil {
if gotBytes != nil {
t.Errorf("generateFilteredConfig() expected nil, got %s", string(gotBytes))
}
return
}
var gotCfg Config
if err := yaml.Unmarshal(gotBytes, &gotCfg); err != nil {
t.Errorf("Failed to unmarshal result: %v", err)
}
if diff := cmp.Diff(tt.wantCfg, gotCfg); diff != "" {
t.Errorf("generateFilteredConfig() mismatch (-want +got):\n%s", diff)
}
})
}
}