Mirror of https://github.com/simstudioai/sim.git (synced 2026-04-28 03:00:29 -04:00).

Compare commits (25 commits):

`489f2d3bd0`, `65e17de065`, `79ff5d80b3`, `2a52141d2f`, `76ad59fd7d`, `c32c1cb917`, `58a3ae2aa4`, `50e74f75ef`, `60652e621c`, `8863f1132a`, `d6c1bc2fef`, `d93a6f57bc`, `df581c3efb`, `595c4c3613`, `f16d17ba49`, `e6fefc863c`, `5fba724818`, `60b80ec172`, `af4be770a1`, `f330fe22a2`, `efc868263a`, `56044776d5`, `04f1d015f3`, `3422f64c5f`, `ccb5f1e690`
@@ -1,4 +1,4 @@
-FROM oven/bun:1.3.11-alpine
+FROM oven/bun:1.3.13-alpine

# Install necessary packages for development
RUN apk add --no-cache \
.github/CONTRIBUTING.md (vendored): 259 lines changed
@@ -2,8 +2,15 @@

Thank you for your interest in contributing to Sim! Our goal is to provide developers with a powerful, user-friendly platform for building, testing, and optimizing agentic workflows. We welcome contributions in all forms—from bug fixes and design improvements to brand-new features.

> **Project Overview:**
-> Sim is a monorepo using Turborepo, containing the main application (`apps/sim/`), documentation (`apps/docs/`), and shared packages (`packages/`). The main application is built with Next.js (app router), ReactFlow, Zustand, Shadcn, and Tailwind CSS. Please ensure your contributions follow our best practices for clarity, maintainability, and consistency.
+> Sim is a Turborepo monorepo with two deployable apps and a set of shared packages:
+>
+> - `apps/sim/` — the main Next.js application (App Router, ReactFlow, Zustand, Shadcn, Tailwind CSS).
+> - `apps/realtime/` — a small Bun + Socket.IO server that powers the collaborative canvas. Shares DB and Better Auth secrets with `apps/sim` via `@sim/*` packages.
+> - `apps/docs/` — Fumadocs-based documentation site.
+> - `packages/` — shared workspace packages (`@sim/db`, `@sim/auth`, `@sim/audit`, `@sim/workflow-types`, `@sim/workflow-persistence`, `@sim/workflow-authz`, `@sim/realtime-protocol`, `@sim/security`, `@sim/logger`, `@sim/utils`, `@sim/testing`, `@sim/tsconfig`).
+>
+> Strict one-way dependency flow: `apps/* → packages/*`. Packages never import from apps. Please ensure your contributions follow this and our best practices for clarity, maintainability, and consistency.
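For illustration, a minimal sketch of the one-way rule in practice (the imported name is hypothetical; only the workspace paths come from the overview above):

```typescript
// In app code (e.g. under apps/sim/), depending on a workspace package is allowed:
import type { WorkflowState } from '@sim/workflow-types' // hypothetical export name

// In package code (e.g. under packages/db/), importing from an app would invert the
// flow and is never allowed:
// import { canvas } from '../../apps/sim/...' // forbidden: packages never import from apps
```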

---
@@ -24,14 +31,17 @@ Thank you for your interest in contributing to Sim! Our goal is to provide devel

We strive to keep our workflow as simple as possible. To contribute:

1. **Fork the Repository**
   Click the **Fork** button on GitHub to create your own copy of the project.

2. **Clone Your Fork**

   ```bash
   git clone https://github.com/<your-username>/sim.git
   cd sim
   ```

3. **Create a Feature Branch**
   Create a new branch with a descriptive name:

   ```bash
@@ -40,21 +50,23 @@ We strive to keep our workflow as simple as possible. To contribute:

   Use a clear naming convention to indicate the type of work (e.g., `feat/`, `fix/`, `docs/`).

4. **Make Your Changes**
   Ensure your changes are small, focused, and adhere to our coding guidelines.

5. **Commit Your Changes**
   Write clear, descriptive commit messages that follow the [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/#specification) specification. This allows us to maintain a coherent project history and generate changelogs automatically. For example:

   - `feat(api): add new endpoint for user authentication`
   - `fix(ui): resolve button alignment issue`
   - `docs: update contribution guidelines`

6. **Push Your Branch**

   ```bash
   git push origin feat/your-feature-name
   ```

7. **Create a Pull Request**
   Open a pull request against the `staging` branch on GitHub. Please provide a clear description of the changes and reference any relevant issues (e.g., `fixes #123`).

---
@@ -65,7 +77,7 @@ If you discover a bug or have a feature request, please open an issue in our Git

- Provide a clear, descriptive title.
- Include as many details as possible (steps to reproduce, screenshots, etc.).
- **Tag Your Issue Appropriately:**
  Use the following labels to help us categorize your issue:
  - **active:** Actively working on it right now.
  - **bug:** Something isn't working.
@@ -82,12 +94,11 @@ If you discover a bug or have a feature request, please open an issue in our Git

Before creating a pull request:

- **Ensure Your Branch Is Up-to-Date:**
  Rebase your branch onto the latest `staging` branch to prevent merge conflicts.
- **Follow the Guidelines:**
  Make sure your changes are well-tested, follow our coding standards, and include relevant documentation if necessary.
- **Reference Issues:**
  If your PR addresses an existing issue, include `refs #<issue-number>` or `fixes #<issue-number>` in your PR description.

Our maintainers will review your pull request and provide feedback. We aim to make the review process as smooth and timely as possible.
@@ -166,27 +177,27 @@ To use local models with Sim:

1. Install Ollama and pull models:

   ```bash
   # Install Ollama (if not already installed)
   curl -fsSL https://ollama.ai/install.sh | sh

   # Pull a model (e.g., gemma3:4b)
   ollama pull gemma3:4b
   ```

2. Start Sim with local model support:

   ```bash
   # With NVIDIA GPU support
   docker compose --profile local-gpu -f docker-compose.ollama.yml up -d

   # Without GPU (CPU only)
   docker compose --profile local-cpu -f docker-compose.ollama.yml up -d

   # If hosting on a server, update the environment variables in the docker-compose.prod.yml file
   # to include the server's public IP, then start again (e.g., set OLLAMA_URL to http://1.1.1.1:11434)
   docker compose -f docker-compose.prod.yml up -d
   ```

### Option 3: Using VS Code / Cursor Dev Containers
@@ -201,61 +212,104 @@ Dev Containers provide a consistent and easy-to-use development environment:

2. **Setup Steps:**

   - Clone the repository:

     ```bash
     git clone https://github.com/<your-username>/sim.git
     cd sim
     ```

-   - Open the project in VS Code/Cursor
-   - When prompted, click "Reopen in Container" (or press F1 and select "Remote-Containers: Reopen in Container")
-   - Wait for the container to build and initialize
+   - Open the project in VS Code/Cursor.
+   - When prompted, click "Reopen in Container" (or press F1 and select "Remote-Containers: Reopen in Container").
+   - Wait for the container to build and initialize.

3. **Start Developing:**

-   - Run `bun run dev:full` in the terminal or use the `sim-start` alias
-   - This starts both the main application and the realtime socket server
-   - All dependencies and configurations are automatically set up
-   - Your changes will be automatically hot-reloaded
+   - Run `bun run dev:full` in the terminal or use the `sim-start` alias.
+   - This starts both the main application and the realtime socket server.
+   - All dependencies and configurations are automatically set up.
+   - Your changes will be automatically hot-reloaded.

4. **GitHub Codespaces:**

-   - This setup also works with GitHub Codespaces if you prefer development in the browser
-   - Just click "Code" → "Codespaces" → "Create codespace on staging"
+   - This setup also works with GitHub Codespaces if you prefer development in the browser.
+   - Just click "Code" → "Codespaces" → "Create codespace on staging".

### Option 4: Manual Setup

-If you prefer not to use Docker or Dev Containers:
+If you prefer not to use Docker or Dev Containers, follow the steps below. **All commands run from the repository root unless explicitly noted.**

-1. **Clone and Install:**
+1. **Clone the Repository:**

   ```bash
   git clone https://github.com/<your-username>/sim.git
   cd sim
   bun install
   ```

+   Bun workspaces handle dependency resolution for all apps and packages from the root `bun install`.

-2. **Set Up Environment:**
-
-   - Navigate to the app directory:
-
-     ```bash
-     cd apps/sim
-     ```
-
-   - Copy `.env.example` to `.env`
-   - Configure required variables (DATABASE_URL, BETTER_AUTH_SECRET, BETTER_AUTH_URL)
-
-3. **Set Up Database:**
-
-   ```bash
-   bunx drizzle-kit push
-   ```
-
-4. **Run the Development Server:**
+2. **Set Up Environment Files:**
+
+   We use **per-app `.env` files** (the Turborepo-canonical pattern), not a single root `.env`. Three files are needed for local dev:
+
+   ```bash
+   # Main app — large, app-specific (OAuth secrets, LLM keys, Stripe, etc.)
+   cp apps/sim/.env.example apps/sim/.env
+
+   # Realtime server — small, only the values shared with the main app
+   cp apps/realtime/.env.example apps/realtime/.env
+
+   # DB tooling (drizzle-kit, db:migrate)
+   cp packages/db/.env.example packages/db/.env
+   ```
+
+   At minimum, each `.env` needs `DATABASE_URL`. `apps/sim/.env` and `apps/realtime/.env` additionally need matching values for `BETTER_AUTH_URL`, `BETTER_AUTH_SECRET`, `INTERNAL_API_SECRET`, and `NEXT_PUBLIC_APP_URL`. `apps/sim/.env` also needs `ENCRYPTION_KEY` and `API_ENCRYPTION_KEY`. Generate any 32-char secrets with `openssl rand -hex 32`.
+
+   The same `BETTER_AUTH_SECRET`, `INTERNAL_API_SECRET`, and `DATABASE_URL` must appear in both `apps/sim/.env` and `apps/realtime/.env` so the two services share auth and DB. After editing `apps/sim/.env`, you can mirror the shared subset into the realtime env in one shot:
+
+   ```bash
+   grep -E '^(DATABASE_URL|BETTER_AUTH_URL|BETTER_AUTH_SECRET|INTERNAL_API_SECRET|NEXT_PUBLIC_APP_URL|REDIS_URL)=' apps/sim/.env > apps/realtime/.env
+   grep -E '^DATABASE_URL=' apps/sim/.env > packages/db/.env
+   ```
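For illustration, a small consistency check you could run after the mirroring step (a hypothetical helper, not part of the repo; assumes Bun or Node with `node:fs`):

```typescript
// check-env-sync.ts: hypothetical helper that verifies the shared keys match
// between apps/sim/.env and apps/realtime/.env.
import { readFileSync } from 'node:fs'

const SHARED_KEYS = ['DATABASE_URL', 'BETTER_AUTH_URL', 'BETTER_AUTH_SECRET', 'INTERNAL_API_SECRET', 'NEXT_PUBLIC_APP_URL']

function parseEnv(path: string): Record<string, string> {
  const vars: Record<string, string> = {}
  for (const line of readFileSync(path, 'utf8').split('\n')) {
    const match = line.match(/^([A-Za-z0-9_]+)=(.*)$/)
    if (match) vars[match[1]] = match[2]
  }
  return vars
}

const sim = parseEnv('apps/sim/.env')
const realtime = parseEnv('apps/realtime/.env')

for (const key of SHARED_KEYS) {
  if (sim[key] !== realtime[key]) {
    console.error(`Mismatch for ${key}: the two services will not share auth/DB state`)
    process.exitCode = 1
  }
}
```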

+3. **Run Database Migrations:**
+
+   Migrations live in `packages/db/migrations/`. Run them via the dedicated workspace script:
+
+   ```bash
+   cd packages/db && bun run db:migrate && cd ../..
+   ```
+
+   For ad-hoc schema iteration during development you can also use `bun run db:push` from `packages/db`, but `db:migrate` is the canonical command for both local and CI/CD setups.
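For reference, `bun run db:migrate` typically boils down to a drizzle-orm migrator call; a minimal sketch, assuming the postgres-js driver (the actual script in `packages/db` may differ):

```typescript
// migrate.ts: illustrative sketch, not the repo's actual script.
import { drizzle } from 'drizzle-orm/postgres-js'
import { migrate } from 'drizzle-orm/postgres-js/migrator'
import postgres from 'postgres'

const sql = postgres(process.env.DATABASE_URL!, { max: 1 }) // one connection is enough for migrations
await migrate(drizzle(sql), { migrationsFolder: './migrations' }) // packages/db/migrations/
await sql.end()
```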

+4. **Run the Development Servers:**

   ```bash
   bun run dev:full
   ```

-   This command starts both the main application and the realtime socket server required for full functionality.
+   This launches both apps with coloured prefixes:
+
+   - `[App]` — Next.js on `http://localhost:3000`
+   - `[Realtime]` — Socket.IO on `http://localhost:3002`
+
+   Or run them separately:
+
+   ```bash
+   bun run dev          # Next.js app only
+   bun run dev:sockets  # realtime server only
+   ```
+
+5. **Make Your Changes and Test Locally.**
+
+   Before opening a PR, run the same checks CI runs:
+
+   ```bash
+   bun run type-check   # TypeScript across every workspace
+   bun run lint:check   # Biome lint across every workspace
+   bun run test         # Vitest across every workspace
+   ```

### Email Template Development

When working on email templates, you can preview them using a local email preview server:
@@ -263,18 +317,19 @@ When working on email templates, you can preview them using a local email previe

1. **Run the Email Preview Server:**

   ```bash
-   bun run email:dev
+   cd apps/sim && bun run email:dev
   ```

2. **Access the Preview:**

-   - Open `http://localhost:3000` in your browser
-   - You'll see a list of all email templates
-   - Click on any template to view and test it with various parameters
+   - Open `http://localhost:3000` in your browser.
+   - You'll see a list of all email templates.
+   - Click on any template to view and test it with various parameters.

3. **Templates Location:**

-   - Email templates are located in `sim/app/emails/`
-   - After making changes to templates, they will automatically update in the preview
+   - Email templates live in `apps/sim/components/emails/`.
+   - Changes hot-reload automatically in the preview.

---
@@ -282,28 +337,41 @@ When working on email templates, you can preview them using a local email previe

Sim is built in a modular fashion where blocks and tools extend the platform's functionality. To maintain consistency and quality, please follow the guidelines below when adding a new block or tool.

> **Use the skill guides for step-by-step recipes.** The repository ships opinionated, end-to-end guides under `.agents/skills/` that cover the exact file layout, conventions, registry wiring, and gotchas for each kind of contribution. Read the relevant SKILL.md before you start writing code:
>
> | Adding… | Read |
> | --- | --- |
> | A new integration end-to-end (tools + block + icon + optional triggers + all registrations) | [`.agents/skills/add-integration/SKILL.md`](../.agents/skills/add-integration/SKILL.md) |
> | Just a block (or aligning an existing block with its tools) | [`.agents/skills/add-block/SKILL.md`](../.agents/skills/add-block/SKILL.md) |
> | Just tool configs for a service | [`.agents/skills/add-tools/SKILL.md`](../.agents/skills/add-tools/SKILL.md) |
> | A webhook trigger for a service | [`.agents/skills/add-trigger/SKILL.md`](../.agents/skills/add-trigger/SKILL.md) |
> | A knowledge-base connector (sync docs from an external source) | [`.agents/skills/add-connector/SKILL.md`](../.agents/skills/add-connector/SKILL.md) |
>
> The shorter overview below is a high-level reference; the SKILL.md files are the authoritative source of truth and stay in sync with the codebase.

### Where to Add Your Code

-- **Blocks:** Create your new block file under the `/apps/sim/blocks/blocks` directory. The name of the file should match the provider name (e.g., `pinecone.ts`).
-- **Tools:** Create a new directory under `/apps/sim/tools` with the same name as the provider (e.g., `/apps/sim/tools/pinecone`).
+- **Blocks:** Create your new block file under the `apps/sim/blocks/blocks/` directory. The name of the file should match the provider name (e.g., `pinecone.ts`).
+- **Tools:** Create a new directory under `apps/sim/tools/` with the same name as the provider (e.g., `apps/sim/tools/pinecone`).

In addition, you will need to update the registries:

-- **Block Registry:** Update the blocks index (`/apps/sim/blocks/index.ts`) to include your new block.
-- **Tool Registry:** Update the tools registry (`/apps/sim/tools/index.ts`) to add your new tool.
+- **Block Registry:** Add your block to `apps/sim/blocks/registry.ts`. (`apps/sim/blocks/index.ts` re-exports lookups from the registry; you do not need to edit it.)
+- **Tool Registry:** Add your tool to `apps/sim/tools/index.ts`.

### How to Create a New Block

1. **Create a New File:**
-   Create a file for your block named after the provider (e.g., `pinecone.ts`) in the `/apps/sim/blocks/blocks` directory.
+   Create a file for your block named after the provider (e.g., `pinecone.ts`) in the `apps/sim/blocks/blocks/` directory.

2. **Create a New Icon:**
-   Create a new icon for your block in the `/apps/sim/components/icons.tsx` file. The icon should follow the same naming convention as the block (e.g., `PineconeIcon`).
+   Create a new icon for your block in `apps/sim/components/icons.tsx`. The icon should follow the same naming convention as the block (e.g., `PineconeIcon`).

3. **Define the Block Configuration:**
   Your block should export a constant of type `BlockConfig`. For example:

-   ```typescript:/apps/sim/blocks/blocks/pinecone.ts
+   ```typescript
+   // apps/sim/blocks/blocks/pinecone.ts
   import { PineconeIcon } from '@/components/icons'
   import type { BlockConfig } from '@/blocks/types'
   import type { PineconeResponse } from '@/tools/pinecone/types'
@@ -321,7 +389,7 @@ In addition, you will need to update the registries:
   {
     id: 'operation',
     title: 'Operation',
-    type: 'dropdown'
+    type: 'dropdown',
     required: true,
     options: [
       { label: 'Generate Embeddings', id: 'generate' },
@@ -332,7 +400,7 @@ In addition, you will need to update the registries:
   {
     id: 'apiKey',
     title: 'API Key',
-    type: 'short-input'
+    type: 'short-input',
     placeholder: 'Your Pinecone API key',
     password: true,
     required: true,
@@ -370,10 +438,11 @@ In addition, you will need to update the registries:
   }
   ```

4. **Register Your Block:**
-   Add your block to the blocks registry (`/apps/sim/blocks/registry.ts`):
+   Add your block to the blocks registry (`apps/sim/blocks/registry.ts`):

-   ```typescript:/apps/sim/blocks/registry.ts
+   ```typescript
+   // apps/sim/blocks/registry.ts
   import { PineconeBlock } from '@/blocks/blocks/pinecone'

   // Registry of all available blocks
@@ -385,24 +454,25 @@ In addition, you will need to update the registries:

The block will be automatically available to the application through the registry.

5. **Test Your Block:**
   Ensure that the block displays correctly in the UI and that its functionality works as expected.

### How to Create a New Tool

1. **Create a New Directory:**
-   Create a directory under `/apps/sim/tools` with the same name as the provider (e.g., `/apps/sim/tools/pinecone`).
+   Create a directory under `apps/sim/tools/` with the same name as the provider (e.g., `apps/sim/tools/pinecone`).

2. **Create Tool Files:**
   Create separate files for each tool functionality with descriptive names (e.g., `fetch.ts`, `generate_embeddings.ts`, `search_text.ts`) in your tool directory.

3. **Create a Types File:**
   Create a `types.ts` file in your tool directory to define and export all types related to your tools.

4. **Create an Index File:**
   Create an `index.ts` file in your tool directory that imports and exports all tools:

-   ```typescript:/apps/sim/tools/pinecone/index.ts
+   ```typescript
+   // apps/sim/tools/pinecone/index.ts
   import { fetchTool } from './fetch'
   import { generateEmbeddingsTool } from './generate_embeddings'
   import { searchTextTool } from './search_text'
@@ -410,10 +480,11 @@ In addition, you will need to update the registries:
   export { fetchTool, generateEmbeddingsTool, searchTextTool }
   ```

5. **Define the Tool Configuration:**
   Your tool should export a constant with a naming convention of `{toolName}Tool`. The tool ID should follow the format `{provider}_{tool_name}`. For example:

-   ```typescript:/apps/sim/tools/pinecone/fetch.ts
+   ```typescript
+   // apps/sim/tools/pinecone/fetch.ts
   import { ToolConfig, ToolResponse } from '@/tools/types'
   import { PineconeParams, PineconeResponse } from '@/tools/pinecone/types'
@@ -449,11 +520,12 @@ In addition, you will need to update the registries:
   }
   ```
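To make the expected shape concrete, here is a hedged minimal sketch of such an export; apart from the `id` convention and the import path shown above, the field names are illustrative assumptions rather than Sim's verified `ToolConfig` contract:

```typescript
// Illustrative only: consult @/tools/types for the real ToolConfig fields.
import type { ToolConfig } from '@/tools/types'

export const fetchTool = {
  id: 'pinecone_fetch', // convention: {provider}_{tool_name}
  description: 'Fetch vectors by ID from a Pinecone index', // assumed field name
  // params, request wiring, and response handling go here (see the skill guides)
} as unknown as ToolConfig // cast because this sketch omits required fields
```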

6. **Register Your Tool:**
-   Update the tools registry in `/apps/sim/tools/index.ts` to include your new tool:
+   Update the tools registry in `apps/sim/tools/index.ts` to include your new tool:

-   ```typescript:/apps/sim/tools/index.ts
-   import { fetchTool, generateEmbeddingsTool, searchTextTool } from '/@tools/pinecone'
+   ```typescript
+   // apps/sim/tools/index.ts
+   import { fetchTool, generateEmbeddingsTool, searchTextTool } from '@/tools/pinecone'
   // ... other imports

   export const tools: Record<string, ToolConfig> = {
@@ -464,13 +536,14 @@ In addition, you will need to update the registries:
   }
   ```

7. **Test Your Tool:**
   Ensure that your tool functions correctly by making test requests and verifying the responses.

8. **Generate Documentation:**
-   Run the documentation generator to create docs for your new tool:
+   Run the documentation generator (from `apps/sim`) to create docs for your new tool:

   ```bash
-   ./scripts/generate-docs.sh
+   cd apps/sim && bun run generate-docs
   ```

### Naming Conventions
@@ -480,7 +553,7 @@ Maintaining consistent naming across the codebase is critical for auto-generatio

- **Block Files:** Name should match the provider (e.g., `pinecone.ts`)
- **Block Export:** Should be named `{Provider}Block` (e.g., `PineconeBlock`)
- **Icons:** Should be named `{Provider}Icon` (e.g., `PineconeIcon`)
-- **Tool Directories:** Should match the provider name (e.g., `/tools/pinecone/`)
+- **Tool Directories:** Should match the provider name (e.g., `tools/pinecone/`)
- **Tool Files:** Should be named after their function (e.g., `fetch.ts`, `search_text.ts`)
- **Tool Exports:** Should be named `{toolName}Tool` (e.g., `fetchTool`)
- **Tool IDs:** Should follow the format `{provider}_{tool_name}` (e.g., `pinecone_fetch`)
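Taken together, the conventions line up like this (a sketch reusing the hypothetical Pinecone example from above):

```typescript
// apps/sim/tools/pinecone/fetch.ts: tool file named after its function
// export const fetchTool = { id: 'pinecone_fetch', ... }   // {toolName}Tool; id is {provider}_{tool_name}

// apps/sim/components/icons.tsx: icon export named {Provider}Icon
// export function PineconeIcon(...) { ... }

// apps/sim/blocks/blocks/pinecone.ts: block export named {Provider}Block
// export const PineconeBlock: BlockConfig = { ... } // wires up PineconeIcon and the pinecone_* tools
```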
@@ -489,12 +562,12 @@ Maintaining consistent naming across the codebase is critical for auto-generatio

Sim implements a sophisticated parameter visibility system that controls how parameters are exposed to users and LLMs in agent workflows. Each parameter can have one of four visibility levels:

| Visibility    | User Sees | LLM Sees | How It Gets Set                |
| ------------- | --------- | -------- | ------------------------------ |
| `user-only`   | ✅ Yes    | ❌ No    | User provides in UI            |
| `user-or-llm` | ✅ Yes    | ✅ Yes   | User provides OR LLM generates |
| `llm-only`    | ❌ No     | ✅ Yes   | LLM generates only             |
| `hidden`      | ❌ No     | ❌ No    | Application injects at runtime |
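For illustration, a hedged sketch of parameter declarations at each level (the exact property names on Sim's parameter objects are assumptions, not verified against the codebase):

```typescript
// Hypothetical tool parameters, one per visibility level; field names are assumptions.
const exampleParams = {
  apiKey: { type: 'string', required: true, visibility: 'user-only' }, // user supplies it in the UI; never exposed to the LLM
  query: { type: 'string', required: true, visibility: 'user-or-llm' }, // user may fill it in, otherwise the LLM generates it
  summary: { type: 'string', required: false, visibility: 'llm-only' }, // only the LLM produces this
  workflowId: { type: 'string', required: false, visibility: 'hidden' }, // injected by the application at runtime
} as const
```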
#### Visibility Guidelines
.github/workflows/docs-embeddings.yml (vendored): 2 lines changed
@@ -20,7 +20,7 @@ jobs:

      - name: Setup Bun
        uses: oven-sh/setup-bun@v2
        with:
-          bun-version: 1.3.11
+          bun-version: 1.3.13

      - name: Setup Node
        uses: actions/setup-node@v4
.github/workflows/i18n.yml (vendored): 4 lines changed
@@ -23,7 +23,7 @@ jobs:

      - name: Setup Bun
        uses: oven-sh/setup-bun@v2
        with:
-          bun-version: 1.3.11
+          bun-version: 1.3.13

      - name: Cache Bun dependencies
        uses: actions/cache@v4
@@ -122,7 +122,7 @@ jobs:

      - name: Setup Bun
        uses: oven-sh/setup-bun@v2
        with:
-          bun-version: 1.3.11
+          bun-version: 1.3.13

      - name: Cache Bun dependencies
        uses: actions/cache@v4
.github/workflows/migrations.yml (vendored): 2 lines changed
@@ -19,7 +19,7 @@ jobs:

      - name: Setup Bun
        uses: oven-sh/setup-bun@v2
        with:
-          bun-version: 1.3.11
+          bun-version: 1.3.13

      - name: Cache Bun dependencies
        uses: actions/cache@v4
.github/workflows/publish-cli.yml (vendored): 2 lines changed
@@ -19,7 +19,7 @@ jobs:

      - name: Setup Bun
        uses: oven-sh/setup-bun@v2
        with:
-          bun-version: 1.3.11
+          bun-version: 1.3.13

      - name: Setup Node.js for npm publishing
        uses: actions/setup-node@v4
.github/workflows/publish-ts-sdk.yml (vendored): 2 lines changed
@@ -19,7 +19,7 @@ jobs:

      - name: Setup Bun
        uses: oven-sh/setup-bun@v2
        with:
-          bun-version: 1.3.11
+          bun-version: 1.3.13

      - name: Setup Node.js for npm publishing
        uses: actions/setup-node@v4
.github/workflows/test-build.yml (vendored): 2 lines changed
@@ -19,7 +19,7 @@ jobs:

      - name: Setup Bun
        uses: oven-sh/setup-bun@v2
        with:
-          bun-version: 1.3.11
+          bun-version: 1.3.13

      - name: Setup Node
        uses: actions/setup-node@v4
@@ -38,7 +38,7 @@ Integrate Ashby into the workflow. Manage candidates (list, get, create, update,

### `ashby_add_candidate_tag`

-Adds a tag to a candidate in Ashby.
+Adds a tag to a candidate in Ashby and returns the updated candidate.

#### Input
@@ -52,7 +52,37 @@ Adds a tag to a candidate in Ashby.

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the tag was successfully added |
| `candidates` | json | List of candidates with rich fields \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], linkedInUrl, githubUrl, profileUrl, position, company, school, timezone, location with locationComponents\[\], tags\[\], applicationIds\[\], customFields\[\], resumeFileHandle, fileHandles\[\], source with sourceType, creditedToUser, fraudStatus, createdAt, updatedAt\) |
| `jobs` | json | List of jobs \(id, title, confidential, status, employmentType, locationId, departmentId, defaultInterviewPlanId, interviewPlanIds\[\], customFields\[\], jobPostingIds\[\], customRequisitionId, brandId, hiringTeam\[\], author, createdAt, updatedAt, openedAt, closedAt, location with address, openings\[\] with latestVersion, compensation with compensationTiers\[\]\) |
| `applications` | json | List of applications \(id, status, customFields\[\], candidate summary, currentInterviewStage, source with sourceType, archiveReason with customFields\[\], archivedAt, job summary, creditedToUser, hiringTeam\[\], appliedViaJobPostingId, submitterClientIp, submitterUserAgent, createdAt, updatedAt\) |
| `notes` | json | List of notes \(id, content, author, isPrivate, createdAt\) |
| `offers` | json | List of offers \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion with id/startDate/salary/createdAt/openingId/customFields\[\]/fileHandles\[\]/author/approvalStatus\) |
| `archiveReasons` | json | List of archive reasons \(id, text, reasonType \[RejectedByCandidate/RejectedByOrg/Other\], isArchived\) |
| `sources` | json | List of sources \(id, title, isArchived, sourceType \{id, title, isArchived\}\) |
| `customFields` | json | List of custom field definitions \(id, title, isPrivate, fieldType, objectType, isArchived, isRequired, selectableValues\[\] \{label, value, isArchived\}\) |
| `departments` | json | List of departments \(id, name, externalName, isArchived, parentId, createdAt, updatedAt\) |
| `locations` | json | List of locations \(id, name, externalName, isArchived, isRemote, workplaceType, parentLocationId, type, address with addressCountry/Region/Locality/postalCode/streetAddress\) |
| `jobPostings` | json | List of job postings \(id, title, jobId, departmentName, teamName, locationName, locationIds, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensationTierSummary, shouldDisplayCompensationOnJobBoard, updatedAt\) |
| `openings` | json | List of openings \(id, openedAt, closedAt, isArchived, archivedAt, closeReasonId, openingState, latestVersion with identifier/description/authorId/createdAt/teamId/jobIds\[\]/targetHireDate/targetStartDate/isBackfill/employmentType/locationIds\[\]/hiringTeam\[\]/customFields\[\]\) |
| `users` | json | List of users \(id, firstName, lastName, email, globalRole, isEnabled, updatedAt, managerId\) |
| `interviewSchedules` | json | List of interview schedules \(id, applicationId, interviewStageId, interviewEvents\[\] with interviewerUserIds/startTime/endTime/feedbackLink/location/meetingLink/hasSubmittedFeedback, status, scheduledBy, createdAt, updatedAt\) |
| `tags` | json | List of candidate tags \(id, title, isArchived\) |
| `id` | string | Resource UUID |
| `name` | string | Resource name |
| `title` | string | Job title or job posting title |
| `status` | string | Status |
| `candidate` | json | Candidate details \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], customFields\[\], source, creditedToUser, createdAt, updatedAt\) |
| `job` | json | Job details \(id, title, status, employmentType, locationId, departmentId, hiringTeam\[\], author, location, openings\[\], compensation, createdAt, updatedAt\) |
| `application` | json | Application details \(id, status, customFields\[\], candidate, currentInterviewStage, source, archiveReason, job, hiringTeam\[\], createdAt, updatedAt\) |
| `offer` | json | Offer details \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion\) |
| `jobPosting` | json | Job posting details \(id, title, descriptionPlain, descriptionHtml, descriptionSocial, descriptionParts, departmentName, teamName, teamNameHierarchy\[\], jobId, locationName, locationIds, linkedData, address, isRemote, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensation, updatedAt\) |
| `content` | string | Note content |
| `author` | json | Note author \(id, firstName, lastName, email, globalRole, isEnabled\) |
| `isPrivate` | boolean | Whether the note is private |
| `createdAt` | string | ISO 8601 creation timestamp |
| `moreDataAvailable` | boolean | Whether more pages exist |
| `nextCursor` | string | Pagination cursor for next page |
| `syncToken` | string | Sync token for incremental updates |
### `ashby_change_application_stage`
@@ -71,8 +101,37 @@ Moves an application to a different interview stage. Requires an archive reason

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `applicationId` | string | Application UUID |
| `stageId` | string | New interview stage UUID |

### `ashby_create_application`
@@ -95,7 +154,37 @@ Creates a new application for a candidate on a job. Optionally specify interview

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `applicationId` | string | Created application UUID |

### `ashby_create_candidate`
@@ -107,7 +196,7 @@ Creates a new candidate record in Ashby.

| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Ashby API Key |
| `name` | string | Yes | The candidate full name |
-| `email` | string | Yes | Primary email address for the candidate |
+| `email` | string | No | Primary email address for the candidate |
| `phoneNumber` | string | No | Primary phone number for the candidate |
| `linkedInUrl` | string | No | LinkedIn profile URL |
| `githubUrl` | string | No | GitHub profile URL |
@@ -117,17 +206,37 @@ Creates a new candidate record in Ashby.

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Created candidate UUID |
| `name` | string | Full name |
| `primaryEmailAddress` | object | Primary email contact info |
| ↳ `value` | string | Email address |
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
| ↳ `isPrimary` | boolean | Whether this is the primary email |
| `primaryPhoneNumber` | object | Primary phone contact info |
| ↳ `value` | string | Phone number |
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
| ↳ `isPrimary` | boolean | Whether this is the primary phone |

### `ashby_create_note`
@@ -147,7 +256,15 @@ Creates a note on a candidate in Ashby. Supports plain text and HTML content (bo

| Parameter | Type | Description |
| --------- | ---- | ----------- |
-| `noteId` | string | Created note UUID |
+| `id` | string | Created note UUID |
| `createdAt` | string | ISO 8601 creation timestamp |
| `isPrivate` | boolean | Whether the note is private |
| `content` | string | Note content |
| `author` | object | Author of the note |
| ↳ `id` | string | Author user UUID |
| ↳ `firstName` | string | Author first name |
| ↳ `lastName` | string | Author last name |
| ↳ `email` | string | Author email |
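The rows above imply roughly the following response shape (an illustrative sketch; the nesting comes from the table, the optionality is assumed):

```typescript
// Shape implied by the ashby_create_note output table above; illustrative only.
interface AshbyCreateNoteOutput {
  id: string         // created note UUID
  createdAt: string  // ISO 8601 creation timestamp
  isPrivate: boolean // whether the note is private
  content: string    // note content
  author: {
    id: string       // author user UUID
    firstName: string
    lastName: string
    email: string
  }
}
```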
### `ashby_get_application`
@@ -164,28 +281,37 @@ Retrieves full details about a single application by its ID.

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Application UUID |
| `status` | string | Application status \(Active, Hired, Archived, Lead\) |
| `candidate` | object | Associated candidate |
| ↳ `id` | string | Candidate UUID |
| ↳ `name` | string | Candidate name |
| `job` | object | Associated job |
| ↳ `id` | string | Job UUID |
| ↳ `title` | string | Job title |
| `currentInterviewStage` | object | Current interview stage |
| ↳ `id` | string | Stage UUID |
| ↳ `title` | string | Stage title |
| ↳ `type` | string | Stage type |
| `source` | object | Application source |
| ↳ `id` | string | Source UUID |
| ↳ `title` | string | Source title |
| `archiveReason` | object | Reason for archival |
| ↳ `id` | string | Reason UUID |
| ↳ `text` | string | Reason text |
| ↳ `reasonType` | string | Reason type |
| `archivedAt` | string | ISO 8601 archive timestamp |
| `updatedAt` | string | ISO 8601 last update timestamp |
### `ashby_get_candidate`

@@ -202,27 +328,37 @@ Retrieves full details about a single candidate by their ID.

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Candidate UUID |
| `name` | string | Full name |
| `primaryEmailAddress` | object | Primary email contact info |
| ↳ `value` | string | Email address |
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
| ↳ `isPrimary` | boolean | Whether this is the primary email |
| `primaryPhoneNumber` | object | Primary phone contact info |
| ↳ `value` | string | Phone number |
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
| ↳ `isPrimary` | boolean | Whether this is the primary phone |
| `profileUrl` | string | URL to the candidate Ashby profile |
| `position` | string | Current position or title |
| `company` | string | Current company |
| `linkedInUrl` | string | LinkedIn profile URL |
| `githubUrl` | string | GitHub profile URL |
| `tags` | array | Tags applied to the candidate |
| ↳ `id` | string | Tag UUID |
| ↳ `title` | string | Tag title |
| `applicationIds` | array | IDs of associated applications |
| `candidates` | json | List of candidates with rich fields \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], linkedInUrl, githubUrl, profileUrl, position, company, school, timezone, location with locationComponents\[\], tags\[\], applicationIds\[\], customFields\[\], resumeFileHandle, fileHandles\[\], source with sourceType, creditedToUser, fraudStatus, createdAt, updatedAt\) |
| `jobs` | json | List of jobs \(id, title, confidential, status, employmentType, locationId, departmentId, defaultInterviewPlanId, interviewPlanIds\[\], customFields\[\], jobPostingIds\[\], customRequisitionId, brandId, hiringTeam\[\], author, createdAt, updatedAt, openedAt, closedAt, location with address, openings\[\] with latestVersion, compensation with compensationTiers\[\]\) |
| `applications` | json | List of applications \(id, status, customFields\[\], candidate summary, currentInterviewStage, source with sourceType, archiveReason with customFields\[\], archivedAt, job summary, creditedToUser, hiringTeam\[\], appliedViaJobPostingId, submitterClientIp, submitterUserAgent, createdAt, updatedAt\) |
| `notes` | json | List of notes \(id, content, author, isPrivate, createdAt\) |
| `offers` | json | List of offers \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion with id/startDate/salary/createdAt/openingId/customFields\[\]/fileHandles\[\]/author/approvalStatus\) |
| `archiveReasons` | json | List of archive reasons \(id, text, reasonType \[RejectedByCandidate/RejectedByOrg/Other\], isArchived\) |
| `sources` | json | List of sources \(id, title, isArchived, sourceType \{id, title, isArchived\}\) |
| `customFields` | json | List of custom field definitions \(id, title, isPrivate, fieldType, objectType, isArchived, isRequired, selectableValues\[\] \{label, value, isArchived\}\) |
| `departments` | json | List of departments \(id, name, externalName, isArchived, parentId, createdAt, updatedAt\) |
| `locations` | json | List of locations \(id, name, externalName, isArchived, isRemote, workplaceType, parentLocationId, type, address with addressCountry/Region/Locality/postalCode/streetAddress\) |
| `jobPostings` | json | List of job postings \(id, title, jobId, departmentName, teamName, locationName, locationIds, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensationTierSummary, shouldDisplayCompensationOnJobBoard, updatedAt\) |
| `openings` | json | List of openings \(id, openedAt, closedAt, isArchived, archivedAt, closeReasonId, openingState, latestVersion with identifier/description/authorId/createdAt/teamId/jobIds\[\]/targetHireDate/targetStartDate/isBackfill/employmentType/locationIds\[\]/hiringTeam\[\]/customFields\[\]\) |
| `users` | json | List of users \(id, firstName, lastName, email, globalRole, isEnabled, updatedAt, managerId\) |
| `interviewSchedules` | json | List of interview schedules \(id, applicationId, interviewStageId, interviewEvents\[\] with interviewerUserIds/startTime/endTime/feedbackLink/location/meetingLink/hasSubmittedFeedback, status, scheduledBy, createdAt, updatedAt\) |
| `tags` | json | List of candidate tags \(id, title, isArchived\) |
| `id` | string | Resource UUID |
| `name` | string | Resource name |
| `title` | string | Job title or job posting title |
| `status` | string | Status |
| `candidate` | json | Candidate details \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], customFields\[\], source, creditedToUser, createdAt, updatedAt\) |
| `job` | json | Job details \(id, title, status, employmentType, locationId, departmentId, hiringTeam\[\], author, location, openings\[\], compensation, createdAt, updatedAt\) |
| `application` | json | Application details \(id, status, customFields\[\], candidate, currentInterviewStage, source, archiveReason, job, hiringTeam\[\], createdAt, updatedAt\) |
| `offer` | json | Offer details \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion\) |
| `jobPosting` | json | Job posting details \(id, title, descriptionPlain, descriptionHtml, descriptionSocial, descriptionParts, departmentName, teamName, teamNameHierarchy\[\], jobId, locationName, locationIds, linkedData, address, isRemote, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensation, updatedAt\) |
| `content` | string | Note content |
| `author` | json | Note author \(id, firstName, lastName, email, globalRole, isEnabled\) |
| `isPrivate` | boolean | Whether the note is private |
| `createdAt` | string | ISO 8601 creation timestamp |
| `updatedAt` | string | ISO 8601 last update timestamp |
| `moreDataAvailable` | boolean | Whether more pages exist |
| `nextCursor` | string | Pagination cursor for next page |
| `syncToken` | string | Sync token for incremental updates |

### `ashby_get_job`

@@ -239,16 +375,37 @@ Retrieves full details about a single job by its ID.

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Job UUID |
| `title` | string | Job title |
| `status` | string | Job status \(Open, Closed, Draft, Archived\) |
| `employmentType` | string | Employment type \(FullTime, PartTime, Intern, Contract, Temporary\) |
| `departmentId` | string | Department UUID |
| `locationId` | string | Location UUID |
| `descriptionPlain` | string | Job description in plain text |
| `isArchived` | boolean | Whether the job is archived |
| `candidates` | json | List of candidates with rich fields \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], linkedInUrl, githubUrl, profileUrl, position, company, school, timezone, location with locationComponents\[\], tags\[\], applicationIds\[\], customFields\[\], resumeFileHandle, fileHandles\[\], source with sourceType, creditedToUser, fraudStatus, createdAt, updatedAt\) |
| `jobs` | json | List of jobs \(id, title, confidential, status, employmentType, locationId, departmentId, defaultInterviewPlanId, interviewPlanIds\[\], customFields\[\], jobPostingIds\[\], customRequisitionId, brandId, hiringTeam\[\], author, createdAt, updatedAt, openedAt, closedAt, location with address, openings\[\] with latestVersion, compensation with compensationTiers\[\]\) |
| `applications` | json | List of applications \(id, status, customFields\[\], candidate summary, currentInterviewStage, source with sourceType, archiveReason with customFields\[\], archivedAt, job summary, creditedToUser, hiringTeam\[\], appliedViaJobPostingId, submitterClientIp, submitterUserAgent, createdAt, updatedAt\) |
| `notes` | json | List of notes \(id, content, author, isPrivate, createdAt\) |
| `offers` | json | List of offers \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion with id/startDate/salary/createdAt/openingId/customFields\[\]/fileHandles\[\]/author/approvalStatus\) |
| `archiveReasons` | json | List of archive reasons \(id, text, reasonType \[RejectedByCandidate/RejectedByOrg/Other\], isArchived\) |
| `sources` | json | List of sources \(id, title, isArchived, sourceType \{id, title, isArchived\}\) |
| `customFields` | json | List of custom field definitions \(id, title, isPrivate, fieldType, objectType, isArchived, isRequired, selectableValues\[\] \{label, value, isArchived\}\) |
| `departments` | json | List of departments \(id, name, externalName, isArchived, parentId, createdAt, updatedAt\) |
| `locations` | json | List of locations \(id, name, externalName, isArchived, isRemote, workplaceType, parentLocationId, type, address with addressCountry/Region/Locality/postalCode/streetAddress\) |
| `jobPostings` | json | List of job postings \(id, title, jobId, departmentName, teamName, locationName, locationIds, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensationTierSummary, shouldDisplayCompensationOnJobBoard, updatedAt\) |
| `openings` | json | List of openings \(id, openedAt, closedAt, isArchived, archivedAt, closeReasonId, openingState, latestVersion with identifier/description/authorId/createdAt/teamId/jobIds\[\]/targetHireDate/targetStartDate/isBackfill/employmentType/locationIds\[\]/hiringTeam\[\]/customFields\[\]\) |
| `users` | json | List of users \(id, firstName, lastName, email, globalRole, isEnabled, updatedAt, managerId\) |
| `interviewSchedules` | json | List of interview schedules \(id, applicationId, interviewStageId, interviewEvents\[\] with interviewerUserIds/startTime/endTime/feedbackLink/location/meetingLink/hasSubmittedFeedback, status, scheduledBy, createdAt, updatedAt\) |
| `tags` | json | List of candidate tags \(id, title, isArchived\) |
| `id` | string | Resource UUID |
| `name` | string | Resource name |
| `title` | string | Job title or job posting title |
| `status` | string | Status |
| `candidate` | json | Candidate details \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], customFields\[\], source, creditedToUser, createdAt, updatedAt\) |
| `job` | json | Job details \(id, title, status, employmentType, locationId, departmentId, hiringTeam\[\], author, location, openings\[\], compensation, createdAt, updatedAt\) |
| `application` | json | Application details \(id, status, customFields\[\], candidate, currentInterviewStage, source, archiveReason, job, hiringTeam\[\], createdAt, updatedAt\) |
| `offer` | json | Offer details \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion\) |
| `jobPosting` | json | Job posting details \(id, title, descriptionPlain, descriptionHtml, descriptionSocial, descriptionParts, departmentName, teamName, teamNameHierarchy\[\], jobId, locationName, locationIds, linkedData, address, isRemote, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensation, updatedAt\) |
| `content` | string | Note content |
| `author` | json | Note author \(id, firstName, lastName, email, globalRole, isEnabled\) |
| `isPrivate` | boolean | Whether the note is private |
| `createdAt` | string | ISO 8601 creation timestamp |
| `updatedAt` | string | ISO 8601 last update timestamp |
| `moreDataAvailable` | boolean | Whether more pages exist |
| `nextCursor` | string | Pagination cursor for next page |
| `syncToken` | string | Sync token for incremental updates |

### `ashby_get_job_posting`

@@ -260,6 +417,8 @@ Retrieves full details about a single job posting by its ID.

| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Ashby API Key |
| `jobPostingId` | string | Yes | The UUID of the job posting to fetch |
| `expandApplicationFormDefinition` | boolean | No | Include application form definition in the response |
| `expandSurveyFormDefinitions` | boolean | No | Include survey form definitions in the response |

#### Output

@@ -267,14 +426,56 @@ Retrieves full details about a single job posting by its ID.

| --------- | ---- | ----------- |
| `id` | string | Job posting UUID |
| `title` | string | Job posting title |
| `jobId` | string | Associated job UUID |
| `locationName` | string | Location name |
| `descriptionPlain` | string | Full description in plain text |
| `descriptionHtml` | string | Full description in HTML |
| `descriptionSocial` | string | Shortened description for social sharing \(max 200 chars\) |
| `descriptionParts` | object | Description broken into opening, body, and closing sections |
| ↳ `descriptionOpening` | object | Opening \(from Job Boards theme settings\) |
| ↳ `html` | string | HTML content |
| ↳ `plain` | string | Plain text content |
| ↳ `descriptionBody` | object | Main description body |
| ↳ `html` | string | HTML content |
| ↳ `plain` | string | Plain text content |
| ↳ `descriptionClosing` | object | Closing \(from Job Boards theme settings\) |
| ↳ `html` | string | HTML content |
| ↳ `plain` | string | Plain text content |
| `departmentName` | string | Department name |
| `employmentType` | string | Employment type \(e.g. FullTime, PartTime, Contract\) |
| `descriptionPlain` | string | Job posting description in plain text |
| `isListed` | boolean | Whether the posting is publicly listed |
| `teamName` | string | Team name |
| `teamNameHierarchy` | array | Hierarchy of team names from root to team |
| `jobId` | string | Associated job UUID |
| `locationName` | string | Primary location name |
| `locationIds` | object | Primary and secondary location UUIDs |
| ↳ `primaryLocationId` | string | Primary location UUID |
| ↳ `secondaryLocationIds` | array | Secondary location UUIDs |
| `address` | object | Postal address of the posting location |
| ↳ `postalAddress` | object | Structured postal address |
| ↳ `addressCountry` | string | Country |
| ↳ `addressRegion` | string | State or region |
| ↳ `addressLocality` | string | City or locality |
| ↳ `postalCode` | string | Postal code |
| ↳ `streetAddress` | string | Street address |
| `isRemote` | boolean | Whether the posting is remote |
| `workplaceType` | string | Workplace type \(OnSite, Remote, Hybrid\) |
| `employmentType` | string | Employment type \(FullTime, PartTime, Intern, Contract, Temporary\) |
| `isListed` | boolean | Whether publicly listed on the job board |
| `suppressDescriptionOpening` | boolean | Whether the theme opening is hidden on this posting |
| `suppressDescriptionClosing` | boolean | Whether the theme closing is hidden on this posting |
| `publishedDate` | string | ISO 8601 published date |
| `applicationDeadline` | string | ISO 8601 application deadline |
| `externalLink` | string | External link to the job posting |
| `applyLink` | string | Direct apply link |
| `compensation` | object | Compensation details for the posting |
| ↳ `compensationTierSummary` | string | Human-readable tier summary |
| ↳ `summaryComponents` | array | Structured compensation components |
| ↳ `summary` | string | Component summary |
| ↳ `compensationTypeLabel` | string | Component type label \(Salary, Commission, Bonus, Equity, etc.\) |
| ↳ `interval` | string | Payment interval \(e.g. annual, hourly\) |
| ↳ `currencyCode` | string | ISO 4217 currency code |
| ↳ `minValue` | number | Minimum value |
| ↳ `maxValue` | number | Maximum value |
| ↳ `shouldDisplayCompensationOnJobBoard` | boolean | Whether compensation is shown on the job board |
| `applicationLimitCalloutHtml` | string | HTML callout shown when application limit is reached |
| `updatedAt` | string | ISO 8601 last update timestamp |

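Read together, the rows above describe one deeply nested object. As a rough sketch, the payload corresponds to the following TypeScript type; field names come from the table, while optionality and exact nesting are our assumptions, not published types:

```typescript
// Sketch of the job posting payload inferred from the table above.
// Optionality and nesting are assumptions, not the tool's published types.
interface JobPostingOutput {
  id: string
  title: string
  jobId: string
  descriptionPlain: string
  descriptionHtml: string
  descriptionSocial: string
  descriptionParts: {
    descriptionOpening: { html: string; plain: string }
    descriptionBody: { html: string; plain: string }
    descriptionClosing: { html: string; plain: string }
  }
  locationIds: { primaryLocationId: string; secondaryLocationIds: string[] }
  address?: {
    postalAddress: {
      addressCountry: string
      addressRegion: string
      addressLocality: string
      postalCode: string
      streetAddress: string
    }
  }
  isRemote: boolean
  workplaceType: 'OnSite' | 'Remote' | 'Hybrid'
  isListed: boolean
  compensation?: {
    compensationTierSummary: string
    summaryComponents: Array<{
      summary: string
      compensationTypeLabel: string
      interval: string
      currencyCode: string
      minValue: number
      maxValue: number
    }>
    shouldDisplayCompensationOnJobBoard: boolean
  }
  publishedDate?: string
  applicationDeadline?: string
  externalLink?: string
  applyLink?: string
  updatedAt: string
}
```
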
### `ashby_get_offer`

@@ -291,20 +492,41 @@ Retrieves full details about a single offer by its ID.

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Offer UUID |
| `offerStatus` | string | Offer status \(e.g. WaitingOnCandidateResponse, CandidateAccepted\) |
| `acceptanceStatus` | string | Acceptance status \(e.g. Accepted, Declined, Pending\) |
| `applicationId` | string | Associated application UUID |
| `startDate` | string | Offer start date |
| `salary` | object | Salary details |
| ↳ `currencyCode` | string | ISO 4217 currency code |
| ↳ `value` | number | Salary amount |
| `openingId` | string | Associated opening UUID |
| `createdAt` | string | ISO 8601 creation timestamp \(from latest version\) |
| `candidates` | json | List of candidates with rich fields \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], linkedInUrl, githubUrl, profileUrl, position, company, school, timezone, location with locationComponents\[\], tags\[\], applicationIds\[\], customFields\[\], resumeFileHandle, fileHandles\[\], source with sourceType, creditedToUser, fraudStatus, createdAt, updatedAt\) |
| `jobs` | json | List of jobs \(id, title, confidential, status, employmentType, locationId, departmentId, defaultInterviewPlanId, interviewPlanIds\[\], customFields\[\], jobPostingIds\[\], customRequisitionId, brandId, hiringTeam\[\], author, createdAt, updatedAt, openedAt, closedAt, location with address, openings\[\] with latestVersion, compensation with compensationTiers\[\]\) |
| `applications` | json | List of applications \(id, status, customFields\[\], candidate summary, currentInterviewStage, source with sourceType, archiveReason with customFields\[\], archivedAt, job summary, creditedToUser, hiringTeam\[\], appliedViaJobPostingId, submitterClientIp, submitterUserAgent, createdAt, updatedAt\) |
| `notes` | json | List of notes \(id, content, author, isPrivate, createdAt\) |
| `offers` | json | List of offers \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion with id/startDate/salary/createdAt/openingId/customFields\[\]/fileHandles\[\]/author/approvalStatus\) |
| `archiveReasons` | json | List of archive reasons \(id, text, reasonType \[RejectedByCandidate/RejectedByOrg/Other\], isArchived\) |
| `sources` | json | List of sources \(id, title, isArchived, sourceType \{id, title, isArchived\}\) |
| `customFields` | json | List of custom field definitions \(id, title, isPrivate, fieldType, objectType, isArchived, isRequired, selectableValues\[\] \{label, value, isArchived\}\) |
| `departments` | json | List of departments \(id, name, externalName, isArchived, parentId, createdAt, updatedAt\) |
| `locations` | json | List of locations \(id, name, externalName, isArchived, isRemote, workplaceType, parentLocationId, type, address with addressCountry/Region/Locality/postalCode/streetAddress\) |
| `jobPostings` | json | List of job postings \(id, title, jobId, departmentName, teamName, locationName, locationIds, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensationTierSummary, shouldDisplayCompensationOnJobBoard, updatedAt\) |
| `openings` | json | List of openings \(id, openedAt, closedAt, isArchived, archivedAt, closeReasonId, openingState, latestVersion with identifier/description/authorId/createdAt/teamId/jobIds\[\]/targetHireDate/targetStartDate/isBackfill/employmentType/locationIds\[\]/hiringTeam\[\]/customFields\[\]\) |
| `users` | json | List of users \(id, firstName, lastName, email, globalRole, isEnabled, updatedAt, managerId\) |
| `interviewSchedules` | json | List of interview schedules \(id, applicationId, interviewStageId, interviewEvents\[\] with interviewerUserIds/startTime/endTime/feedbackLink/location/meetingLink/hasSubmittedFeedback, status, scheduledBy, createdAt, updatedAt\) |
| `tags` | json | List of candidate tags \(id, title, isArchived\) |
| `id` | string | Resource UUID |
| `name` | string | Resource name |
| `title` | string | Job title or job posting title |
| `status` | string | Status |
| `candidate` | json | Candidate details \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], customFields\[\], source, creditedToUser, createdAt, updatedAt\) |
| `job` | json | Job details \(id, title, status, employmentType, locationId, departmentId, hiringTeam\[\], author, location, openings\[\], compensation, createdAt, updatedAt\) |
| `application` | json | Application details \(id, status, customFields\[\], candidate, currentInterviewStage, source, archiveReason, job, hiringTeam\[\], createdAt, updatedAt\) |
| `offer` | json | Offer details \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion\) |
| `jobPosting` | json | Job posting details \(id, title, descriptionPlain, descriptionHtml, descriptionSocial, descriptionParts, departmentName, teamName, teamNameHierarchy\[\], jobId, locationName, locationIds, linkedData, address, isRemote, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensation, updatedAt\) |
| `content` | string | Note content |
| `author` | json | Note author \(id, firstName, lastName, email, globalRole, isEnabled\) |
| `isPrivate` | boolean | Whether the note is private |
| `createdAt` | string | ISO 8601 creation timestamp |
| `moreDataAvailable` | boolean | Whether more pages exist |
| `nextCursor` | string | Pagination cursor for next page |
| `syncToken` | string | Sync token for incremental updates |

### `ashby_list_applications`

Lists all applications in an Ashby organization with pagination and optional filters for status, job, candidate, and creation date.
Lists all applications in an Ashby organization with pagination and optional filters for status, job, and creation date.

#### Input

@@ -315,7 +537,6 @@ Lists all applications in an Ashby organization with pagination and optional fil

| `perPage` | number | No | Number of results per page \(default 100\) |
| `status` | string | No | Filter by application status: Active, Hired, Archived, or Lead |
| `jobId` | string | No | Filter applications by a specific job UUID |
| `candidateId` | string | No | Filter applications by a specific candidate UUID |
| `createdAfter` | string | No | Filter to applications created after this ISO 8601 timestamp \(e.g. 2024-01-01T00:00:00Z\) |

#### Output

@@ -323,23 +544,6 @@ Lists all applications in an Ashby organization with pagination and optional fil

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `applications` | array | List of applications |
| ↳ `id` | string | Application UUID |
| ↳ `status` | string | Application status \(Active, Hired, Archived, Lead\) |
| ↳ `candidate` | object | Associated candidate |
| ↳ `id` | string | Candidate UUID |
| ↳ `name` | string | Candidate name |
| ↳ `job` | object | Associated job |
| ↳ `id` | string | Job UUID |
| ↳ `title` | string | Job title |
| ↳ `currentInterviewStage` | object | Current interview stage |
| ↳ `id` | string | Stage UUID |
| ↳ `title` | string | Stage title |
| ↳ `type` | string | Stage type |
| ↳ `source` | object | Application source |
| ↳ `id` | string | Source UUID |
| ↳ `title` | string | Source title |
| ↳ `createdAt` | string | ISO 8601 creation timestamp |
| ↳ `updatedAt` | string | ISO 8601 last update timestamp |
| `moreDataAvailable` | boolean | Whether more pages of results exist |
| `nextCursor` | string | Opaque cursor for fetching the next page |

@@ -352,6 +556,7 @@ Lists all archive reasons configured in Ashby.

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Ashby API Key |
| `includeArchived` | boolean | No | Whether to include archived archive reasons in the response \(default false\) |

#### Output

@@ -360,7 +565,7 @@ Lists all archive reasons configured in Ashby.

| `archiveReasons` | array | List of archive reasons |
| ↳ `id` | string | Archive reason UUID |
| ↳ `text` | string | Archive reason text |
| ↳ `reasonType` | string | Reason type |
| ↳ `reasonType` | string | Reason type \(RejectedByCandidate, RejectedByOrg, Other\) |
| ↳ `isArchived` | boolean | Whether the reason is archived |

### `ashby_list_candidate_tags`

@@ -372,6 +577,10 @@ Lists all candidate tags configured in Ashby.

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `apiKey` | string | Yes | Ashby API Key |
| `includeArchived` | boolean | No | Whether to include archived candidate tags \(default false\) |
| `cursor` | string | No | Opaque pagination cursor from a previous response nextCursor value |
| `syncToken` | string | No | Sync token from a previous response to fetch only changed results |
| `perPage` | number | No | Number of results per page \(default 100\) |

#### Output

@@ -381,6 +590,9 @@ Lists all candidate tags configured in Ashby.

| ↳ `id` | string | Tag UUID |
| ↳ `title` | string | Tag title |
| ↳ `isArchived` | boolean | Whether the tag is archived |
| `moreDataAvailable` | boolean | Whether more pages of results exist |
| `nextCursor` | string | Opaque cursor for fetching the next page |
| `syncToken` | string | Sync token to use for incremental updates in future requests |

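The `cursor`/`syncToken` pair above enables incremental syncing: a full listing yields a `syncToken`, and passing it back later returns only records that changed since. A minimal sketch, assuming a hypothetical `listCandidateTags` wrapper around this tool whose result fields follow the output table:

```typescript
// Hypothetical wrapper over the list tool above; result fields mirror
// the output table exactly.
declare function listCandidateTags(params: {
  apiKey: string
  cursor?: string
  syncToken?: string
}): Promise<{
  tags: Array<{ id: string; title: string; isArchived: boolean }>
  moreDataAvailable: boolean
  nextCursor?: string
  syncToken?: string
}>

// Incremental sync: persist the syncToken from a full listing, then
// send it back later to receive only tags changed since it was issued.
async function syncTags(apiKey: string, savedSyncToken?: string) {
  const page = await listCandidateTags({ apiKey, syncToken: savedSyncToken })
  return { changed: page.tags, nextToken: page.syncToken }
}
```
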
### `ashby_list_candidates`

@@ -399,18 +611,6 @@ Lists all candidates in an Ashby organization with cursor-based pagination.

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `candidates` | array | List of candidates |
| ↳ `id` | string | Candidate UUID |
| ↳ `name` | string | Full name |
| ↳ `primaryEmailAddress` | object | Primary email contact info |
| ↳ `value` | string | Email address |
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
| ↳ `isPrimary` | boolean | Whether this is the primary email |
| ↳ `primaryPhoneNumber` | object | Primary phone contact info |
| ↳ `value` | string | Phone number |
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
| ↳ `isPrimary` | boolean | Whether this is the primary phone |
| ↳ `createdAt` | string | ISO 8601 creation timestamp |
| ↳ `updatedAt` | string | ISO 8601 last update timestamp |
| `moreDataAvailable` | boolean | Whether more pages of results exist |
| `nextCursor` | string | Opaque cursor for fetching the next page |

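Every list tool in this reference reports `moreDataAvailable` and `nextCursor`, so callers page until exhaustion. A generic sketch; the `fetchPage` callback and the `items` key are placeholders for whichever concrete list call and result field you are using:

```typescript
// Generic cursor pager over any Ashby list tool response.
// Field names come from the output tables in this document.
interface Paged<T> {
  items: T[]
  moreDataAvailable: boolean
  nextCursor?: string
}

async function drain<T>(
  fetchPage: (cursor?: string) => Promise<Paged<T>>
): Promise<T[]> {
  const out: T[] = []
  let cursor: string | undefined
  for (;;) {
    const page = await fetchPage(cursor)
    out.push(...page.items)
    // Stop when the API says there is nothing left (or no cursor to follow).
    if (!page.moreDataAvailable || !page.nextCursor) break
    cursor = page.nextCursor
  }
  return out
}
```
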
@@ -431,9 +631,15 @@ Lists all custom field definitions configured in Ashby.

| `customFields` | array | List of custom field definitions |
| ↳ `id` | string | Custom field UUID |
| ↳ `title` | string | Custom field title |
| ↳ `fieldType` | string | Field type \(e.g. String, Number, Boolean\) |
| ↳ `objectType` | string | Object type the field applies to \(e.g. Candidate, Application, Job\) |
| ↳ `isPrivate` | boolean | Whether the custom field is private |
| ↳ `fieldType` | string | Field data type \(MultiValueSelect, NumberRange, String, Date, ValueSelect, Number, Currency, Boolean, LongText, CompensationRange\) |
| ↳ `objectType` | string | Object type the field applies to \(Application, Candidate, Employee, Job, Offer, Opening, Talent_Project\) |
| ↳ `isArchived` | boolean | Whether the custom field is archived |
| ↳ `isRequired` | boolean | Whether a value is required |
| ↳ `selectableValues` | array | Selectable values for MultiValueSelect fields \(empty for other field types\) |
| ↳ `label` | string | Display label |
| ↳ `value` | string | Stored value |
| ↳ `isArchived` | boolean | Whether archived |

### `ashby_list_departments`

@@ -452,8 +658,11 @@ Lists all departments in Ashby.

| `departments` | array | List of departments |
| ↳ `id` | string | Department UUID |
| ↳ `name` | string | Department name |
| ↳ `externalName` | string | Candidate-facing name used on job boards |
| ↳ `isArchived` | boolean | Whether the department is archived |
| ↳ `parentId` | string | Parent department UUID |
| ↳ `createdAt` | string | ISO 8601 creation timestamp |
| ↳ `updatedAt` | string | ISO 8601 last update timestamp |

### `ashby_list_interviews`

@@ -475,10 +684,24 @@ Lists interview schedules in Ashby, optionally filtered by application or interv

| --------- | ---- | ----------- |
| `interviewSchedules` | array | List of interview schedules |
| ↳ `id` | string | Interview schedule UUID |
| ↳ `status` | string | Schedule status \(NeedsScheduling, WaitingOnCandidateBooking, Scheduled, Complete, Cancelled, OnHold, etc.\) |
| ↳ `applicationId` | string | Associated application UUID |
| ↳ `interviewStageId` | string | Interview stage UUID |
| ↳ `status` | string | Schedule status |
| ↳ `createdAt` | string | ISO 8601 creation timestamp |
| ↳ `updatedAt` | string | ISO 8601 last update timestamp |
| ↳ `interviewEvents` | array | Scheduled interview events on this schedule |
| ↳ `id` | string | Event UUID |
| ↳ `interviewId` | string | Interview template UUID |
| ↳ `interviewScheduleId` | string | Parent schedule UUID |
| ↳ `interviewerUserIds` | array | User UUIDs of interviewers assigned to the event |
| ↳ `createdAt` | string | Event creation timestamp |
| ↳ `updatedAt` | string | Event last updated timestamp |
| ↳ `startTime` | string | Event start time |
| ↳ `endTime` | string | Event end time |
| ↳ `feedbackLink` | string | URL to submit feedback for the event |
| ↳ `location` | string | Physical location |
| ↳ `meetingLink` | string | Virtual meeting URL |
| ↳ `hasSubmittedFeedback` | boolean | Whether any feedback has been submitted |
| `moreDataAvailable` | boolean | Whether more pages of results exist |
| `nextCursor` | string | Opaque cursor for fetching the next page |

@@ -500,11 +723,22 @@ Lists all job postings in Ashby.

| ↳ `id` | string | Job posting UUID |
| ↳ `title` | string | Job posting title |
| ↳ `jobId` | string | Associated job UUID |
| ↳ `locationName` | string | Location name |
| ↳ `departmentName` | string | Department name |
| ↳ `employmentType` | string | Employment type \(e.g. FullTime, PartTime, Contract\) |
| ↳ `teamName` | string | Team name |
| ↳ `locationName` | string | Primary location display name |
| ↳ `locationIds` | object | Primary and secondary location UUIDs |
| ↳ `primaryLocationId` | string | Primary location UUID |
| ↳ `secondaryLocationIds` | array | Secondary location UUIDs |
| ↳ `workplaceType` | string | Workplace type \(OnSite, Remote, Hybrid\) |
| ↳ `employmentType` | string | Employment type \(FullTime, PartTime, Intern, Contract, Temporary\) |
| ↳ `isListed` | boolean | Whether the posting is publicly listed |
| ↳ `publishedDate` | string | ISO 8601 published date |
| ↳ `applicationDeadline` | string | ISO 8601 application deadline |
| ↳ `externalLink` | string | External link to the job posting |
| ↳ `applyLink` | string | Direct apply link for the job posting |
| ↳ `compensationTierSummary` | string | Compensation tier summary for job boards |
| ↳ `shouldDisplayCompensationOnJobBoard` | boolean | Whether compensation is shown on the job board |
| ↳ `updatedAt` | string | ISO 8601 last update timestamp |

### `ashby_list_jobs`

@@ -524,14 +758,6 @@ Lists all jobs in an Ashby organization. By default returns Open, Closed, and Ar

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `jobs` | array | List of jobs |
| ↳ `id` | string | Job UUID |
| ↳ `title` | string | Job title |
| ↳ `status` | string | Job status \(Open, Closed, Archived, Draft\) |
| ↳ `employmentType` | string | Employment type \(FullTime, PartTime, Intern, Contract, Temporary\) |
| ↳ `departmentId` | string | Department UUID |
| ↳ `locationId` | string | Location UUID |
| ↳ `createdAt` | string | ISO 8601 creation timestamp |
| ↳ `updatedAt` | string | ISO 8601 last update timestamp |
| `moreDataAvailable` | boolean | Whether more pages of results exist |
| `nextCursor` | string | Opaque cursor for fetching the next page |

@@ -552,12 +778,18 @@ Lists all locations configured in Ashby.

| `locations` | array | List of locations |
| ↳ `id` | string | Location UUID |
| ↳ `name` | string | Location name |
| ↳ `externalName` | string | Candidate-facing name used on job boards |
| ↳ `isArchived` | boolean | Whether the location is archived |
| ↳ `isRemote` | boolean | Whether this is a remote location |
| ↳ `address` | object | Location address |
| ↳ `city` | string | City |
| ↳ `region` | string | State or region |
| ↳ `country` | string | Country |
| ↳ `isRemote` | boolean | Whether the location is remote \(use workplaceType instead\) |
| ↳ `workplaceType` | string | Workplace type \(OnSite, Hybrid, Remote\) |
| ↳ `parentLocationId` | string | Parent location UUID |
| ↳ `type` | string | Location component type \(Location, LocationHierarchy\) |
| ↳ `address` | object | Location postal address |
| ↳ `addressCountry` | string | Country |
| ↳ `addressRegion` | string | State or region |
| ↳ `addressLocality` | string | City or locality |
| ↳ `postalCode` | string | Postal code |
| ↳ `streetAddress` | string | Street address |

### `ashby_list_notes`

@@ -579,6 +811,7 @@ Lists all notes on a candidate with pagination support.

| `notes` | array | List of notes on the candidate |
| ↳ `id` | string | Note UUID |
| ↳ `content` | string | Note content |
| ↳ `isPrivate` | boolean | Whether the note is private |
| ↳ `author` | object | Note author |
| ↳ `id` | string | Author user UUID |
| ↳ `firstName` | string | First name |

@@ -605,16 +838,6 @@ Lists all offers with their latest version in an Ashby organization.

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `offers` | array | List of offers |
| ↳ `id` | string | Offer UUID |
| ↳ `offerStatus` | string | Offer status |
| ↳ `acceptanceStatus` | string | Acceptance status |
| ↳ `applicationId` | string | Associated application UUID |
| ↳ `startDate` | string | Offer start date |
| ↳ `salary` | object | Salary details |
| ↳ `currencyCode` | string | ISO 4217 currency code |
| ↳ `value` | number | Salary amount |
| ↳ `openingId` | string | Associated opening UUID |
| ↳ `createdAt` | string | ISO 8601 creation timestamp |
| `moreDataAvailable` | boolean | Whether more pages of results exist |
| `nextCursor` | string | Opaque cursor for fetching the next page |

@@ -634,12 +857,6 @@ Lists all openings in Ashby with pagination.

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `openings` | array | List of openings |
| ↳ `id` | string | Opening UUID |
| ↳ `openingState` | string | Opening state \(Approved, Closed, Draft, Filled, Open\) |
| ↳ `isArchived` | boolean | Whether the opening is archived |
| ↳ `openedAt` | string | ISO 8601 opened timestamp |
| ↳ `closedAt` | string | ISO 8601 closed timestamp |
| `moreDataAvailable` | boolean | Whether more pages of results exist |
| `nextCursor` | string | Opaque cursor for fetching the next page |

@@ -661,6 +878,10 @@ Lists all candidate sources configured in Ashby.

| ↳ `id` | string | Source UUID |
| ↳ `title` | string | Source title |
| ↳ `isArchived` | boolean | Whether the source is archived |
| ↳ `sourceType` | object | Source type grouping |
| ↳ `id` | string | Source type UUID |
| ↳ `title` | string | Source type title |
| ↳ `isArchived` | boolean | Whether archived |

### `ashby_list_users`

@@ -679,18 +900,12 @@ Lists all users in Ashby with pagination.

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `users` | array | List of users |
| ↳ `id` | string | User UUID |
| ↳ `firstName` | string | First name |
| ↳ `lastName` | string | Last name |
| ↳ `email` | string | Email address |
| ↳ `isEnabled` | boolean | Whether the user account is enabled |
| ↳ `globalRole` | string | User role \(Organization Admin, Elevated Access, Limited Access, External Recruiter\) |
| `moreDataAvailable` | boolean | Whether more pages of results exist |
| `nextCursor` | string | Opaque cursor for fetching the next page |

### `ashby_remove_candidate_tag`

Removes a tag from a candidate in Ashby.
Removes a tag from a candidate in Ashby and returns the updated candidate.

#### Input

@@ -704,7 +919,37 @@ Removes a tag from a candidate in Ashby.

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the tag was successfully removed |
| `candidates` | json | List of candidates with rich fields \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], linkedInUrl, githubUrl, profileUrl, position, company, school, timezone, location with locationComponents\[\], tags\[\], applicationIds\[\], customFields\[\], resumeFileHandle, fileHandles\[\], source with sourceType, creditedToUser, fraudStatus, createdAt, updatedAt\) |
| `jobs` | json | List of jobs \(id, title, confidential, status, employmentType, locationId, departmentId, defaultInterviewPlanId, interviewPlanIds\[\], customFields\[\], jobPostingIds\[\], customRequisitionId, brandId, hiringTeam\[\], author, createdAt, updatedAt, openedAt, closedAt, location with address, openings\[\] with latestVersion, compensation with compensationTiers\[\]\) |
| `applications` | json | List of applications \(id, status, customFields\[\], candidate summary, currentInterviewStage, source with sourceType, archiveReason with customFields\[\], archivedAt, job summary, creditedToUser, hiringTeam\[\], appliedViaJobPostingId, submitterClientIp, submitterUserAgent, createdAt, updatedAt\) |
| `notes` | json | List of notes \(id, content, author, isPrivate, createdAt\) |
| `offers` | json | List of offers \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion with id/startDate/salary/createdAt/openingId/customFields\[\]/fileHandles\[\]/author/approvalStatus\) |
| `archiveReasons` | json | List of archive reasons \(id, text, reasonType \[RejectedByCandidate/RejectedByOrg/Other\], isArchived\) |
| `sources` | json | List of sources \(id, title, isArchived, sourceType \{id, title, isArchived\}\) |
| `customFields` | json | List of custom field definitions \(id, title, isPrivate, fieldType, objectType, isArchived, isRequired, selectableValues\[\] \{label, value, isArchived\}\) |
| `departments` | json | List of departments \(id, name, externalName, isArchived, parentId, createdAt, updatedAt\) |
| `locations` | json | List of locations \(id, name, externalName, isArchived, isRemote, workplaceType, parentLocationId, type, address with addressCountry/Region/Locality/postalCode/streetAddress\) |
| `jobPostings` | json | List of job postings \(id, title, jobId, departmentName, teamName, locationName, locationIds, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensationTierSummary, shouldDisplayCompensationOnJobBoard, updatedAt\) |
| `openings` | json | List of openings \(id, openedAt, closedAt, isArchived, archivedAt, closeReasonId, openingState, latestVersion with identifier/description/authorId/createdAt/teamId/jobIds\[\]/targetHireDate/targetStartDate/isBackfill/employmentType/locationIds\[\]/hiringTeam\[\]/customFields\[\]\) |
| `users` | json | List of users \(id, firstName, lastName, email, globalRole, isEnabled, updatedAt, managerId\) |
| `interviewSchedules` | json | List of interview schedules \(id, applicationId, interviewStageId, interviewEvents\[\] with interviewerUserIds/startTime/endTime/feedbackLink/location/meetingLink/hasSubmittedFeedback, status, scheduledBy, createdAt, updatedAt\) |
| `tags` | json | List of candidate tags \(id, title, isArchived\) |
| `id` | string | Resource UUID |
| `name` | string | Resource name |
| `title` | string | Job title or job posting title |
| `status` | string | Status |
| `candidate` | json | Candidate details \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], customFields\[\], source, creditedToUser, createdAt, updatedAt\) |
| `job` | json | Job details \(id, title, status, employmentType, locationId, departmentId, hiringTeam\[\], author, location, openings\[\], compensation, createdAt, updatedAt\) |
| `application` | json | Application details \(id, status, customFields\[\], candidate, currentInterviewStage, source, archiveReason, job, hiringTeam\[\], createdAt, updatedAt\) |
| `offer` | json | Offer details \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion\) |
| `jobPosting` | json | Job posting details \(id, title, descriptionPlain, descriptionHtml, descriptionSocial, descriptionParts, departmentName, teamName, teamNameHierarchy\[\], jobId, locationName, locationIds, linkedData, address, isRemote, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensation, updatedAt\) |
| `content` | string | Note content |
| `author` | json | Note author \(id, firstName, lastName, email, globalRole, isEnabled\) |
| `isPrivate` | boolean | Whether the note is private |
| `createdAt` | string | ISO 8601 creation timestamp |
| `moreDataAvailable` | boolean | Whether more pages exist |
| `nextCursor` | string | Pagination cursor for next page |
| `syncToken` | string | Sync token for incremental updates |

### `ashby_search_candidates`

@@ -723,18 +968,6 @@ Searches for candidates by name and/or email with AND logic. Results are limited

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `candidates` | array | Matching candidates \(max 100 results\) |
| ↳ `id` | string | Candidate UUID |
| ↳ `name` | string | Full name |
| ↳ `primaryEmailAddress` | object | Primary email contact info |
| ↳ `value` | string | Email address |
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
| ↳ `isPrimary` | boolean | Whether this is the primary email |
| ↳ `primaryPhoneNumber` | object | Primary phone contact info |
| ↳ `value` | string | Phone number |
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
| ↳ `isPrimary` | boolean | Whether this is the primary phone |
| ↳ `createdAt` | string | ISO 8601 creation timestamp |
| ↳ `updatedAt` | string | ISO 8601 last update timestamp |

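Because results are capped at 100 with no pagination cursor, queries should be narrow. For illustration, here is a direct call against Ashby's public API, which this tool wraps; the endpoint shape (POST with Basic auth, API key as the username) reflects our reading of Ashby's API docs and should be verified before use:

```typescript
// Sketch of a direct candidate.search call; both filters are ANDed and
// the result set is capped at 100. Endpoint/auth shape is an assumption.
async function searchCandidates(apiKey: string, name?: string, email?: string) {
  const res = await fetch('https://api.ashbyhq.com/candidate.search', {
    method: 'POST',
    headers: {
      Authorization: `Basic ${Buffer.from(`${apiKey}:`).toString('base64')}`,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({ name, email }),
  })
  const data = await res.json()
  return data.results as Array<{ id: string; name: string }>
}
```
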
### `ashby_update_candidate`

@@ -758,26 +991,36 @@ Updates an existing candidate record in Ashby. Only provided fields are changed.

| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `id` | string | Candidate UUID |
| `name` | string | Full name |
| `primaryEmailAddress` | object | Primary email contact info |
| ↳ `value` | string | Email address |
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
| ↳ `isPrimary` | boolean | Whether this is the primary email |
| `primaryPhoneNumber` | object | Primary phone contact info |
| ↳ `value` | string | Phone number |
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
| ↳ `isPrimary` | boolean | Whether this is the primary phone |
| `profileUrl` | string | URL to the candidate Ashby profile |
| `position` | string | Current position or title |
| `company` | string | Current company |
| `linkedInUrl` | string | LinkedIn profile URL |
| `githubUrl` | string | GitHub profile URL |
| `tags` | array | Tags applied to the candidate |
| ↳ `id` | string | Tag UUID |
| ↳ `title` | string | Tag title |
| `applicationIds` | array | IDs of associated applications |
| `candidates` | json | List of candidates with rich fields \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], linkedInUrl, githubUrl, profileUrl, position, company, school, timezone, location with locationComponents\[\], tags\[\], applicationIds\[\], customFields\[\], resumeFileHandle, fileHandles\[\], source with sourceType, creditedToUser, fraudStatus, createdAt, updatedAt\) |
| `jobs` | json | List of jobs \(id, title, confidential, status, employmentType, locationId, departmentId, defaultInterviewPlanId, interviewPlanIds\[\], customFields\[\], jobPostingIds\[\], customRequisitionId, brandId, hiringTeam\[\], author, createdAt, updatedAt, openedAt, closedAt, location with address, openings\[\] with latestVersion, compensation with compensationTiers\[\]\) |
| `applications` | json | List of applications \(id, status, customFields\[\], candidate summary, currentInterviewStage, source with sourceType, archiveReason with customFields\[\], archivedAt, job summary, creditedToUser, hiringTeam\[\], appliedViaJobPostingId, submitterClientIp, submitterUserAgent, createdAt, updatedAt\) |
| `notes` | json | List of notes \(id, content, author, isPrivate, createdAt\) |
| `offers` | json | List of offers \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion with id/startDate/salary/createdAt/openingId/customFields\[\]/fileHandles\[\]/author/approvalStatus\) |
| `archiveReasons` | json | List of archive reasons \(id, text, reasonType \[RejectedByCandidate/RejectedByOrg/Other\], isArchived\) |
| `sources` | json | List of sources \(id, title, isArchived, sourceType \{id, title, isArchived\}\) |
| `customFields` | json | List of custom field definitions \(id, title, isPrivate, fieldType, objectType, isArchived, isRequired, selectableValues\[\] \{label, value, isArchived\}\) |
| `departments` | json | List of departments \(id, name, externalName, isArchived, parentId, createdAt, updatedAt\) |
| `locations` | json | List of locations \(id, name, externalName, isArchived, isRemote, workplaceType, parentLocationId, type, address with addressCountry/Region/Locality/postalCode/streetAddress\) |
| `jobPostings` | json | List of job postings \(id, title, jobId, departmentName, teamName, locationName, locationIds, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensationTierSummary, shouldDisplayCompensationOnJobBoard, updatedAt\) |
| `openings` | json | List of openings \(id, openedAt, closedAt, isArchived, archivedAt, closeReasonId, openingState, latestVersion with identifier/description/authorId/createdAt/teamId/jobIds\[\]/targetHireDate/targetStartDate/isBackfill/employmentType/locationIds\[\]/hiringTeam\[\]/customFields\[\]\) |
| `users` | json | List of users \(id, firstName, lastName, email, globalRole, isEnabled, updatedAt, managerId\) |
| `interviewSchedules` | json | List of interview schedules \(id, applicationId, interviewStageId, interviewEvents\[\] with interviewerUserIds/startTime/endTime/feedbackLink/location/meetingLink/hasSubmittedFeedback, status, scheduledBy, createdAt, updatedAt\) |
| `tags` | json | List of candidate tags \(id, title, isArchived\) |
| `id` | string | Resource UUID |
| `name` | string | Resource name |
| `title` | string | Job title or job posting title |
| `status` | string | Status |
| `candidate` | json | Candidate details \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], customFields\[\], source, creditedToUser, createdAt, updatedAt\) |
| `job` | json | Job details \(id, title, status, employmentType, locationId, departmentId, hiringTeam\[\], author, location, openings\[\], compensation, createdAt, updatedAt\) |
| `application` | json | Application details \(id, status, customFields\[\], candidate, currentInterviewStage, source, archiveReason, job, hiringTeam\[\], createdAt, updatedAt\) |
| `offer` | json | Offer details \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion\) |
| `jobPosting` | json | Job posting details \(id, title, descriptionPlain, descriptionHtml, descriptionSocial, descriptionParts, departmentName, teamName, teamNameHierarchy\[\], jobId, locationName, locationIds, linkedData, address, isRemote, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensation, updatedAt\) |
| `content` | string | Note content |
| `author` | json | Note author \(id, firstName, lastName, email, globalRole, isEnabled\) |
| `isPrivate` | boolean | Whether the note is private |
| `createdAt` | string | ISO 8601 creation timestamp |
| `updatedAt` | string | ISO 8601 last update timestamp |
| `moreDataAvailable` | boolean | Whether more pages exist |
| `nextCursor` | string | Pagination cursor for next page |
| `syncToken` | string | Sync token for incremental updates |

@@ -97,6 +97,14 @@ Trigger workflow when a candidate is hired

| ↳ `job` | object | job output from the tool |
| ↳ `id` | string | Job UUID |
| ↳ `title` | string | Job title |
| `offer` | object | offer output from the tool |
| ↳ `id` | string | Accepted offer UUID |
| ↳ `applicationId` | string | Associated application UUID |
| ↳ `acceptanceStatus` | string | Offer acceptance status |
| ↳ `offerStatus` | string | Offer process status |
| ↳ `decidedAt` | string | Offer decision timestamp \(ISO 8601\) |
| ↳ `latestVersion` | object | latestVersion output from the tool |
| ↳ `id` | string | Latest offer version UUID |

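A workflow bound to this trigger receives the fields above. A handler sketch, with the payload type inferred from the table rather than taken from a published type:

```typescript
// Payload shape inferred from the trigger output table above (assumption).
type HiredEvent = {
  job: { id: string; title: string }
  offer: {
    id: string
    applicationId: string
    acceptanceStatus: string
    offerStatus: string
    decidedAt: string // ISO 8601
    latestVersion: { id: string }
  }
}

function onCandidateHired(event: HiredEvent) {
  const { job, offer } = event
  // Downstream steps would typically key off the application and offer IDs.
  console.log(`Hired into ${job.title}; offer ${offer.id} decided at ${offer.decidedAt}`)
}
```
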
---

const logger = createLogger('SocketDatabase')

const connectionString = env.DATABASE_URL
/**
 * Server-side safety net for runaway queries and abandoned transactions.
 * See `packages/db/index.ts` for rationale.
 */
const socketDb = drizzle(
  postgres(connectionString, {
    prepare: false,
@@ -34,9 +30,6 @@ const socketDb = drizzle(
    connect_timeout: 20,
    max: 30,
    onnotice: () => {},
    connection: {
      options: '-c statement_timeout=90000 -c idle_in_transaction_session_timeout=90000',
    },
  }),
  { schema }
)

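The `options` startup parameter passes per-session Postgres settings at connect time, so every pooled session behaves as if it had run `SET statement_timeout = '90s'` and `SET idle_in_transaction_session_timeout = '90s'` immediately after connecting. A sanity-check sketch (assumes `db.execute` on drizzle's postgres-js driver; verify against the drizzle version in use):

```typescript
import { sql } from 'drizzle-orm'

// Sketch: any session checked out of this pool should report the
// timeout applied via the startup `options` above.
async function assertTimeouts() {
  // Expect '90s' (90000 ms), matching '-c statement_timeout=90000'.
  const rows = await socketDb.execute(sql`SHOW statement_timeout`)
  console.log(rows)
}
```
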
@@ -1031,7 +1031,7 @@

  },
  {
    "name": "List Applications",
    "description": "Lists all applications in an Ashby organization with pagination and optional filters for status, job, candidate, and creation date."
    "description": "Lists all applications in an Ashby organization with pagination and optional filters for status, job, and creation date."
  },
  {
    "name": "Get Application",
@@ -1051,11 +1051,11 @@
  },
  {
    "name": "Add Candidate Tag",
    "description": "Adds a tag to a candidate in Ashby."
    "description": "Adds a tag to a candidate in Ashby and returns the updated candidate."
  },
  {
    "name": "Remove Candidate Tag",
    "description": "Removes a tag from a candidate in Ashby."
    "description": "Removes a tag from a candidate in Ashby and returns the updated candidate."
  },
  {
    "name": "Get Offer",

@@ -32,7 +32,9 @@ export const GET = withRouteHandler(async (request: NextRequest) => {
  const returnUrl = request.nextUrl.searchParams.get('returnUrl')

  if (!shopDomain) {
    const returnUrlParam = returnUrl ? encodeURIComponent(returnUrl) : ''
    const safeReturnUrl =
      returnUrl && isSameOrigin(returnUrl) ? encodeURIComponent(returnUrl) : ''
    const returnUrlJsLiteral = JSON.stringify(safeReturnUrl)
    return new NextResponse(
      `<!DOCTYPE html>
<html>
@@ -120,7 +122,7 @@ export const GET = withRouteHandler(async (request: NextRequest) => {
      </div>

      <script>
        const returnUrl = '${returnUrlParam}';
        const returnUrl = ${returnUrlJsLiteral};
        function handleSubmit(e) {
          e.preventDefault();
          let shop = document.getElementById('shop').value.trim().toLowerCase();

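This hunk fixes a reflected XSS. `encodeURIComponent` leaves `'`, `(`, and `)` unescaped, so interpolating the query parameter into a single-quoted script literal let input like `'-alert(1)-'` pass through encoding intact and break out of the string. The fix validates the URL's origin first and then emits the value through `JSON.stringify`, which produces a double-quoted JS literal with inner quotes escaped, so the interpolation cannot be escaped. One plausible shape for the `isSameOrigin` guard (the real helper lives elsewhere in the repo; `getBaseUrl` is a hypothetical stand-in for however the app resolves its own origin):

```typescript
// Illustrative sketch only; the repo's actual isSameOrigin may differ.
declare function getBaseUrl(): string // hypothetical: the app's own origin

function isSameOrigin(target: string): boolean {
  try {
    const base = getBaseUrl()
    // Relative URLs resolve against the app's own origin and pass;
    // absolute URLs must match it exactly.
    return new URL(target, base).origin === new URL(base).origin
  } catch {
    return false // malformed input fails closed
  }
}
```
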
@@ -38,6 +38,7 @@ vi.mock('@/lib/copilot/request/session', () => ({
  }),
  encodeSSEEnvelope: (event: Record<string, unknown>) =>
    new TextEncoder().encode(`data: ${JSON.stringify(event)}\n\n`),
  encodeSSEComment: (comment: string) => new TextEncoder().encode(`: ${comment}\n\n`),
  SSE_RESPONSE_HEADERS: {
    'Content-Type': 'text/event-stream',
  },
@@ -132,6 +133,7 @@ describe('copilot chat stream replay route', () => {
    )

    const chunks = await readAllChunks(response)
    expect(chunks[0]).toBe(': accepted\n\n')
    expect(chunks.join('')).toContain(
      JSON.stringify({
        status: MothershipStreamV1CompletionStatus.cancelled,

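The mock mirrors SSE wire framing: data frames are `data: <json>\n\n` and comment frames are `: <text>\n\n`. EventSource-style parsers ignore comment frames, which is why `: accepted` can serve as an immediate acknowledgement (and later as a keepalive) without polluting the event stream. A minimal reader sketch illustrating the distinction:

```typescript
// Minimal SSE frame reader (sketch): comment frames (leading ':') are
// dropped, data frames are parsed as JSON envelopes.
function parseSSEChunk(chunk: string): unknown[] {
  const events: unknown[] = []
  for (const frame of chunk.split('\n\n')) {
    if (!frame) continue
    if (frame.startsWith(':')) continue // comment/keepalive, e.g. ': accepted'
    const line = frame.split('\n').find((l) => l.startsWith('data: '))
    if (line) events.push(JSON.parse(line.slice('data: '.length)))
  }
  return events
}
```
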
@@ -19,6 +19,7 @@ import { getCopilotTracer, markSpanForError } from '@/lib/copilot/request/otel'
import {
  checkForReplayGap,
  createEvent,
+  encodeSSEComment,
  encodeSSEEnvelope,
  readEvents,
  readFilePreviewSessions,
@@ -31,6 +32,7 @@ export const maxDuration = 3600

const logger = createLogger('CopilotChatStreamAPI')
const POLL_INTERVAL_MS = 250
+const REPLAY_KEEPALIVE_INTERVAL_MS = 15_000
const MAX_STREAM_MS = 60 * 60 * 1000

function extractCanonicalRequestId(value: unknown): string {
@@ -266,6 +268,7 @@ async function handleResumeRequestBody({
  let controllerClosed = false
  let sawTerminalEvent = false
  let currentRequestId = extractRunRequestId(run)
+  let lastWriteTime = Date.now()
  // Stamp the logical request id + chat id on the resume root as soon
  // as we resolve them from the run row, so TraceQL joins work on
  // resume legs the same way they do on the original POST.
@@ -291,6 +294,19 @@ async function handleResumeRequestBody({
    if (controllerClosed) return false
    try {
      controller.enqueue(encodeSSEEnvelope(payload))
+      lastWriteTime = Date.now()
      return true
    } catch {
      controllerClosed = true
      return false
    }
  }

+  const enqueueComment = (comment: string) => {
+    if (controllerClosed) return false
+    try {
+      controller.enqueue(encodeSSEComment(comment))
+      lastWriteTime = Date.now()
+      return true
+    } catch {
+      controllerClosed = true
@@ -306,7 +322,6 @@ async function handleResumeRequestBody({
  const flushEvents = async () => {
    const events = await readEvents(streamId, cursor)
    if (events.length > 0) {
-      totalEventsFlushed += events.length
      logger.debug('[Resume] Flushing events', {
        streamId,
        afterCursor: cursor,
@@ -314,14 +329,15 @@ async function handleResumeRequestBody({
      })
    }
    for (const envelope of events) {
-      if (!enqueueEvent(envelope)) {
-        break
-      }
+      totalEventsFlushed += 1
      cursor = envelope.stream.cursor ?? String(envelope.seq)
      currentRequestId = extractEnvelopeRequestId(envelope) || currentRequestId
      if (envelope.type === MothershipStreamV1EventType.complete) {
        sawTerminalEvent = true
      }
+      if (!enqueueEvent(envelope)) {
+        break
+      }
    }
  }

@@ -341,21 +357,30 @@ async function handleResumeRequestBody({
      reason: options?.reason,
      requestId: currentRequestId,
    })) {
-      if (!enqueueEvent(envelope)) {
-        break
-      }
      cursor = envelope.stream.cursor ?? String(envelope.seq)
      if (envelope.type === MothershipStreamV1EventType.complete) {
        sawTerminalEvent = true
      }
+      if (!enqueueEvent(envelope)) {
+        break
+      }
    }
  }

  try {
+    enqueueComment('accepted')

    const gap = await checkForReplayGap(streamId, afterCursor, currentRequestId)
    if (gap) {
      for (const envelope of gap.envelopes) {
-        enqueueEvent(envelope)
+        if (!enqueueEvent(envelope)) {
+          break
+        }
        cursor = envelope.stream.cursor ?? String(envelope.seq)
        currentRequestId = extractEnvelopeRequestId(envelope) || currentRequestId
        if (envelope.type === MothershipStreamV1EventType.complete) {
          sawTerminalEvent = true
        }
      }
      return
    }
@@ -408,6 +433,10 @@ async function handleResumeRequestBody({
        break
      }

+      if (Date.now() - lastWriteTime >= REPLAY_KEEPALIVE_INTERVAL_MS) {
+        enqueueComment('keepalive')
+      }
+
      await sleep(POLL_INTERVAL_MS)
    }
    if (!controllerClosed && Date.now() - startTime >= MAX_STREAM_MS) {
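The keepalive mechanism above leans on SSE comments: any frame starting with `:` is ignored by EventSource clients but still counts as traffic, so proxies and load balancers do not reap an idle replay connection. The framing matches the mocked helpers in the test diff above:

```ts
const encoder = new TextEncoder()

// Comment frame: invisible to the client's event handlers, but keeps the
// connection warm through intermediaries.
function encodeSSEComment(comment: string): Uint8Array {
  return encoder.encode(`: ${comment}\n\n`)
}

// Data frame: one JSON envelope per SSE event.
function encodeSSEEnvelope(event: Record<string, unknown>): Uint8Array {
  return encoder.encode(`data: ${JSON.stringify(event)}\n\n`)
}
```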
@@ -12,6 +12,7 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
+import { normalizeEmail } from '@/lib/invitations/core'
import { syncAllWebhooksForCredentialSet } from '@/lib/webhooks/utils.server'

const logger = createLogger('CredentialSetInviteToken')
@@ -111,6 +112,21 @@ export const POST = withRouteHandler(
      return NextResponse.json({ error: 'Invitation has expired' }, { status: 410 })
    }

+    if (invitation.email) {
+      const sessionEmail = session.user.email
+      if (!sessionEmail || normalizeEmail(sessionEmail) !== normalizeEmail(invitation.email)) {
+        logger.warn('Rejected credential set invitation accept due to email mismatch', {
+          invitationId: invitation.id,
+          credentialSetId: invitation.credentialSetId,
+          userId: session.user.id,
+        })
+        return NextResponse.json(
+          { error: 'This invitation was sent to a different email address' },
+          { status: 403 }
+        )
+      }
+    }

    const existingMember = await db
      .select()
      .from(credentialSetMember)
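The check compares normalized emails, so casing or stray whitespace in either address cannot bypass (or falsely trip) the match. A plausible sketch of the helper, assuming it only canonicalizes case and whitespace:

```ts
// Hypothetical shape of normalizeEmail from '@/lib/invitations/core':
// 'Alice@Example.com ' and 'alice@example.com' compare equal.
export function normalizeEmail(email: string): string {
  return email.trim().toLowerCase()
}
```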
@@ -8,21 +8,61 @@ import {
  isUsingCloudStorage,
  type StorageContext,
} from '@/lib/uploads'
+import {
+  signUploadToken,
+  type UploadTokenPayload,
+  verifyUploadToken,
+} from '@/lib/uploads/core/upload-token'
+import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'

const logger = createLogger('MultipartUploadAPI')

+const ALLOWED_UPLOAD_CONTEXTS = new Set<StorageContext>([
+  'knowledge-base',
+  'chat',
+  'copilot',
+  'mothership',
+  'execution',
+  'workspace',
+  'profile-pictures',
+  'og-images',
+  'logs',
+  'workspace-logos',
+])

interface InitiateMultipartRequest {
  fileName: string
  contentType: string
  fileSize: number
+  workspaceId: string
  context?: StorageContext
}

-interface GetPartUrlsRequest {
-  uploadId: string
-  key: string
+interface TokenBoundRequest {
+  uploadToken: string
+}
+
+interface GetPartUrlsRequest extends TokenBoundRequest {
  partNumbers: number[]
-  context?: StorageContext
}
+
+interface CompleteSingleRequest extends TokenBoundRequest {
+  parts: unknown
+}
+
+interface CompleteBatchRequest {
+  uploads: Array<TokenBoundRequest & { parts: unknown }>
+}
+
+const verifyTokenForUser = (token: string | undefined, userId: string) => {
+  if (!token || typeof token !== 'string') {
+    return null
+  }
+  const result = verifyUploadToken(token)
+  if (!result.valid || result.payload.userId !== userId) {
+    return null
+  }
+  return result.payload
+}

export const POST = withRouteHandler(async (request: NextRequest) => {
@@ -31,6 +71,7 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
  if (!session?.user?.id) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }
+  const userId = session.user.id

  const action = request.nextUrl.searchParams.get('action')

@@ -45,32 +86,34 @@ export const POST = withRouteHandler(async (request: NextRequest) => {

  switch (action) {
    case 'initiate': {
-      const data: InitiateMultipartRequest = await request.json()
-      const { fileName, contentType, fileSize, context = 'knowledge-base' } = data
+      const data = (await request.json()) as InitiateMultipartRequest
+      const { fileName, contentType, fileSize, workspaceId, context = 'knowledge-base' } = data
+
+      if (!workspaceId || typeof workspaceId !== 'string') {
+        return NextResponse.json({ error: 'workspaceId is required' }, { status: 400 })
+      }
+
+      if (!ALLOWED_UPLOAD_CONTEXTS.has(context)) {
+        return NextResponse.json({ error: 'Invalid storage context' }, { status: 400 })
+      }
+
+      const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId)
+      if (permission !== 'write' && permission !== 'admin') {
+        return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
+      }

      const config = getStorageConfig(context)

+      let uploadId: string
+      let key: string
+
      if (storageProvider === 's3') {
        const { initiateS3MultipartUpload } = await import('@/lib/uploads/providers/s3/client')

-        const result = await initiateS3MultipartUpload({
-          fileName,
-          contentType,
-          fileSize,
-        })
-
-        logger.info(
-          `Initiated S3 multipart upload for ${fileName} (context: ${context}): ${result.uploadId}`
-        )
-
-        return NextResponse.json({
-          uploadId: result.uploadId,
-          key: result.key,
-        })
-      }
-      if (storageProvider === 'blob') {
+        const result = await initiateS3MultipartUpload({ fileName, contentType, fileSize })
+        uploadId = result.uploadId
+        key = result.key
+      } else if (storageProvider === 'blob') {
        const { initiateMultipartUpload } = await import('@/lib/uploads/providers/blob/client')

        const result = await initiateMultipartUpload({
          fileName,
          contentType,
@@ -82,46 +125,55 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
            connectionString: config.connectionString,
          },
        })

-        logger.info(
-          `Initiated Azure multipart upload for ${fileName} (context: ${context}): ${result.uploadId}`
-        )
-
-        return NextResponse.json({
-          uploadId: result.uploadId,
-          key: result.key,
-        })
-      }
-
-      return NextResponse.json(
-        { error: `Unsupported storage provider: ${storageProvider}` },
-        { status: 400 }
-      )
+        uploadId = result.uploadId
+        key = result.key
+      } else {
+        return NextResponse.json(
+          { error: `Unsupported storage provider: ${storageProvider}` },
+          { status: 400 }
+        )
+      }
+
+      const uploadToken = signUploadToken({
+        uploadId,
+        key,
+        userId,
+        workspaceId,
+        context,
+      })
+
+      logger.info(
+        `Initiated ${storageProvider} multipart upload for ${fileName} (context: ${context}, workspace: ${workspaceId}): ${uploadId}`
+      )
+
+      return NextResponse.json({ uploadId, key, uploadToken })
    }

    case 'get-part-urls': {
-      const data: GetPartUrlsRequest = await request.json()
-      const { uploadId, key, partNumbers, context = 'knowledge-base' } = data
+      const data = (await request.json()) as GetPartUrlsRequest
+      const { partNumbers } = data
+
+      const tokenPayload = verifyTokenForUser(data.uploadToken, userId)
+      if (!tokenPayload) {
+        return NextResponse.json({ error: 'Invalid or expired upload token' }, { status: 403 })
+      }
+
+      const { uploadId, key, context } = tokenPayload
      const config = getStorageConfig(context)

      if (storageProvider === 's3') {
        const { getS3MultipartPartUrls } = await import('@/lib/uploads/providers/s3/client')

        const presignedUrls = await getS3MultipartPartUrls(key, uploadId, partNumbers)

        return NextResponse.json({ presignedUrls })
      }
      if (storageProvider === 'blob') {
        const { getMultipartPartUrls } = await import('@/lib/uploads/providers/blob/client')

        const presignedUrls = await getMultipartPartUrls(key, partNumbers, {
          containerName: config.containerName!,
          accountName: config.accountName!,
          accountKey: config.accountKey,
          connectionString: config.connectionString,
        })

        return NextResponse.json({ presignedUrls })
      }

@@ -132,24 +184,32 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
    }

    case 'complete': {
-      const data = await request.json()
-      const context: StorageContext = data.context || 'knowledge-base'
+      const data = (await request.json()) as CompleteSingleRequest | CompleteBatchRequest

-      const config = getStorageConfig(context)
+      if ('uploads' in data && Array.isArray(data.uploads)) {
+        const verified = data.uploads.map((upload) => {
+          const payload = verifyTokenForUser(upload.uploadToken, userId)
+          return payload ? { payload, parts: upload.parts } : null
+        })
+
+        if (verified.some((entry) => entry === null)) {
+          return NextResponse.json({ error: 'Invalid or expired upload token' }, { status: 403 })
+        }
+
+        const verifiedEntries = verified.filter(
+          (entry): entry is { payload: UploadTokenPayload; parts: unknown } => entry !== null
+        )

-      if ('uploads' in data) {
        const results = await Promise.all(
-          data.uploads.map(async (upload: any) => {
-            const { uploadId, key } = upload
+          verifiedEntries.map(async ({ payload, parts }) => {
+            const { uploadId, key, context } = payload
+            const config = getStorageConfig(context)

            if (storageProvider === 's3') {
              const { completeS3MultipartUpload } = await import(
                '@/lib/uploads/providers/s3/client'
              )
-              const parts = upload.parts // S3 format: { ETag, PartNumber }
-
-              const result = await completeS3MultipartUpload(key, uploadId, parts)
-
+              const result = await completeS3MultipartUpload(key, uploadId, parts as any)
              return {
                success: true,
                location: result.location,
@@ -161,15 +221,12 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
              const { completeMultipartUpload } = await import(
                '@/lib/uploads/providers/blob/client'
              )
-              const parts = upload.parts // Azure format: { blockId, partNumber }
-
-              const result = await completeMultipartUpload(key, parts, {
+              const result = await completeMultipartUpload(key, parts as any, {
                containerName: config.containerName!,
                accountName: config.accountName!,
                accountKey: config.accountKey,
                connectionString: config.connectionString,
              })
-
              return {
                success: true,
                location: result.location,
@@ -182,19 +239,23 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
          })
        )

-        logger.info(`Completed ${data.uploads.length} multipart uploads (context: ${context})`)
+        logger.info(`Completed ${verifiedEntries.length} multipart uploads`)
        return NextResponse.json({ results })
      }

-      const { uploadId, key, parts } = data
+      const single = data as CompleteSingleRequest
+      const tokenPayload = verifyTokenForUser(single.uploadToken, userId)
+      if (!tokenPayload) {
+        return NextResponse.json({ error: 'Invalid or expired upload token' }, { status: 403 })
+      }
+
+      const { uploadId, key, context } = tokenPayload
+      const config = getStorageConfig(context)

      if (storageProvider === 's3') {
        const { completeS3MultipartUpload } = await import('@/lib/uploads/providers/s3/client')

-        const result = await completeS3MultipartUpload(key, uploadId, parts)
-
+        const result = await completeS3MultipartUpload(key, uploadId, single.parts as any)
        logger.info(`Completed S3 multipart upload for key ${key} (context: ${context})`)

        return NextResponse.json({
          success: true,
          location: result.location,
@@ -204,16 +265,13 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
      }
      if (storageProvider === 'blob') {
        const { completeMultipartUpload } = await import('@/lib/uploads/providers/blob/client')

-        const result = await completeMultipartUpload(key, parts, {
+        const result = await completeMultipartUpload(key, single.parts as any, {
          containerName: config.containerName!,
          accountName: config.accountName!,
          accountKey: config.accountKey,
          connectionString: config.connectionString,
        })

        logger.info(`Completed Azure multipart upload for key ${key} (context: ${context})`)

        return NextResponse.json({
          success: true,
          location: result.location,
@@ -229,27 +287,27 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
    }

    case 'abort': {
-      const data = await request.json()
-      const { uploadId, key, context = 'knowledge-base' } = data
+      const data = (await request.json()) as TokenBoundRequest
+      const tokenPayload = verifyTokenForUser(data.uploadToken, userId)
+      if (!tokenPayload) {
+        return NextResponse.json({ error: 'Invalid or expired upload token' }, { status: 403 })
+      }

-      const config = getStorageConfig(context as StorageContext)
+      const { uploadId, key, context } = tokenPayload
+      const config = getStorageConfig(context)

      if (storageProvider === 's3') {
        const { abortS3MultipartUpload } = await import('@/lib/uploads/providers/s3/client')

        await abortS3MultipartUpload(key, uploadId)

        logger.info(`Aborted S3 multipart upload for key ${key} (context: ${context})`)
      } else if (storageProvider === 'blob') {
        const { abortMultipartUpload } = await import('@/lib/uploads/providers/blob/client')

        await abortMultipartUpload(key, {
          containerName: config.containerName!,
          accountName: config.accountName!,
          accountKey: config.accountKey,
          connectionString: config.connectionString,
        })

        logger.info(`Aborted Azure multipart upload for key ${key} (context: ${context})`)
      } else {
        return NextResponse.json(
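The route now threads a signed upload token through every follow-up action, so `uploadId`, `key`, `context`, and the owning user travel server-signed instead of client-supplied. The token module itself is not in this diff; a minimal HMAC-based sketch matching the `signUploadToken`/`verifyUploadToken` interface it imports (secret name and encoding are assumptions):

```ts
import { createHmac, timingSafeEqual } from 'node:crypto'

export interface UploadTokenPayload {
  uploadId: string
  key: string
  userId: string
  workspaceId: string
  context: string
}

const SECRET = process.env.UPLOAD_TOKEN_SECRET ?? 'dev-secret' // assumed env var

export function signUploadToken(payload: UploadTokenPayload): string {
  // Sign the payload at initiate time; later actions must echo the token
  // back, so a client cannot swap in another user's key or escalate to a
  // different storage context.
  const body = Buffer.from(JSON.stringify(payload)).toString('base64url')
  const sig = createHmac('sha256', SECRET).update(body).digest('base64url')
  return `${body}.${sig}`
}

export function verifyUploadToken(
  token: string
): { valid: true; payload: UploadTokenPayload } | { valid: false } {
  const [body, sig] = token.split('.')
  if (!body || !sig) return { valid: false }
  const expected = createHmac('sha256', SECRET).update(body).digest('base64url')
  const a = Buffer.from(sig)
  const b = Buffer.from(expected)
  // Constant-time comparison avoids leaking signature bytes via timing.
  if (a.length !== b.length || !timingSafeEqual(a, b)) return { valid: false }
  return { valid: true, payload: JSON.parse(Buffer.from(body, 'base64url').toString()) }
}
```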
@@ -66,6 +66,12 @@ const QueryRowsSchema = z.object({
    .min(0, 'Offset must be 0 or greater')
    .optional()
    .default(0),
+  includeTotal: z
+    .preprocess(
+      (val) => (val === null || val === undefined || val === '' ? undefined : val === 'true'),
+      z.boolean().optional()
+    )
+    .default(true),
})

const nonEmptyFilter = z
@@ -328,6 +334,7 @@ export const GET = withRouteHandler(
    const sortParam = searchParams.get('sort')
    const limit = searchParams.get('limit')
    const offset = searchParams.get('offset')
+    const includeTotalParam = searchParams.get('includeTotal')

    let filter: Record<string, unknown> | undefined
    let sort: Sort | undefined
@@ -349,6 +356,7 @@ export const GET = withRouteHandler(
      sort,
      limit,
      offset,
+      includeTotal: includeTotalParam,
    })

    const accessResult = await checkAccess(tableId, authResult.userId, 'read')
@@ -398,17 +406,19 @@ export const GET = withRouteHandler(
      query = query.orderBy(userTableRows.position) as typeof query
    }

-    const countQuery = db
-      .select({ count: sql<number>`count(*)` })
-      .from(userTableRows)
-      .where(and(...baseConditions))
-
-    const [{ count: totalCount }] = await countQuery
+    let totalCount: number | null = null
+    if (validated.includeTotal) {
+      const [{ count }] = await db
+        .select({ count: sql<number>`count(*)` })
+        .from(userTableRows)
+        .where(and(...baseConditions))
+      totalCount = Number(count)
+    }

    const rows = await query.limit(validated.limit).offset(validated.offset)

    logger.info(
-      `[${requestId}] Queried ${rows.length} rows from table ${tableId} (total: ${totalCount})`
+      `[${requestId}] Queried ${rows.length} rows from table ${tableId} (total: ${totalCount ?? 'n/a'})`
    )

    return NextResponse.json({
@@ -424,7 +434,7 @@ export const GET = withRouteHandler(
          r.updatedAt instanceof Date ? r.updatedAt.toISOString() : String(r.updatedAt),
      })),
      rowCount: rows.length,
-      totalCount: Number(totalCount),
+      totalCount,
      limit: validated.limit,
      offset: validated.offset,
    },
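`includeTotal` arrives as a query-string value, so the schema preprocesses `'true'`/`'false'` (and absence) into a real boolean before validation. A small demonstration of the coercion:

```ts
import { z } from 'zod'

// Query strings deliver strings or null; map them to booleans first,
// letting absence fall through to the schema default.
const includeTotal = z
  .preprocess(
    (val) => (val === null || val === undefined || val === '' ? undefined : val === 'true'),
    z.boolean().optional()
  )
  .default(true)

console.log(includeTotal.parse('true'))    // true
console.log(includeTotal.parse('false'))   // false
console.log(includeTotal.parse(undefined)) // true (default applies)
```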
@@ -71,6 +71,12 @@ const QueryRowsSchema = z.object({
      .optional()
  )
    .default(0),
+  includeTotal: z
+    .preprocess(
+      (val) => (val === null || val === undefined || val === '' ? undefined : val === 'true'),
+      z.boolean().optional()
+    )
+    .default(true),
})

const nonEmptyFilter = z
@@ -219,6 +225,7 @@ export const GET = withRouteHandler(
      sort,
      limit: searchParams.get('limit'),
      offset: searchParams.get('offset'),
+      includeTotal: searchParams.get('includeTotal'),
    })

    const scopeError = checkWorkspaceScope(rateLimit, validated.workspaceId)
@@ -268,16 +275,37 @@ export const GET = withRouteHandler(
      query = query.orderBy(userTableRows.position) as typeof query
    }

-    const countQuery = db
-      .select({ count: sql<number>`count(*)` })
-      .from(userTableRows)
-      .where(and(...baseConditions))
+    const rowsPromise = query.limit(validated.limit).offset(validated.offset)

-    const [countResult, rows] = await Promise.all([
-      countQuery,
-      query.limit(validated.limit).offset(validated.offset),
-    ])
-    const totalCount = countResult[0].count
+    let totalCount: number | null = null
+    if (validated.includeTotal) {
+      const countQuery = db
+        .select({ count: sql<number>`count(*)` })
+        .from(userTableRows)
+        .where(and(...baseConditions))
+      const [countResult, rows] = await Promise.all([countQuery, rowsPromise])
+      totalCount = Number(countResult[0].count)
+      return NextResponse.json({
+        success: true,
+        data: {
+          rows: rows.map((r) => ({
+            id: r.id,
+            data: r.data,
+            position: r.position,
+            createdAt:
+              r.createdAt instanceof Date ? r.createdAt.toISOString() : String(r.createdAt),
+            updatedAt:
+              r.updatedAt instanceof Date ? r.updatedAt.toISOString() : String(r.updatedAt),
+          })),
+          rowCount: rows.length,
+          totalCount,
+          limit: validated.limit,
+          offset: validated.offset,
+        },
+      })
+    }
+
+    const rows = await rowsPromise

    return NextResponse.json({
      success: true,
@@ -292,7 +320,7 @@ export const GET = withRouteHandler(
          r.updatedAt instanceof Date ? r.updatedAt.toISOString() : String(r.updatedAt),
      })),
      rowCount: rows.length,
-      totalCount: Number(totalCount),
+      totalCount,
      limit: validated.limit,
      offset: validated.offset,
    },
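This variant also starts the page query eagerly and only pays for `COUNT(*)` when the caller asked for it, running both concurrently in that case. The pattern in isolation, with stand-in queries:

```ts
const fetchRows = async () => ['r1', 'r2'] // stand-in for the paged SELECT
const fetchCount = async () => 1234        // stand-in for COUNT(*)

async function queryPage(includeTotal: boolean) {
  const rowsPromise = fetchRows() // fired immediately, awaited later
  if (includeTotal) {
    const [total, rows] = await Promise.all([fetchCount(), rowsPromise])
    return { rows, totalCount: total }
  }
  return { rows: await rowsPromise, totalCount: null }
}
```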
@@ -156,32 +156,86 @@ function toToolData(tc: NonNullable<ContentBlock['toolCall']>): ToolCallData {
 */
function parseBlocks(blocks: ContentBlock[]): MessageSegment[] {
  const segments: MessageSegment[] = []
-  let group: AgentGroupSegment | null = null
-  const pushGroup = (nextGroup: AgentGroupSegment, isOpen = false) => {
-    segments.push({ ...nextGroup, isOpen })
-  }
+  const groupsByKey = new Map<string, AgentGroupSegment>()
+  let activeGroupKey: string | null = null
+
+  const groupKey = (name: string, parentToolCallId: string | undefined) =>
+    parentToolCallId ? `${name}:${parentToolCallId}` : `${name}:legacy`
+
+  const resolveGroupKey = (name: string, parentToolCallId: string | undefined) => {
+    if (parentToolCallId) return groupKey(name, parentToolCallId)
+    if (activeGroupKey && groupsByKey.get(activeGroupKey)?.agentName === name) {
+      return activeGroupKey
+    }
+    for (const [key, g] of groupsByKey) {
+      if (g.agentName === name && g.isOpen) return key
+    }
+    return groupKey(name, undefined)
+  }
+
+  const ensureGroup = (
+    name: string,
+    parentToolCallId: string | undefined
+  ): { group: AgentGroupSegment; created: boolean } => {
+    const key = resolveGroupKey(name, parentToolCallId)
+    const existing = groupsByKey.get(key)
+    if (existing) return { group: existing, created: false }
+    const group: AgentGroupSegment = {
+      type: 'agent_group',
+      id: `agent-${key}-${segments.length}`,
+      agentName: name,
+      agentLabel: resolveAgentLabel(name),
+      items: [],
+      isDelegating: false,
+      isOpen: false,
+    }
+    segments.push(group)
+    groupsByKey.set(key, group)
+    return { group, created: true }
+  }
+
+  const findGroupForSubagentChunk = (
+    parentToolCallId: string | undefined
+  ): AgentGroupSegment | undefined => {
+    if (parentToolCallId) {
+      for (const [key, g] of groupsByKey) {
+        if (key.endsWith(`:${parentToolCallId}`)) return g
+      }
+      return undefined
+    }
+    if (activeGroupKey) return groupsByKey.get(activeGroupKey)
+    return undefined
+  }
+
+  const flushLanes = () => {
+    for (const g of groupsByKey.values()) {
+      g.isOpen = false
+      g.isDelegating = false
+    }
+    groupsByKey.clear()
+    activeGroupKey = null
+  }

  for (let i = 0; i < blocks.length; i++) {
    const block = blocks[i]

    if (block.type === 'subagent_text' || block.type === 'subagent_thinking') {
-      if (!block.content || !group) continue
-      group.isDelegating = false
-      const lastItem = group.items[group.items.length - 1]
+      if (!block.content) continue
+      const g = findGroupForSubagentChunk(block.parentToolCallId)
+      if (!g) continue
+      g.isDelegating = false
+      const lastItem = g.items[g.items.length - 1]
      if (lastItem?.type === 'text') {
        lastItem.content += block.content
      } else {
-        group.items.push({ type: 'text', content: block.content })
+        g.items.push({ type: 'text', content: block.content })
      }
      continue
    }

    if (block.type === 'thinking') {
      if (!block.content?.trim()) continue
-      if (group) {
-        pushGroup(group)
-        group = null
-      }
+      flushLanes()
      const last = segments[segments.length - 1]
      if (last?.type === 'thinking' && last.endedAt === undefined) {
        last.content += block.content
@@ -201,21 +255,19 @@ function parseBlocks(blocks: ContentBlock[]): MessageSegment[] {
    if (block.type === 'text') {
      if (!block.content) continue
      if (block.subagent) {
-        if (group && group.agentName === block.subagent) {
-          group.isDelegating = false
-          const lastItem = group.items[group.items.length - 1]
+        const g = groupsByKey.get(resolveGroupKey(block.subagent, block.parentToolCallId))
+        if (g) {
+          g.isDelegating = false
+          const lastItem = g.items[g.items.length - 1]
          if (lastItem?.type === 'text') {
            lastItem.content += block.content
          } else {
-            group.items.push({ type: 'text', content: block.content })
+            g.items.push({ type: 'text', content: block.content })
          }
          continue
        }
      }
-      if (group) {
-        pushGroup(group)
-        group = null
-      }
+      flushLanes()
      const last = segments[segments.length - 1]
      if (last?.type === 'text') {
        last.content += block.content
@@ -228,34 +280,23 @@ function parseBlocks(blocks: ContentBlock[]): MessageSegment[] {
    if (block.type === 'subagent') {
      if (!block.content) continue
      const key = block.content
-      if (group && group.agentName === key) continue
-
-      const dispatchToolName = SUBAGENT_DISPATCH_TOOLS[key]
      let inheritedDelegation = false
-      if (group && dispatchToolName) {
-        const last: AgentGroupItem | undefined = group.items[group.items.length - 1]
-        if (last?.type === 'tool' && last.data.toolName === dispatchToolName) {
-          inheritedDelegation = !isToolDone(last.data.status) && Boolean(last.data.streamingArgs)
-          group.items.pop()
+      const dispatchToolName = SUBAGENT_DISPATCH_TOOLS[key]
+      if (dispatchToolName) {
+        const mship = groupsByKey.get(groupKey('mothership', undefined))
+        if (mship) {
+          const last = mship.items[mship.items.length - 1]
+          if (last?.type === 'tool' && last.data.toolName === dispatchToolName) {
+            inheritedDelegation = !isToolDone(last.data.status) && Boolean(last.data.streamingArgs)
+            mship.items.pop()
+          }
        }
      }
-        if (group.items.length > 0) {
-          pushGroup(group)
-        }
-        group = null
-      } else if (group) {
-        pushGroup(group)
-        group = null
-      }
-
-      group = {
-        type: 'agent_group',
-        id: `agent-${key}-${i}`,
-        agentName: key,
-        agentLabel: resolveAgentLabel(key),
-        items: [],
-        isDelegating: inheritedDelegation,
-        isOpen: false,
-      }
+      groupsByKey.delete(groupKey('mothership', undefined))
+      const { group: g } = ensureGroup(key, block.parentToolCallId)
+      if (inheritedDelegation) g.isDelegating = true
+      g.isOpen = true
+      activeGroupKey = resolveGroupKey(key, block.parentToolCallId)
      continue
    }

@@ -267,95 +308,75 @@ function parseBlocks(blocks: ContentBlock[]): MessageSegment[] {
    const isDispatch = SUBAGENT_KEYS.has(tc.name) && !tc.calledBy

    if (isDispatch) {
-      if (!group || group.agentName !== tc.name) {
-        if (group) {
-          pushGroup(group)
-          group = null
-        }
-        group = {
-          type: 'agent_group',
-          id: `agent-${tc.name}-${i}`,
-          agentName: tc.name,
-          agentLabel: resolveAgentLabel(tc.name),
-          items: [],
-          isDelegating: false,
-          isOpen: false,
-        }
-      }
-      group.isDelegating = isDelegatingTool(tc)
+      groupsByKey.delete(groupKey('mothership', undefined))
+      const { group: g } = ensureGroup(tc.name, tc.id)
+      g.isDelegating = isDelegatingTool(tc)
+      g.isOpen = g.isDelegating
      continue
    }

    const tool = toToolData(tc)

-    if (tc.calledBy && group && group.agentName === tc.calledBy) {
-      group.isDelegating = false
-      group.items.push({ type: 'tool', data: tool })
-    } else if (tc.calledBy) {
-      if (group) {
-        pushGroup(group)
-        group = null
-      }
-      group = {
-        type: 'agent_group',
-        id: `agent-${tc.calledBy}-${i}`,
-        agentName: tc.calledBy,
-        agentLabel: resolveAgentLabel(tc.calledBy),
-        items: [{ type: 'tool', data: tool }],
-        isDelegating: false,
-        isOpen: false,
-      }
+    if (tc.calledBy) {
+      const { group: g, created } = ensureGroup(tc.calledBy, block.parentToolCallId)
+      g.isDelegating = false
+      if (created && block.parentToolCallId) g.isOpen = true
+      g.items.push({ type: 'tool', data: tool })
+      activeGroupKey = resolveGroupKey(tc.calledBy, block.parentToolCallId)
    } else {
-      if (group && group.agentName === 'mothership') {
-        group.items.push({ type: 'tool', data: tool })
-      } else {
-        if (group) {
-          pushGroup(group)
-          group = null
-        }
-        group = {
-          type: 'agent_group',
-          id: `agent-mothership-${i}`,
-          agentName: 'mothership',
-          agentLabel: 'Mothership',
-          items: [{ type: 'tool', data: tool }],
-          isDelegating: false,
-          isOpen: false,
-        }
-      }
+      const { group: g } = ensureGroup('mothership', undefined)
+      g.items.push({ type: 'tool', data: tool })
    }
    continue
  }

  if (block.type === 'options') {
    if (!block.options?.length) continue
-    if (group) {
-      pushGroup(group)
-      group = null
-    }
+    flushLanes()
    segments.push({ type: 'options', items: block.options })
    continue
  }

  if (block.type === 'subagent_end') {
-    if (group) {
-      pushGroup(group)
-      group = null
+    if (block.parentToolCallId) {
+      for (const [key, g] of groupsByKey) {
+        if (key.endsWith(`:${block.parentToolCallId}`)) {
+          g.isOpen = false
+          g.isDelegating = false
+        }
+      }
+      if (activeGroupKey?.endsWith(`:${block.parentToolCallId}`)) {
+        activeGroupKey = null
+      }
+    } else {
+      for (const [key, g] of groupsByKey) {
+        if (key.endsWith(':legacy') && g.agentName !== 'mothership') {
+          g.isOpen = false
+          g.isDelegating = false
+        }
+      }
+      if (activeGroupKey?.endsWith(':legacy')) {
+        activeGroupKey = null
+      }
    }
    continue
  }

  if (block.type === 'stopped') {
-    if (group) {
-      pushGroup(group)
-      group = null
-    }
+    flushLanes()
    segments.push({ type: 'stopped' })
  }
}

-  if (group) pushGroup(group, true)
-  return segments
+  const visibleSegments = segments.filter(
+    (segment) =>
+      segment.type !== 'agent_group' ||
+      segment.items.length > 0 ||
+      segment.isDelegating ||
+      segment.isOpen
+  )
+
+  return visibleSegments
}

/**
@@ -428,12 +449,6 @@ export function MessageContent({
    isStreaming &&
    !hasTrailingContent &&
    (lastSegment.type === 'thinking' || hasSubagentEnded || allLastGroupToolsDone)
-  const lastOpenSubagentGroupId = [...segments]
-    .reverse()
-    .find(
-      (segment): segment is AgentGroupSegment =>
-        segment.type === 'agent_group' && segment.agentName !== 'mothership' && segment.isOpen
-    )?.id

  return (
    <div className='space-y-[10px]'>
@@ -488,8 +503,8 @@ export function MessageContent({
                items={segment.items}
                isDelegating={segment.isDelegating}
                isStreaming={isStreaming}
-                autoCollapse={allToolsDone && hasFollowingText}
-                defaultExpanded={segment.id === lastOpenSubagentGroupId}
+                autoCollapse={!segment.isOpen && allToolsDone && hasFollowingText}
+                defaultExpanded={segment.isOpen}
              />
            </div>
          )
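The rewrite keys each agent group (a "lane") by the tool call that spawned it, so chunks from concurrently running subagents land in the right group instead of whichever opened last. The keying idea in isolation, as a minimal sketch:

```ts
type Lane = { agentName: string; items: string[]; isOpen: boolean }

const lanes = new Map<string, Lane>()

function laneKey(name: string, parentToolCallId?: string): string {
  // Streams recorded before parentToolCallId existed fall back to one
  // shared 'legacy' lane per agent name.
  return parentToolCallId ? `${name}:${parentToolCallId}` : `${name}:legacy`
}

function appendChunk(name: string, chunk: string, parentToolCallId?: string) {
  const key = laneKey(name, parentToolCallId)
  let lane = lanes.get(key)
  if (!lane) {
    lane = { agentName: name, items: [], isOpen: true }
    lanes.set(key, lane)
  }
  lane.items.push(chunk)
}
```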
@@ -1,6 +1,6 @@
'use client'

-import { useCallback, useLayoutEffect, useRef } from 'react'
+import { useCallback, useLayoutEffect, useMemo, useRef } from 'react'
import { cn } from '@/lib/core/utils/cn'
import { MessageActions } from '@/app/workspace/[workspaceId]/components'
import { ChatMessageAttachments } from '@/app/workspace/[workspaceId]/home/components/chat-message-attachments'
@@ -22,6 +22,7 @@ import type {
  QueuedMessage,
} from '@/app/workspace/[workspaceId]/home/types'
import { useAutoScroll } from '@/hooks/use-auto-scroll'
+import { useProgressiveList } from '@/hooks/use-progressive-list'
import type { ChatContext } from '@/stores/panel'
import { MothershipChatSkeleton } from './mothership-chat-skeleton'

@@ -104,6 +105,21 @@ export function MothershipChat({
    scrollOnMount: true,
  })
  const hasMessages = messages.length > 0
+  const stagingKey = chatId ?? 'pending-chat'
+  const { staged: stagedMessages, isStaging } = useProgressiveList(messages, stagingKey)
+  const stagedMessageCount = stagedMessages.length
+  const stagedOffset = messages.length - stagedMessages.length
+  const precedingUserContentByIndex = useMemo(() => {
+    const contentByIndex: Array<string | undefined> = []
+    let lastUserContent: string | undefined
+    for (const [index, message] of messages.entries()) {
+      contentByIndex[index] = lastUserContent
+      if (message.role === 'user') {
+        lastUserContent = message.content
+      }
+    }
+    return contentByIndex
+  }, [messages])
  const initialScrollDoneRef = useRef(false)
  const userInputRef = useRef<UserInputHandle>(null)
  const handleSendQueuedHead = useCallback(() => {
@@ -134,6 +150,11 @@ export function MothershipChat({
    scrollToBottom()
  }, [hasMessages, initialScrollBlocked, scrollToBottom])

+  useLayoutEffect(() => {
+    if (!isStaging || initialScrollBlocked || !initialScrollDoneRef.current) return
+    scrollToBottom()
+  }, [isStaging, stagedMessageCount, initialScrollBlocked, scrollToBottom])
+
  return (
    <div className={cn('flex h-full min-h-0 flex-col', className)}>
      <div ref={scrollContainerRef} className={styles.scrollContainer}>
@@ -141,7 +162,8 @@ export function MothershipChat({
          <MothershipChatSkeleton layout={layout} />
        ) : (
          <div className={styles.content}>
-            {messages.map((msg, index) => {
+            {stagedMessages.map((msg, localIndex) => {
+              const index = stagedOffset + localIndex
              if (msg.role === 'user') {
                const hasAttachments = Boolean(msg.attachments?.length)
                return (
@@ -177,10 +199,7 @@ export function MothershipChat({
              }

              const isLastMessage = index === messages.length - 1
-              const precedingUserMsg = [...messages]
-                .slice(0, index)
-                .reverse()
-                .find((m) => m.role === 'user')
+              const precedingUserContent = precedingUserContentByIndex[index]

              return (
                <div key={msg.id} className={styles.assistantRow}>
@@ -196,7 +215,7 @@ export function MothershipChat({
                  <MessageActions
                    content={msg.content}
                    chatId={chatId}
-                    userQuery={precedingUserMsg?.content}
+                    userQuery={precedingUserContent}
                    requestId={msg.requestId}
                  />
                </div>
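The hunk replaces a per-assistant-message reverse scan (quadratic over the list) with one memoized forward pass that records, for each index, the latest user message before it. The same precomputation, sketched standalone:

```ts
interface Msg { role: 'user' | 'assistant'; content: string }

function precedingUserContentByIndex(messages: Msg[]): Array<string | undefined> {
  const out: Array<string | undefined> = []
  let lastUser: string | undefined
  for (const [i, m] of messages.entries()) {
    out[i] = lastUser // user content seen strictly before index i
    if (m.role === 'user') lastUser = m.content
  }
  return out
}
```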
@@ -131,6 +131,7 @@ import type {
  MothershipResource,
  MothershipResourceType,
  QueuedMessage,
+  ToolCallInfo,
} from '../types'
import { ToolCallStatus } from '../types'

@@ -701,7 +702,9 @@ function parseStreamBatchResponse(value: unknown): StreamBatchResponse {

function toRawPersistedContentBlock(block: ContentBlock): Record<string, unknown> | null {
  const persisted = toRawPersistedContentBlockBody(block)
-  return persisted ? withBlockTiming(persisted, block) : null
+  if (!persisted) return null
+  if (block.parentToolCallId) persisted.parentToolCallId = block.parentToolCallId
+  return withBlockTiming(persisted, block)
}

function toRawPersistedContentBlockBody(block: ContentBlock): Record<string, unknown> | null {
@@ -1215,7 +1218,7 @@ export function useChat(
      reader: ReadableStreamDefaultReader<Uint8Array>,
      assistantId: string,
      expectedGen?: number,
-      options?: { preserveExistingState?: boolean }
+      options?: { preserveExistingState?: boolean; suppressWorkflowToolStarts?: boolean }
    ) => Promise<{ sawStreamError: boolean; sawComplete: boolean }>
  >(async () => ({ sawStreamError: false, sawComplete: false }))
  const attachToExistingStreamRef = useRef<
@@ -1457,6 +1460,9 @@ export function useChat(
      if (handledClientWorkflowToolIdsRef.current.has(toolCallId)) {
        return
      }
+      if (recoveringClientWorkflowToolIdsRef.current.has(toolCallId)) {
+        return
+      }
      handledClientWorkflowToolIdsRef.current.add(toolCallId)

      ensureWorkflowToolResource(toolArgs)
@@ -1467,41 +1473,41 @@ export function useChat(

  const recoverPendingClientWorkflowTools = useCallback(
    async (nextMessages: ChatMessage[]) => {
+      const pending: ToolCallInfo[] = []
+
      for (const message of nextMessages) {
        for (const block of message.contentBlocks ?? []) {
          const toolCall = block.toolCall
-          if (!toolCall || !isWorkflowToolName(toolCall.name)) {
-            continue
-          }
-          if (toolCall.status !== 'executing') {
-            continue
-          }
-
+          if (!toolCall || !isWorkflowToolName(toolCall.name)) continue
+          if (toolCall.status !== 'executing') continue
          if (
            handledClientWorkflowToolIdsRef.current.has(toolCall.id) ||
            recoveringClientWorkflowToolIdsRef.current.has(toolCall.id)
          ) {
            continue
          }

          recoveringClientWorkflowToolIdsRef.current.add(toolCall.id)
+          pending.push(toolCall)
+        }
+      }

-          try {
-            const toolArgs = toolCall.params ?? {}
-            const targetWorkflowId = ensureWorkflowToolResource(toolArgs)
+      for (const toolCall of pending) {
+        try {
+          const toolArgs = toolCall.params ?? {}
+          const targetWorkflowId = ensureWorkflowToolResource(toolArgs)

-            if (targetWorkflowId) {
-              const rebound = await bindRunToolToExecution(toolCall.id, targetWorkflowId)
-              if (rebound) {
-                handledClientWorkflowToolIdsRef.current.add(toolCall.id)
-                continue
-              }
+          if (targetWorkflowId) {
+            const rebound = await bindRunToolToExecution(toolCall.id, targetWorkflowId)
+            if (rebound) {
+              handledClientWorkflowToolIdsRef.current.add(toolCall.id)
+              continue
            }
+          }

-            recoveringClientWorkflowToolIdsRef.current.delete(toolCall.id)
-            startClientWorkflowTool(toolCall.id, toolCall.name, toolArgs)
-          } finally {
-            recoveringClientWorkflowToolIdsRef.current.delete(toolCall.id)
-          }
+          startClientWorkflowTool(toolCall.id, toolCall.name, toolArgs)
+        } finally {
+          recoveringClientWorkflowToolIdsRef.current.delete(toolCall.id)
        }
      }
    },
@@ -1701,7 +1707,7 @@ export function useChat(
      reader: ReadableStreamDefaultReader<Uint8Array>,
      assistantId: string,
      expectedGen?: number,
-      options?: { preserveExistingState?: boolean }
+      options?: { preserveExistingState?: boolean; suppressWorkflowToolStarts?: boolean }
    ) => {
      const decoder = new TextDecoder()
      streamReaderRef.current = reader
@@ -1731,6 +1737,7 @@ export function useChat(
      for (let i = blocks.length - 1; i >= 0; i--) {
        if (blocks[i].type === 'subagent' && blocks[i].content) {
          activeSubagent = blocks[i].content
+          activeSubagentParentToolCallId = blocks[i].parentToolCallId
          break
        }
        if (blocks[i].type === 'subagent_end') {
@@ -1760,23 +1767,45 @@ export function useChat(
        if (block && block.endedAt === undefined) block.endedAt = toEventMs(ts)
      }

-      const ensureTextBlock = (subagentName: string | undefined, ts?: string): ContentBlock => {
+      const ensureTextBlock = (
+        subagentName: string | undefined,
+        parentToolCallId: string | undefined,
+        ts?: string
+      ): ContentBlock => {
        const last = blocks[blocks.length - 1]
-        if (last?.type === 'text' && last.subagent === subagentName) return last
+        if (
+          last?.type === 'text' &&
+          last.subagent === subagentName &&
+          last.parentToolCallId === parentToolCallId
+        ) {
+          return last
+        }
        stampBlockEnd(last, ts)
        const b: ContentBlock = { type: 'text', content: '', timestamp: toEventMs(ts) }
        if (subagentName) b.subagent = subagentName
+        if (parentToolCallId) b.parentToolCallId = parentToolCallId
        blocks.push(b)
        return b
      }

-      const ensureThinkingBlock = (subagentName: string | undefined, ts?: string): ContentBlock => {
+      const ensureThinkingBlock = (
+        subagentName: string | undefined,
+        parentToolCallId: string | undefined,
+        ts?: string
+      ): ContentBlock => {
        const targetType = subagentName ? 'subagent_thinking' : 'thinking'
        const last = blocks[blocks.length - 1]
-        if (last?.type === targetType && last.subagent === subagentName) return last
+        if (
+          last?.type === targetType &&
+          last.subagent === subagentName &&
+          last.parentToolCallId === parentToolCallId
+        ) {
+          return last
+        }
        stampBlockEnd(last, ts)
        const b: ContentBlock = { type: targetType, content: '', timestamp: toEventMs(ts) }
        if (subagentName) b.subagent = subagentName
+        if (parentToolCallId) b.parentToolCallId = parentToolCallId
        blocks.push(b)
        return b
      }
@@ -1793,9 +1822,27 @@ export function useChat(
        return activeSubagent
      }

-      const appendInlineErrorTag = (tag: string, subagentName?: string, ts?: string) => {
+      const resolveParentForSubagentBlock = (
+        subagent: string | undefined,
+        scopedParent: string | undefined
+      ): string | undefined => {
+        if (!subagent) return undefined
+        if (scopedParent) return scopedParent
+        if (activeSubagent === subagent) return activeSubagentParentToolCallId
+        for (const [parent, name] of subagentByParentToolCallId) {
+          if (name === subagent) return parent
+        }
+        return undefined
+      }
+
+      const appendInlineErrorTag = (
+        tag: string,
+        subagentName?: string,
+        parentToolCallId?: string,
+        ts?: string
+      ) => {
        if (runningText.includes(tag)) return
-        const tb = ensureTextBlock(subagentName, ts)
+        const tb = ensureTextBlock(subagentName, parentToolCallId, ts)
        const prefix = runningText.length > 0 && !runningText.endsWith('\n') ? '\n' : ''
        tb.content = `${tb.content ?? ''}${prefix}${tag}`
        runningText += `${prefix}${tag}`
@@ -2008,7 +2055,11 @@ export function useChat(
            if (chunk) {
              const eventTs = typeof parsed.ts === 'string' ? parsed.ts : undefined
              if (parsed.payload.channel === MothershipStreamV1TextChannel.thinking) {
-                const tb = ensureThinkingBlock(scopedSubagent, eventTs)
+                const scopedParentForBlock = resolveParentForSubagentBlock(
+                  scopedSubagent,
+                  scopedParentToolCallId
+                )
+                const tb = ensureThinkingBlock(scopedSubagent, scopedParentForBlock, eventTs)
                tb.content = (tb.content ?? '') + chunk
                flushText()
                break
@@ -2019,7 +2070,11 @@ export function useChat(
                lastContentSource !== contentSource &&
                runningText.length > 0 &&
                !runningText.endsWith('\n')
-              const tb = ensureTextBlock(scopedSubagent, eventTs)
+              const scopedParentForBlock = resolveParentForSubagentBlock(
+                scopedSubagent,
+                scopedParentToolCallId
+              )
+              const tb = ensureTextBlock(scopedSubagent, scopedParentForBlock, eventTs)
              const normalizedChunk = needsBoundaryNewline ? `\n${chunk}` : chunk
              tb.content = (tb.content ?? '') + normalizedChunk
              runningText += normalizedChunk
@@ -2355,9 +2410,17 @@ export function useChat(
              }
            }

-            if (!toolMap.has(id)) {
+            const existingToolCall = toolMap.has(id)
+              ? blocks[toolMap.get(id)!]?.toolCall
+              : undefined
+            const isNewToolCall = !existingToolCall
+            if (isNewToolCall) {
              stampBlockEnd(blocks[blocks.length - 1])
              toolMap.set(id, blocks.length)
+              const parentToolCallIdForBlock = resolveParentForSubagentBlock(
+                scopedSubagent,
+                scopedParentToolCallId
+              )
              blocks.push({
                type: 'tool_call',
                toolCall: {
@@ -2368,6 +2431,9 @@ export function useChat(
                  params: args,
                  calledBy: scopedSubagent,
                },
+                ...(parentToolCallIdForBlock
+                  ? { parentToolCallId: parentToolCallIdForBlock }
+                  : {}),
                timestamp: Date.now(),
              })
              if (name === ReadTool.id || isResourceToolName(name)) {
@@ -2385,7 +2451,14 @@ export function useChat(
            flush()

            if (isWorkflowToolName(name) && !isPartial) {
-              startClientWorkflowTool(id, name, args ?? {})
+              const shouldStartWorkflowTool =
+                !options?.suppressWorkflowToolStarts &&
+                (isNewToolCall ||
+                  (existingToolCall?.status === ToolCallStatus.executing &&
+                    !existingToolCall.result))
+              if (shouldStartWorkflowTool) {
+                startClientWorkflowTool(id, name, args ?? {})
+              }
            }
            break
          }
@@ -2488,9 +2561,13 @@ export function useChat(
              break
            }
            const spanData = asPayloadRecord(payload.data)
-            const parentToolCallId =
-              scopedParentToolCallId ??
-              (typeof spanData?.tool_call_id === 'string' ? spanData.tool_call_id : undefined)
+            const parentToolCallIdFromData =
+              typeof spanData?.tool_call_id === 'string'
+                ? spanData.tool_call_id
+                : typeof spanData?.toolCallId === 'string'
+                  ? spanData.toolCallId
+                  : undefined
+            const parentToolCallId = scopedParentToolCallId ?? parentToolCallIdFromData
            const isPendingPause = spanData?.pending === true
            const name = typeof payload.agent === 'string' ? payload.agent : scopedAgentId
            if (payload.event === MothershipStreamV1SpanLifecycleEvent.start && name) {
@@ -2505,7 +2582,12 @@ export function useChat(
              activeSubagentParentToolCallId = parentToolCallId
              if (!isSameActiveSubagent) {
                stampBlockEnd(blocks[blocks.length - 1])
-                blocks.push({ type: 'subagent', content: name, timestamp: Date.now() })
+                blocks.push({
+                  type: 'subagent',
+                  content: name,
+                  ...(parentToolCallId ? { parentToolCallId } : {}),
+                  timestamp: Date.now(),
+                })
              }
              if (name === FILE_SUBAGENT_ID && !isSameActiveSubagent) {
                applyPreviewSessionUpdate({
@@ -2549,14 +2631,23 @@ export function useChat(
              if (name) {
                for (let i = blocks.length - 1; i >= 0; i--) {
                  const b = blocks[i]
-                  if (b.type === 'subagent' && b.content === name && b.endedAt === undefined) {
+                  if (
+                    b.type === 'subagent' &&
+                    b.content === name &&
+                    b.endedAt === undefined &&
+                    (!parentToolCallId || b.parentToolCallId === parentToolCallId)
+                  ) {
                    b.endedAt = endNow
                    break
                  }
                }
              }
              stampBlockEnd(blocks[blocks.length - 1])
-              blocks.push({ type: 'subagent_end', timestamp: endNow })
+              blocks.push({
+                type: 'subagent_end',
+                ...(parentToolCallId ? { parentToolCallId } : {}),
+                timestamp: endNow,
+              })
              flush()
            }
            break
@@ -2567,6 +2658,7 @@ export function useChat(
            appendInlineErrorTag(
              buildInlineErrorTag(parsed.payload),
              scopedSubagent,
+              resolveParentForSubagentBlock(scopedSubagent, scopedParentToolCallId),
              typeof parsed.ts === 'string' ? parsed.ts : undefined
            )
            break
@@ -2671,6 +2763,7 @@ export function useChat(
    let latestCursor = afterCursor
    let seedEvents = opts.initialBatch?.events ?? []
    let streamStatus = opts.initialBatch?.status ?? 'unknown'
+    let suppressSeedWorkflowStarts = seedEvents.length > 0

    const isStaleReconnect = () =>
      streamGenRef.current !== expectedGen || abortControllerRef.current?.signal.aborted === true
@@ -2689,11 +2782,15 @@ export function useChat(
        buildReplayStream(seedEvents).getReader(),
        assistantId,
        expectedGen,
-        { preserveExistingState: true }
+        {
+          preserveExistingState: true,
+          suppressWorkflowToolStarts: suppressSeedWorkflowStarts,
+        }
      )
      latestCursor = String(seedEvents[seedEvents.length - 1]?.eventId ?? latestCursor)
      lastCursorRef.current = latestCursor
      seedEvents = []
+      suppressSeedWorkflowStarts = false

      if (replayResult.sawStreamError) {
        return { error: true, aborted: false }
@@ -2998,6 +3095,7 @@ export function useChat(
          ...(display ? { display } : {}),
          calledBy: block.toolCall.calledBy,
        },
+        ...(block.parentToolCallId ? { parentToolCallId: block.parentToolCallId } : {}),
        ...timing,
      }
    }
@@ -3005,6 +3103,7 @@ export function useChat(
      type: block.type,
      content: block.content,
      ...(block.subagent ? { lane: 'subagent' } : {}),
+      ...(block.parentToolCallId ? { parentToolCallId: block.parentToolCallId } : {}),
      ...timing,
    }
  })
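A note on `suppressWorkflowToolStarts`: replayed seed events re-deliver `tool_call` events the client has already acted on, so workflow launches must be idempotent across replays. The decision this hunk encodes, reduced to a predicate:

```ts
// Sketch of the start-or-skip rule for client workflow tools. During seed
// replay everything is suppressed; afterwards, only genuinely new calls (or
// ones still executing without a recorded result) may launch.
function shouldStart(opts: {
  suppress: boolean
  isNew: boolean
  status?: 'executing' | 'success' | 'error'
  hasResult: boolean
}): boolean {
  if (opts.suppress) return false
  return opts.isNew || (opts.status === 'executing' && !opts.hasResult)
}
```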
@@ -133,6 +133,7 @@ export interface ContentBlock {
  options?: OptionItem[]
  timestamp?: number
  endedAt?: number
+  parentToolCallId?: string
}

export interface ChatMessageAttachment {
@@ -604,6 +604,10 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
    const startTime = getHighResTime()

    try {
+      if (!options.workspaceId) {
+        throw new Error('workspaceId is required for multipart upload')
+      }
+
      const initiateResponse = await fetch('/api/files/multipart?action=initiate', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
@@ -611,6 +615,7 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
          fileName: file.name,
          contentType: getFileContentType(file),
          fileSize: file.size,
+          workspaceId: options.workspaceId,
        }),
      })

@@ -618,7 +623,7 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
        throw new Error(`Failed to initiate multipart upload: ${initiateResponse.statusText}`)
      }

-      const { uploadId, key } = await initiateResponse.json()
+      const { uploadId, key, uploadToken } = await initiateResponse.json()
      logger.info(`Initiated multipart upload with ID: ${uploadId}`)

      const chunkSize = UPLOAD_CONFIG.CHUNK_SIZE
@@ -629,8 +634,7 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
-          uploadId,
-          key,
+          uploadToken,
          partNumbers,
        }),
      })
@@ -639,7 +643,7 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
        await fetch('/api/files/multipart?action=abort', {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
-          body: JSON.stringify({ uploadId, key }),
+          body: JSON.stringify({ uploadToken }),
        })
        throw new Error(`Failed to get part URLs: ${partUrlsResponse.statusText}`)
      }
@@ -723,8 +727,7 @@ export function useKnowledgeUpload(options: UseKnowledgeUploadOptions = {}) {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
-          uploadId,
-          key,
+          uploadToken,
          parts: uploadedParts,
        }),
      })
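Taken together with the route changes, the client flow becomes: initiate (returns the token), get part URLs, PUT the parts, then complete or abort, with the token replacing raw `uploadId`/`key` in every follow-up body. A compressed single-part sketch (endpoint paths and body shapes match the diff; the presigned-URL shape is an assumption):

```ts
async function uploadOnePart(file: File, workspaceId: string) {
  const { uploadToken } = await fetch('/api/files/multipart?action=initiate', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      fileName: file.name,
      contentType: file.type,
      fileSize: file.size,
      workspaceId,
    }),
  }).then((r) => r.json())

  const { presignedUrls } = await fetch('/api/files/multipart?action=get-part-urls', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ uploadToken, partNumbers: [1] }),
  }).then((r) => r.json())

  // Assumed shape: one presigned URL string per requested part number.
  const put = await fetch(presignedUrls[0], { method: 'PUT', body: file })

  // S3-style part descriptor, per the { ETag, PartNumber } format noted
  // in the route's original comments.
  return fetch('/api/files/multipart?action=complete', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      uploadToken,
      parts: [{ ETag: put.headers.get('ETag'), PartNumber: 1 }],
    }),
  }).then((r) => r.json())
}
```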
@@ -34,6 +34,7 @@ export function useTableData({
    offset: 0,
    filter: queryOptions.filter,
    sort: queryOptions.sort,
+    includeTotal: false,
    enabled: Boolean(workspaceId && tableId),
  })

@@ -6,6 +6,7 @@ import type { ComboboxOption } from '@/components/emcn'
import { useTableColumns } from '@/lib/table/hooks'
import type { FilterRule } from '@/lib/table/query-builder/constants'
import { useFilterBuilder } from '@/lib/table/query-builder/use-query-builder'
+import { useCanonicalSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-canonical-sub-block-value'
import { useSubBlockInput } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-input'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { FilterRuleRow } from './components/filter-rule-row'
@@ -40,7 +41,7 @@ export function FilterBuilder({
  tableIdSubBlockId = 'tableId',
}: FilterBuilderProps) {
  const [storeValue, setStoreValue] = useSubBlockValue<FilterRule[]>(blockId, subBlockId)
-  const [tableIdValue] = useSubBlockValue<string>(blockId, tableIdSubBlockId)
+  const tableIdValue = useCanonicalSubBlockValue<string>(blockId, tableIdSubBlockId)

  const dynamicColumns = useTableColumns({ tableId: tableIdValue })
  const columns = useMemo(() => {
@@ -5,6 +5,7 @@ import { generateId } from '@sim/utils/id'
import type { ComboboxOption } from '@/components/emcn'
import { useTableColumns } from '@/lib/table/hooks'
import { SORT_DIRECTIONS, type SortRule } from '@/lib/table/query-builder/constants'
+import { useCanonicalSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-canonical-sub-block-value'
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
import { SortRuleRow } from './components/sort-rule-row'

@@ -36,7 +37,7 @@ export function SortBuilder({
  tableIdSubBlockId = 'tableId',
}: SortBuilderProps) {
  const [storeValue, setStoreValue] = useSubBlockValue<SortRule[]>(blockId, subBlockId)
-  const [tableIdValue] = useSubBlockValue<string>(blockId, tableIdSubBlockId)
+  const tableIdValue = useCanonicalSubBlockValue<string>(blockId, tableIdSubBlockId)

  const dynamicColumns = useTableColumns({ tableId: tableIdValue, includeBuiltIn: true })
  const columns = useMemo(() => {
```diff
@@ -0,0 +1,49 @@
+import { useCallback, useMemo } from 'react'
+import { isEqual } from 'es-toolkit'
+import { useStoreWithEqualityFn } from 'zustand/traditional'
+import { buildCanonicalIndex, resolveDependencyValue } from '@/lib/workflows/subblocks/visibility'
+import { getBlock } from '@/blocks/registry'
+import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
+import { useSubBlockStore } from '@/stores/workflows/subblock/store'
+import { useWorkflowStore } from '@/stores/workflows/workflow/store'
+
+/**
+ * Read a sub-block value by either its raw subBlockId or its canonicalParamId.
+ *
+ * `useSubBlockValue` only looks up the raw subBlockId. For fields that use
+ * `canonicalParamId` to unify basic/advanced inputs (e.g. `tableSelector` vs
+ * `manualTableId` both mapping to `tableId`), this hook resolves to whichever
+ * member of the canonical group currently holds the value.
+ */
+export function useCanonicalSubBlockValue<T = unknown>(
+  blockId: string,
+  canonicalOrSubBlockId: string
+): T | null {
+  const activeWorkflowId = useWorkflowRegistry((s) => s.activeWorkflowId)
+  const blockState = useWorkflowStore((state) => state.blocks[blockId])
+  const blockConfig = blockState?.type ? getBlock(blockState.type) : null
+  const canonicalIndex = useMemo(
+    () => buildCanonicalIndex(blockConfig?.subBlocks || []),
+    [blockConfig?.subBlocks]
+  )
+  const canonicalModeOverrides = blockState?.data?.canonicalModes
+
+  return useStoreWithEqualityFn(
+    useSubBlockStore,
+    useCallback(
+      (state) => {
+        if (!activeWorkflowId) return null
+        const blockValues = state.workflowValues[activeWorkflowId]?.[blockId] || {}
+        const resolved = resolveDependencyValue(
+          canonicalOrSubBlockId,
+          blockValues,
+          canonicalIndex,
+          canonicalModeOverrides
+        )
+        return (resolved ?? null) as T | null
+      },
+      [activeWorkflowId, blockId, canonicalOrSubBlockId, canonicalIndex, canonicalModeOverrides]
+    ),
+    (a, b) => isEqual(a, b)
+  )
+}
```
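Consumers of the new hook pass either a raw subBlockId or a canonicalParamId and get back whichever member of the canonical group currently holds the value. A minimal, hypothetical consumer:

```tsx
import { useCanonicalSubBlockValue } from './use-canonical-sub-block-value'

// Hypothetical consumer: resolves `tableId` regardless of whether the user set
// it via the basic `tableSelector` or the advanced `manualTableId` sub-block.
function TableIdLabel({ blockId }: { blockId: string }) {
  const tableId = useCanonicalSubBlockValue<string>(blockId, 'tableId')
  return <span>{tableId ?? 'No table selected'}</span>
}
```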
```diff
@@ -34,7 +34,7 @@ import { hasExecutionResult } from '@/executor/utils/errors'
 import { coerceValue } from '@/executor/utils/start-block'
 import { subscriptionKeys } from '@/hooks/queries/subscription'
 import { getWorkflows } from '@/hooks/queries/utils/workflow-cache'
-import { useExecutionStream } from '@/hooks/use-execution-stream'
+import { isExecutionStreamHttpError, useExecutionStream } from '@/hooks/use-execution-stream'
 import { WorkflowValidationError } from '@/serializer'
 import { useCurrentWorkflowExecution, useExecutionStore } from '@/stores/execution'
 import { useNotificationStore } from '@/stores/notifications'
@@ -60,6 +60,13 @@ const logger = createLogger('useWorkflowExecution')
  */
 const activeReconnections = new Set<string>()

+function isReconnectTerminal(error: unknown): boolean {
+  return (
+    isExecutionStreamHttpError(error) &&
+    (error.httpStatus === 404 || error.httpStatus === 403 || error.httpStatus === 401)
+  )
+}
+
 interface DebugValidationResult {
   isValid: boolean
   error?: string
@@ -1283,8 +1290,7 @@ export function useWorkflowExecution() {
   } else {
     if (!executor) {
       try {
-        const httpStatus =
-          isRecord(error) && typeof error.httpStatus === 'number' ? error.httpStatus : undefined
+        const httpStatus = isExecutionStreamHttpError(error) ? error.httpStatus : undefined
         const storeAddConsole = useTerminalConsoleStore.getState().addConsole

         if (httpStatus && activeWorkflowId) {
@@ -1867,8 +1873,6 @@ export function useWorkflowExecution() {
     activeReconnections.add(reconnectWorkflowId)

     executionStream.cancel(reconnectWorkflowId)
-    setCurrentExecutionId(reconnectWorkflowId, executionId)
-    setIsExecuting(reconnectWorkflowId, true)

     const workflowEdges = useWorkflowStore.getState().edges
     const activeBlocksSet = new Set<string>()
@@ -1891,13 +1895,47 @@ export function useWorkflowExecution() {
       includeStartConsoleEntry: true,
     })

-    clearExecutionEntries(executionId)
-
     const capturedExecutionId = executionId
     const MAX_ATTEMPTS = 5
     const BASE_DELAY_MS = 1000
     const MAX_DELAY_MS = 15000

+    let activated = false
+    const ensureActivated = () => {
+      if (activated || cleanupRan) return
+      activated = true
+      setCurrentExecutionId(reconnectWorkflowId, capturedExecutionId)
+      setIsExecuting(reconnectWorkflowId, true)
+      clearExecutionEntries(capturedExecutionId)
+    }
+
+    const wrapHandler =
+      <T>(handler: (data: T) => void) =>
+      (data: T) => {
+        ensureActivated()
+        handler(data)
+      }
+
+    const cleanupFailedReconnect = () => {
+      const currentId = useExecutionStore.getState().getCurrentExecutionId(reconnectWorkflowId)
+      if (currentId && currentId !== capturedExecutionId) return
+
+      const hasRunningEntry = useTerminalConsoleStore
+        .getState()
+        .getWorkflowEntries(reconnectWorkflowId)
+        .some((entry) => entry.isRunning && entry.executionId === capturedExecutionId)
+
+      if (activated || hasRunningEntry) {
+        cancelRunningEntries(reconnectWorkflowId)
+      }
+
+      if (currentId === capturedExecutionId) {
+        setCurrentExecutionId(reconnectWorkflowId, null)
+        setIsExecuting(reconnectWorkflowId, false)
+        setActiveBlocks(reconnectWorkflowId, new Set())
+      }
+    }
+
     const attemptReconnect = async (attempt: number): Promise<void> => {
       if (cleanupRan || reconnectionComplete) return

@@ -1914,38 +1952,39 @@ export function useWorkflowExecution() {
         fromEventId,
         callbacks: {
           onEventId: (eid) => {
+            ensureActivated()
             fromEventId = eid
           },
-          onBlockStarted: handlers.onBlockStarted,
-          onBlockCompleted: handlers.onBlockCompleted,
-          onBlockError: handlers.onBlockError,
-          onBlockChildWorkflowStarted: handlers.onBlockChildWorkflowStarted,
+          onBlockStarted: wrapHandler(handlers.onBlockStarted),
+          onBlockCompleted: wrapHandler(handlers.onBlockCompleted),
+          onBlockError: wrapHandler(handlers.onBlockError),
+          onBlockChildWorkflowStarted: wrapHandler(handlers.onBlockChildWorkflowStarted),
           onExecutionCompleted: () => {
+            reconnectionComplete = true
+            activeReconnections.delete(reconnectWorkflowId)
+            if (!activated) {
+              clearExecutionPointer(reconnectWorkflowId)
+              return
+            }
            const currentId = useExecutionStore
               .getState()
               .getCurrentExecutionId(reconnectWorkflowId)
-            if (currentId !== capturedExecutionId) {
-              reconnectionComplete = true
-              activeReconnections.delete(reconnectWorkflowId)
-              return
-            }
-            reconnectionComplete = true
-            activeReconnections.delete(reconnectWorkflowId)
+            if (currentId !== capturedExecutionId) return
             setCurrentExecutionId(reconnectWorkflowId, null)
             setIsExecuting(reconnectWorkflowId, false)
             setActiveBlocks(reconnectWorkflowId, new Set())
           },
           onExecutionError: (data) => {
+            reconnectionComplete = true
+            activeReconnections.delete(reconnectWorkflowId)
+            if (!activated) {
+              clearExecutionPointer(reconnectWorkflowId)
+              return
+            }
             const currentId = useExecutionStore
               .getState()
               .getCurrentExecutionId(reconnectWorkflowId)
-            if (currentId !== capturedExecutionId) {
-              reconnectionComplete = true
-              activeReconnections.delete(reconnectWorkflowId)
-              return
-            }
-            reconnectionComplete = true
-            activeReconnections.delete(reconnectWorkflowId)
+            if (currentId !== capturedExecutionId) return
             setCurrentExecutionId(reconnectWorkflowId, null)
             setIsExecuting(reconnectWorkflowId, false)
             setActiveBlocks(reconnectWorkflowId, new Set())
@@ -1957,16 +1996,16 @@ export function useWorkflowExecution() {
             })
           },
           onExecutionCancelled: () => {
+            reconnectionComplete = true
+            activeReconnections.delete(reconnectWorkflowId)
+            if (!activated) {
+              clearExecutionPointer(reconnectWorkflowId)
+              return
+            }
             const currentId = useExecutionStore
               .getState()
               .getCurrentExecutionId(reconnectWorkflowId)
-            if (currentId !== capturedExecutionId) {
-              reconnectionComplete = true
-              activeReconnections.delete(reconnectWorkflowId)
-              return
-            }
-            reconnectionComplete = true
-            activeReconnections.delete(reconnectWorkflowId)
+            if (currentId !== capturedExecutionId) return
             setCurrentExecutionId(reconnectWorkflowId, null)
             setIsExecuting(reconnectWorkflowId, false)
             setActiveBlocks(reconnectWorkflowId, new Set())
@@ -1978,6 +2017,17 @@ export function useWorkflowExecution() {
           },
         })
       } catch (error) {
+        if (isReconnectTerminal(error)) {
+          logger.info('Reconnection skipped; run buffer no longer exists', {
+            executionId: capturedExecutionId,
+          })
+          reconnectionComplete = true
+          activeReconnections.delete(reconnectWorkflowId)
+          clearExecutionPointer(reconnectWorkflowId)
+          cleanupFailedReconnect()
+          return
+        }
+
         logger.warn('Execution reconnection attempt failed', {
           executionId: capturedExecutionId,
           attempt,
@@ -1986,17 +2036,27 @@ export function useWorkflowExecution() {
         if (!cleanupRan && !reconnectionComplete && attempt < MAX_ATTEMPTS) {
           return attemptReconnect(attempt + 1)
         }
         if (!cleanupRan && !reconnectionComplete) {
           reconnectionComplete = true
           activeReconnections.delete(reconnectWorkflowId)
+          cleanupFailedReconnect()
+          return
         }
       }
     }

     if (!reconnectionComplete && !cleanupRan) {
       reconnectionComplete = true
       activeReconnections.delete(reconnectWorkflowId)
-      const currentId = useExecutionStore.getState().getCurrentExecutionId(reconnectWorkflowId)
-      if (currentId === capturedExecutionId) {
-        cancelRunningEntries(reconnectWorkflowId)
-        setCurrentExecutionId(reconnectWorkflowId, null)
-        setIsExecuting(reconnectWorkflowId, false)
-        setActiveBlocks(reconnectWorkflowId, new Set())
+      if (activated) {
+        const currentId = useExecutionStore
+          .getState()
+          .getCurrentExecutionId(reconnectWorkflowId)
+        if (currentId === capturedExecutionId) {
+          cancelRunningEntries(reconnectWorkflowId)
+          setCurrentExecutionId(reconnectWorkflowId, null)
+          setIsExecuting(reconnectWorkflowId, false)
+          setActiveBlocks(reconnectWorkflowId, new Set())
+        }
       }
     }
   }
```
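The reconnect loop retries up to `MAX_ATTEMPTS` times; the delay schedule itself is not visible in these hunks, so the capped exponential backoff below is an assumption that is merely consistent with the `BASE_DELAY_MS`/`MAX_DELAY_MS` constants:

```typescript
const MAX_ATTEMPTS = 5
const BASE_DELAY_MS = 1000
const MAX_DELAY_MS = 15000

// Assumed capped exponential backoff: 1s, 2s, 4s, 8s, 15s.
// The actual delay schedule is not shown in the hunks above.
function reconnectDelayMs(attempt: number): number {
  return Math.min(BASE_DELAY_MS * 2 ** (attempt - 1), MAX_DELAY_MS)
}

for (let attempt = 1; attempt <= MAX_ATTEMPTS; attempt++) {
  console.log(`attempt ${attempt}: wait ${reconnectDelayMs(attempt)}ms before retrying`)
}
```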
```diff
@@ -4,169 +4,71 @@ import { createLogger } from '@sim/logger'
 import { task } from '@trigger.dev/sdk'
 import { and, inArray, lt } from 'drizzle-orm'
 import { type CleanupJobPayload, resolveCleanupScope } from '@/lib/billing/cleanup-dispatcher'
+import {
+  batchDeleteByWorkspaceAndTimestamp,
+  chunkedBatchDelete,
+  type TableCleanupResult,
+} from '@/lib/cleanup/batch-delete'
 import { snapshotService } from '@/lib/logs/execution/snapshot/service'
 import { isUsingCloudStorage, StorageService } from '@/lib/uploads'
 import { deleteFileMetadata } from '@/lib/uploads/server/metadata'

 const logger = createLogger('CleanupLogs')

-const BATCH_SIZE = 2000
-const MAX_BATCHES_PER_TIER = 10
-
-interface TierResults {
-  total: number
-  deleted: number
-  deleteFailed: number
+interface FileDeleteStats {
   filesTotal: number
   filesDeleted: number
   filesDeleteFailed: number
 }

-function emptyTierResults(): TierResults {
-  return {
-    total: 0,
-    deleted: 0,
-    deleteFailed: 0,
-    filesTotal: 0,
-    filesDeleted: 0,
-    filesDeleteFailed: 0,
-  }
-}
-
-async function deleteExecutionFiles(files: unknown, results: TierResults): Promise<void> {
+async function deleteExecutionFiles(files: unknown, stats: FileDeleteStats): Promise<void> {
   if (!isUsingCloudStorage() || !files || !Array.isArray(files)) return

   const keys = files.filter((f) => f && typeof f === 'object' && f.key).map((f) => f.key as string)
-  results.filesTotal += keys.length
+  stats.filesTotal += keys.length

   await Promise.all(
     keys.map(async (key) => {
       try {
         await StorageService.deleteFile({ key, context: 'execution' })
         await deleteFileMetadata(key)
-        results.filesDeleted++
+        stats.filesDeleted++
       } catch (fileError) {
-        results.filesDeleteFailed++
+        stats.filesDeleteFailed++
         logger.error(`Failed to delete file ${key}:`, { fileError })
       }
     })
   )
 }

-async function cleanupTier(
+async function cleanupWorkflowExecutionLogs(
   workspaceIds: string[],
   retentionDate: Date,
   label: string
-): Promise<TierResults> {
-  const results = emptyTierResults()
-  if (workspaceIds.length === 0) return results
+): Promise<TableCleanupResult & FileDeleteStats> {
+  const fileStats: FileDeleteStats = { filesTotal: 0, filesDeleted: 0, filesDeleteFailed: 0 }

-  let batchesProcessed = 0
-  let hasMore = true
-
-  while (hasMore && batchesProcessed < MAX_BATCHES_PER_TIER) {
-    const batch = await db
-      .select({
-        id: workflowExecutionLogs.id,
-        files: workflowExecutionLogs.files,
-      })
-      .from(workflowExecutionLogs)
-      .where(
-        and(
-          inArray(workflowExecutionLogs.workspaceId, workspaceIds),
-          lt(workflowExecutionLogs.startedAt, retentionDate)
+  const dbStats = await chunkedBatchDelete({
+    tableDef: workflowExecutionLogs,
+    workspaceIds,
+    tableName: `${label}/workflow_execution_logs`,
+    selectChunk: (chunkIds, limit) =>
+      db
+        .select({ id: workflowExecutionLogs.id, files: workflowExecutionLogs.files })
+        .from(workflowExecutionLogs)
+        .where(
+          and(
+            inArray(workflowExecutionLogs.workspaceId, chunkIds),
+            lt(workflowExecutionLogs.startedAt, retentionDate)
+          )
         )
-      )
-      .limit(BATCH_SIZE)
+        .limit(limit),
+    onBatch: async (rows) => {
+      for (const row of rows) await deleteExecutionFiles(row.files, fileStats)
+    },
+  })

-    results.total += batch.length
-
-    if (batch.length === 0) {
-      hasMore = false
-      break
-    }
-
-    for (const log of batch) {
-      await deleteExecutionFiles(log.files, results)
-    }
-
-    const logIds = batch.map((log) => log.id)
-    try {
-      const deleted = await db
-        .delete(workflowExecutionLogs)
-        .where(inArray(workflowExecutionLogs.id, logIds))
-        .returning({ id: workflowExecutionLogs.id })
-
-      results.deleted += deleted.length
-    } catch (deleteError) {
-      results.deleteFailed += logIds.length
-      logger.error(`Batch delete failed for ${label}:`, { deleteError })
-    }
-
-    batchesProcessed++
-    hasMore = batch.length === BATCH_SIZE
-
-    logger.info(`[${label}] Batch ${batchesProcessed}: ${batch.length} logs processed`)
-  }
-
-  return results
-}
-
-interface JobLogCleanupResults {
-  deleted: number
-  deleteFailed: number
-}
-
-async function cleanupJobExecutionLogsTier(
-  workspaceIds: string[],
-  retentionDate: Date,
-  label: string
-): Promise<JobLogCleanupResults> {
-  const results: JobLogCleanupResults = { deleted: 0, deleteFailed: 0 }
-  if (workspaceIds.length === 0) return results
-
-  let batchesProcessed = 0
-  let hasMore = true
-
-  while (hasMore && batchesProcessed < MAX_BATCHES_PER_TIER) {
-    const batch = await db
-      .select({ id: jobExecutionLogs.id })
-      .from(jobExecutionLogs)
-      .where(
-        and(
-          inArray(jobExecutionLogs.workspaceId, workspaceIds),
-          lt(jobExecutionLogs.startedAt, retentionDate)
-        )
-      )
-      .limit(BATCH_SIZE)
-
-    if (batch.length === 0) {
-      hasMore = false
-      break
-    }
-
-    const logIds = batch.map((log) => log.id)
-    try {
-      const deleted = await db
-        .delete(jobExecutionLogs)
-        .where(inArray(jobExecutionLogs.id, logIds))
-        .returning({ id: jobExecutionLogs.id })
-
-      results.deleted += deleted.length
-    } catch (deleteError) {
-      results.deleteFailed += logIds.length
-      logger.error(`Batch delete failed for ${label} (job_execution_logs):`, { deleteError })
-    }
-
-    batchesProcessed++
-    hasMore = batch.length === BATCH_SIZE
-
-    logger.info(
-      `[${label}] job_execution_logs batch ${batchesProcessed}: ${batch.length} rows processed`
-    )
-  }
-
-  return results
+  return { ...dbStats, ...fileStats }
 }

 export async function runCleanupLogs(payload: CleanupJobPayload): Promise<void> {
@@ -190,15 +92,19 @@ export async function runCleanupLogs(payload: CleanupJobPayload): Promise<void>
     `[${label}] Cleaning ${workspaceIds.length} workspaces, cutoff: ${retentionDate.toISOString()}`
   )

-  const results = await cleanupTier(workspaceIds, retentionDate, label)
+  const workflowResults = await cleanupWorkflowExecutionLogs(workspaceIds, retentionDate, label)
   logger.info(
-    `[${label}] workflow_execution_logs: ${results.deleted} deleted, ${results.deleteFailed} failed out of ${results.total} candidates`
+    `[${label}] workflow_execution_logs files: ${workflowResults.filesDeleted}/${workflowResults.filesTotal} deleted, ${workflowResults.filesDeleteFailed} failed`
   )

-  const jobLogResults = await cleanupJobExecutionLogsTier(workspaceIds, retentionDate, label)
-  logger.info(
-    `[${label}] job_execution_logs: ${jobLogResults.deleted} deleted, ${jobLogResults.deleteFailed} failed`
-  )
+  await batchDeleteByWorkspaceAndTimestamp({
+    tableDef: jobExecutionLogs,
+    workspaceIdCol: jobExecutionLogs.workspaceId,
+    timestampCol: jobExecutionLogs.startedAt,
+    workspaceIds,
+    retentionDate,
+    tableName: `${label}/job_execution_logs`,
+  })

   // Snapshot cleanup runs only on the free job to avoid running it N times for N enterprise workspaces.
   if (payload.plan === 'free') {
```
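The refactor replaces each hand-rolled select/delete `while` loop with a shared `chunkedBatchDelete` helper. The sketch below shows the general shape of that pattern — the option names are inferred from the call site above, and `deleteByIds` stands in for the real Drizzle delete, so this is illustrative rather than the actual implementation in `@/lib/cleanup/batch-delete`:

```typescript
// Simplified sketch of the chunked batch-delete pattern the refactor centralizes.
interface ChunkedBatchDeleteOptions<Row extends { id: string }> {
  workspaceIds: string[]
  tableName: string
  selectChunk: (chunkIds: string[], limit: number) => Promise<Row[]>
  onBatch?: (rows: Row[]) => Promise<void>
  deleteByIds: (ids: string[]) => Promise<number> // hypothetical stand-in
}

async function chunkedBatchDeleteSketch<Row extends { id: string }>(
  opts: ChunkedBatchDeleteOptions<Row>,
  chunkSize = 100,
  batchLimit = 2000
): Promise<{ deleted: number; failed: number }> {
  let deleted = 0
  let failed = 0
  // Split workspace IDs into chunks so the SQL `IN (...)` list stays bounded.
  for (let i = 0; i < opts.workspaceIds.length; i += chunkSize) {
    const chunkIds = opts.workspaceIds.slice(i, i + chunkSize)
    let rows: Row[]
    do {
      rows = await opts.selectChunk(chunkIds, batchLimit)
      if (rows.length === 0) break
      await opts.onBatch?.(rows) // e.g. delete attached cloud files first
      try {
        deleted += await opts.deleteByIds(rows.map((r) => r.id))
      } catch {
        failed += rows.length
      }
    } while (rows.length === batchLimit)
  }
  return { deleted, failed }
}
```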
```diff
@@ -18,9 +18,8 @@ import { and, inArray, isNotNull, lt } from 'drizzle-orm'
 import { type CleanupJobPayload, resolveCleanupScope } from '@/lib/billing/cleanup-dispatcher'
 import {
   batchDeleteByWorkspaceAndTimestamp,
-  DEFAULT_BATCH_SIZE,
-  DEFAULT_MAX_BATCHES_PER_TABLE,
   deleteRowsById,
+  selectRowsByIdChunks,
 } from '@/lib/cleanup/batch-delete'
 import { prepareChatCleanup } from '@/lib/cleanup/chat-cleanup'
 import type { StorageContext } from '@/lib/uploads'
@@ -44,35 +43,37 @@ async function selectExpiredWorkspaceFiles(
   workspaceIds: string[],
   retentionDate: Date
 ): Promise<WorkspaceFileScope> {
-  const limit = DEFAULT_BATCH_SIZE * DEFAULT_MAX_BATCHES_PER_TABLE
-
   const [legacyRows, multiContextRows] = await Promise.all([
-    db
-      .select({ id: workspaceFile.id, key: workspaceFile.key })
-      .from(workspaceFile)
-      .where(
-        and(
-          inArray(workspaceFile.workspaceId, workspaceIds),
-          isNotNull(workspaceFile.deletedAt),
-          lt(workspaceFile.deletedAt, retentionDate)
+    selectRowsByIdChunks(workspaceIds, (chunkIds, chunkLimit) =>
+      db
+        .select({ id: workspaceFile.id, key: workspaceFile.key })
+        .from(workspaceFile)
+        .where(
+          and(
+            inArray(workspaceFile.workspaceId, chunkIds),
+            isNotNull(workspaceFile.deletedAt),
+            lt(workspaceFile.deletedAt, retentionDate)
+          )
         )
-      )
-      .limit(limit),
-    db
-      .select({
-        id: workspaceFiles.id,
-        key: workspaceFiles.key,
-        context: workspaceFiles.context,
-      })
-      .from(workspaceFiles)
-      .where(
-        and(
-          inArray(workspaceFiles.workspaceId, workspaceIds),
-          isNotNull(workspaceFiles.deletedAt),
-          lt(workspaceFiles.deletedAt, retentionDate)
+        .limit(chunkLimit)
+    ),
+    selectRowsByIdChunks(workspaceIds, (chunkIds, chunkLimit) =>
+      db
+        .select({
+          id: workspaceFiles.id,
+          key: workspaceFiles.key,
+          context: workspaceFiles.context,
+        })
+        .from(workspaceFiles)
+        .where(
+          and(
+            inArray(workspaceFiles.workspaceId, chunkIds),
+            isNotNull(workspaceFiles.deletedAt),
+            lt(workspaceFiles.deletedAt, retentionDate)
+          )
         )
-      )
-      .limit(limit),
+        .limit(chunkLimit)
+    ),
   ])

   return {
@@ -182,17 +183,19 @@ export async function runCleanupSoftDeletes(payload: CleanupJobPayload): Promise
   // (chats + S3) AND the DB deletes below — selecting twice could return
   // different subsets above the LIMIT cap and orphan or prematurely purge data.
   const [doomedWorkflows, fileScope] = await Promise.all([
-    db
-      .select({ id: workflow.id })
-      .from(workflow)
-      .where(
-        and(
-          inArray(workflow.workspaceId, workspaceIds),
-          isNotNull(workflow.archivedAt),
-          lt(workflow.archivedAt, retentionDate)
+    selectRowsByIdChunks(workspaceIds, (chunkIds, chunkLimit) =>
+      db
+        .select({ id: workflow.id })
+        .from(workflow)
+        .where(
+          and(
+            inArray(workflow.workspaceId, chunkIds),
+            isNotNull(workflow.archivedAt),
+            lt(workflow.archivedAt, retentionDate)
+          )
         )
-      )
-      .limit(DEFAULT_BATCH_SIZE * DEFAULT_MAX_BATCHES_PER_TABLE),
+        .limit(chunkLimit)
+    ),
     selectExpiredWorkspaceFiles(workspaceIds, retentionDate),
   ])

@@ -200,11 +203,13 @@ export async function runCleanupSoftDeletes(payload: CleanupJobPayload): Promise
   let chatCleanup: { execute: () => Promise<void> } | null = null

   if (doomedWorkflowIds.length > 0) {
-    const doomedChats = await db
-      .select({ id: copilotChats.id })
-      .from(copilotChats)
-      .where(inArray(copilotChats.workflowId, doomedWorkflowIds))
-      .limit(DEFAULT_BATCH_SIZE * DEFAULT_MAX_BATCHES_PER_TABLE)
+    const doomedChats = await selectRowsByIdChunks(doomedWorkflowIds, (chunkIds, chunkLimit) =>
+      db
+        .select({ id: copilotChats.id })
+        .from(copilotChats)
+        .where(inArray(copilotChats.workflowId, chunkIds))
+        .limit(chunkLimit)
+    )

     const doomedChatIds = doomedChats.map((c) => c.id)
     if (doomedChatIds.length > 0) {
```
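`selectRowsByIdChunks` appears in these hunks only through its call sites: it takes a list of IDs plus a callback receiving `(chunkIds, chunkLimit)` and flattens the per-chunk results, keeping the SQL `IN (...)` lists bounded. A sketch under those inferred assumptions — not the real implementation:

```typescript
// Sketch of the select-by-chunks helper, inferred from its call sites above;
// the real implementation lives in '@/lib/cleanup/batch-delete'.
async function selectRowsByIdChunksSketch<Row>(
  ids: string[],
  selectChunk: (chunkIds: string[], chunkLimit: number) => Promise<Row[]>,
  chunkSize = 100,
  chunkLimit = 20000 // assumed analogue of DEFAULT_BATCH_SIZE * DEFAULT_MAX_BATCHES_PER_TABLE
): Promise<Row[]> {
  const results: Row[] = []
  for (let i = 0; i < ids.length; i += chunkSize) {
    const rows = await selectChunk(ids.slice(i, i + chunkSize), chunkLimit)
    results.push(...rows)
  }
  return results
}
```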
```diff
@@ -13,9 +13,8 @@ import { and, inArray, lt, sql } from 'drizzle-orm'
 import { type CleanupJobPayload, resolveCleanupScope } from '@/lib/billing/cleanup-dispatcher'
 import {
   batchDeleteByWorkspaceAndTimestamp,
-  DEFAULT_BATCH_SIZE,
-  DEFAULT_MAX_BATCHES_PER_TABLE,
   deleteRowsById,
+  selectRowsByIdChunks,
   type TableCleanupResult,
 } from '@/lib/cleanup/batch-delete'
 import { prepareChatCleanup } from '@/lib/cleanup/chat-cleanup'
@@ -67,13 +66,15 @@ async function cleanupRunChildren(
 ): Promise<TableCleanupResult[]> {
   if (workspaceIds.length === 0) return []

-  const runIds = await db
-    .select({ id: copilotRuns.id })
-    .from(copilotRuns)
-    .where(
-      and(inArray(copilotRuns.workspaceId, workspaceIds), lt(copilotRuns.updatedAt, retentionDate))
-    )
-    .limit(DEFAULT_BATCH_SIZE * DEFAULT_MAX_BATCHES_PER_TABLE)
+  const runIds = await selectRowsByIdChunks(workspaceIds, (chunkIds, chunkLimit) =>
+    db
+      .select({ id: copilotRuns.id })
+      .from(copilotRuns)
+      .where(
+        and(inArray(copilotRuns.workspaceId, chunkIds), lt(copilotRuns.updatedAt, retentionDate))
+      )
+      .limit(chunkLimit)
+  )

   if (runIds.length === 0) {
     return RUN_CHILD_TABLES.map((t) => ({ table: `${label}/${t.name}`, deleted: 0, failed: 0 }))
@@ -107,17 +108,15 @@ export async function runCleanupTasks(payload: CleanupJobPayload): Promise<void>
     `[${label}] Processing ${workspaceIds.length} workspaces, cutoff: ${retentionDate.toISOString()}`
   )

   // Collect chat IDs before deleting so we can clean up the copilot backend after
-  const doomedChats = await db
-    .select({ id: copilotChats.id })
-    .from(copilotChats)
-    .where(
-      and(
-        inArray(copilotChats.workspaceId, workspaceIds),
-        lt(copilotChats.updatedAt, retentionDate)
-      )
-    )
-    .limit(DEFAULT_BATCH_SIZE * DEFAULT_MAX_BATCHES_PER_TABLE)
+  const doomedChats = await selectRowsByIdChunks(workspaceIds, (chunkIds, chunkLimit) =>
+    db
+      .select({ id: copilotChats.id })
+      .from(copilotChats)
+      .where(
+        and(inArray(copilotChats.workspaceId, chunkIds), lt(copilotChats.updatedAt, retentionDate))
+      )
+      .limit(chunkLimit)
+  )

   const doomedChatIds = doomedChats.map((c) => c.id)
```
```diff
@@ -113,7 +113,6 @@ export const AshbyBlock: BlockConfig = {
   id: 'email',
   title: 'Email',
   type: 'short-input',
-  required: { field: 'operation', value: 'create_candidate' },
   placeholder: 'Email address',
   condition: { field: 'operation', value: ['create_candidate', 'update_candidate'] },
 },
@@ -308,14 +307,6 @@ Output only the ISO 8601 timestamp string, nothing else.`,
   condition: { field: 'operation', value: 'list_applications' },
   mode: 'advanced',
 },
-{
-  id: 'filterCandidateId',
-  title: 'Candidate ID Filter',
-  type: 'short-input',
-  placeholder: 'Filter by candidate UUID',
-  condition: { field: 'operation', value: 'list_applications' },
-  mode: 'advanced',
-},
 {
   id: 'createdAfter',
   title: 'Created After',
@@ -366,6 +357,7 @@ Output only the ISO 8601 timestamp string, nothing else.`,
       'list_openings',
       'list_users',
       'list_interviews',
+      'list_candidate_tags',
     ],
   },
   mode: 'advanced',
@@ -386,10 +378,43 @@ Output only the ISO 8601 timestamp string, nothing else.`,
       'list_openings',
       'list_users',
       'list_interviews',
+      'list_candidate_tags',
     ],
   },
   mode: 'advanced',
 },
+{
+  id: 'syncToken',
+  title: 'Sync Token',
+  type: 'short-input',
+  placeholder: 'Sync token for incremental updates',
+  condition: { field: 'operation', value: 'list_candidate_tags' },
+  mode: 'advanced',
+},
+{
+  id: 'includeArchived',
+  title: 'Include Archived',
+  type: 'switch',
+  condition: {
+    field: 'operation',
+    value: ['list_candidate_tags', 'list_archive_reasons'],
+  },
+  mode: 'advanced',
+},
+{
+  id: 'expandApplicationFormDefinition',
+  title: 'Include Application Form Definition',
+  type: 'switch',
+  condition: { field: 'operation', value: 'get_job_posting' },
+  mode: 'advanced',
+},
+{
+  id: 'expandSurveyFormDefinitions',
+  title: 'Include Survey Form Definitions',
+  type: 'switch',
+  condition: { field: 'operation', value: 'get_job_posting' },
+  mode: 'advanced',
+},
 {
   id: 'tagId',
   title: 'Tag ID',
@@ -476,11 +501,25 @@ Output only the ISO 8601 timestamp string, nothing else.`,
   if (params.searchEmail) result.email = params.searchEmail
   if (params.filterStatus) result.status = params.filterStatus
   if (params.filterJobId) result.jobId = params.filterJobId
-  if (params.filterCandidateId) result.candidateId = params.filterCandidateId
   if (params.jobStatus) result.status = params.jobStatus
   if (params.sendNotifications === 'true' || params.sendNotifications === true) {
     result.sendNotifications = true
   }
+  if (params.includeArchived === 'true' || params.includeArchived === true) {
+    result.includeArchived = true
+  }
+  if (
+    params.expandApplicationFormDefinition === 'true' ||
+    params.expandApplicationFormDefinition === true
+  ) {
+    result.expandApplicationFormDefinition = true
+  }
+  if (
+    params.expandSurveyFormDefinitions === 'true' ||
+    params.expandSurveyFormDefinitions === true
+  ) {
+    result.expandSurveyFormDefinitions = true
+  }
   if (params.appCandidateId) result.candidateId = params.appCandidateId
   if (params.appCreatedAt) result.createdAt = params.appCreatedAt
   if (params.updateName) result.name = params.updateName
@@ -515,11 +554,20 @@ Output only the ISO 8601 timestamp string, nothing else.`,
   sendNotifications: { type: 'boolean', description: 'Send notifications' },
   filterStatus: { type: 'string', description: 'Application status filter' },
   filterJobId: { type: 'string', description: 'Job UUID filter' },
-  filterCandidateId: { type: 'string', description: 'Candidate UUID filter' },
   createdAfter: { type: 'string', description: 'Filter by creation date' },
   jobStatus: { type: 'string', description: 'Job status filter' },
   cursor: { type: 'string', description: 'Pagination cursor' },
   perPage: { type: 'number', description: 'Results per page' },
+  syncToken: { type: 'string', description: 'Sync token for incremental updates' },
+  includeArchived: { type: 'boolean', description: 'Include archived records' },
+  expandApplicationFormDefinition: {
+    type: 'boolean',
+    description: 'Include application form definition in job posting',
+  },
+  expandSurveyFormDefinitions: {
+    type: 'boolean',
+    description: 'Include survey form definitions in job posting',
+  },
   tagId: { type: 'string', description: 'Tag UUID' },
   offerId: { type: 'string', description: 'Offer UUID' },
   jobPostingId: { type: 'string', description: 'Job posting UUID' },
@@ -530,93 +578,113 @@ Output only the ISO 8601 timestamp string, nothing else.`,
   candidates: {
     type: 'json',
     description:
-      'List of candidates (id, name, primaryEmailAddress, primaryPhoneNumber, createdAt, updatedAt)',
+      'List of candidates with rich fields (id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses[], phoneNumbers[], socialLinks[], linkedInUrl, githubUrl, profileUrl, position, company, school, timezone, location with locationComponents[], tags[], applicationIds[], customFields[], resumeFileHandle, fileHandles[], source with sourceType, creditedToUser, fraudStatus, createdAt, updatedAt)',
   },
   jobs: {
     type: 'json',
     description:
-      'List of jobs (id, title, status, employmentType, departmentId, locationId, createdAt, updatedAt)',
+      'List of jobs (id, title, confidential, status, employmentType, locationId, departmentId, defaultInterviewPlanId, interviewPlanIds[], customFields[], jobPostingIds[], customRequisitionId, brandId, hiringTeam[], author, createdAt, updatedAt, openedAt, closedAt, location with address, openings[] with latestVersion, compensation with compensationTiers[])',
   },
   applications: {
     type: 'json',
     description:
-      'List of applications (id, status, candidate, job, currentInterviewStage, source, createdAt, updatedAt)',
+      'List of applications (id, status, customFields[], candidate summary, currentInterviewStage, source with sourceType, archiveReason with customFields[], archivedAt, job summary, creditedToUser, hiringTeam[], appliedViaJobPostingId, submitterClientIp, submitterUserAgent, createdAt, updatedAt)',
   },
   notes: {
     type: 'json',
-    description: 'List of notes (id, content, author, createdAt)',
+    description: 'List of notes (id, content, author, isPrivate, createdAt)',
   },
   offers: {
     type: 'json',
     description:
-      'List of offers (id, offerStatus, acceptanceStatus, applicationId, startDate, salary, openingId)',
+      'List of offers (id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion with id/startDate/salary/createdAt/openingId/customFields[]/fileHandles[]/author/approvalStatus)',
   },
   archiveReasons: {
     type: 'json',
-    description: 'List of archive reasons (id, text, reasonType, isArchived)',
+    description:
+      'List of archive reasons (id, text, reasonType [RejectedByCandidate/RejectedByOrg/Other], isArchived)',
   },
   sources: {
     type: 'json',
-    description: 'List of sources (id, title, isArchived)',
+    description: 'List of sources (id, title, isArchived, sourceType {id, title, isArchived})',
   },
   customFields: {
     type: 'json',
-    description: 'List of custom fields (id, title, fieldType, objectType, isArchived)',
+    description:
+      'List of custom field definitions (id, title, isPrivate, fieldType, objectType, isArchived, isRequired, selectableValues[] {label, value, isArchived})',
   },
   departments: {
     type: 'json',
-    description: 'List of departments (id, name, isArchived, parentId)',
+    description:
+      'List of departments (id, name, externalName, isArchived, parentId, createdAt, updatedAt)',
   },
   locations: {
     type: 'json',
-    description: 'List of locations (id, name, isArchived, isRemote, address)',
+    description:
+      'List of locations (id, name, externalName, isArchived, isRemote, workplaceType, parentLocationId, type, address with addressCountry/Region/Locality/postalCode/streetAddress)',
   },
   jobPostings: {
     type: 'json',
     description:
-      'List of job postings (id, title, jobId, locationName, departmentName, employmentType, isListed, publishedDate)',
+      'List of job postings (id, title, jobId, departmentName, teamName, locationName, locationIds, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensationTierSummary, shouldDisplayCompensationOnJobBoard, updatedAt)',
   },
   openings: {
     type: 'json',
-    description: 'List of openings (id, openingState, isArchived, openedAt, closedAt)',
+    description:
+      'List of openings (id, openedAt, closedAt, isArchived, archivedAt, closeReasonId, openingState, latestVersion with identifier/description/authorId/createdAt/teamId/jobIds[]/targetHireDate/targetStartDate/isBackfill/employmentType/locationIds[]/hiringTeam[]/customFields[])',
   },
   users: {
     type: 'json',
-    description: 'List of users (id, firstName, lastName, email, isEnabled, globalRole)',
+    description:
+      'List of users (id, firstName, lastName, email, globalRole, isEnabled, updatedAt, managerId)',
   },
   interviewSchedules: {
     type: 'json',
     description:
-      'List of interview schedules (id, applicationId, interviewStageId, status, createdAt)',
+      'List of interview schedules (id, applicationId, interviewStageId, interviewEvents[] with interviewerUserIds/startTime/endTime/feedbackLink/location/meetingLink/hasSubmittedFeedback, status, scheduledBy, createdAt, updatedAt)',
   },
+  tags: {
+    type: 'json',
+    description: 'List of candidate tags (id, title, isArchived)',
+  },
   stageId: { type: 'string', description: 'Interview stage UUID after stage change' },
   success: { type: 'boolean', description: 'Whether the operation succeeded' },
   offerStatus: {
     type: 'string',
     description: 'Offer status (e.g. WaitingOnCandidateResponse, CandidateAccepted)',
   },
   acceptanceStatus: {
     type: 'string',
     description: 'Acceptance status (e.g. Accepted, Declined, Pending)',
   },
   applicationId: { type: 'string', description: 'Associated application UUID' },
   openingId: { type: 'string', description: 'Opening UUID associated with the offer' },
   salary: {
     type: 'json',
     description: 'Salary details from latest version (currencyCode, value)',
   },
   startDate: { type: 'string', description: 'Offer start date from latest version' },
   id: { type: 'string', description: 'Resource UUID' },
   name: { type: 'string', description: 'Resource name' },
-  title: { type: 'string', description: 'Job title' },
+  title: { type: 'string', description: 'Job title or job posting title' },
   status: { type: 'string', description: 'Status' },
   noteId: { type: 'string', description: 'Created note UUID' },
   candidate: {
     type: 'json',
     description:
       'Candidate details (id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses[], phoneNumbers[], socialLinks[], customFields[], source, creditedToUser, createdAt, updatedAt)',
   },
   job: {
     type: 'json',
     description:
       'Job details (id, title, status, employmentType, locationId, departmentId, hiringTeam[], author, location, openings[], compensation, createdAt, updatedAt)',
   },
   application: {
     type: 'json',
     description:
       'Application details (id, status, customFields[], candidate, currentInterviewStage, source, archiveReason, job, hiringTeam[], createdAt, updatedAt)',
   },
   offer: {
     type: 'json',
     description:
       'Offer details (id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion)',
   },
   jobPosting: {
     type: 'json',
     description:
       'Job posting details (id, title, descriptionPlain, descriptionHtml, descriptionSocial, descriptionParts, departmentName, teamName, teamNameHierarchy[], jobId, locationName, locationIds, linkedData, address, isRemote, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensation, updatedAt)',
   },
   content: { type: 'string', description: 'Note content' },
   author: {
     type: 'json',
     description: 'Note author (id, firstName, lastName, email, globalRole, isEnabled)',
   },
+  isPrivate: { type: 'boolean', description: 'Whether the note is private' },
   createdAt: { type: 'string', description: 'ISO 8601 creation timestamp' },
   moreDataAvailable: { type: 'boolean', description: 'Whether more pages exist' },
   nextCursor: { type: 'string', description: 'Pagination cursor for next page' },
+  syncToken: { type: 'string', description: 'Sync token for incremental updates' },
 },
 }
```
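Because sub-block values can arrive as the string `'true'` from the UI or as a real boolean from a tool call, the params mapping above repeats the same two-sided check for every switch-style field. The equivalent coercion as a helper, for illustration:

```typescript
// The Ashby params mapping above repeats `x === 'true' || x === true` for each
// switch-style field; this helper expresses the same coercion.
function isTruthyFlag(value: unknown): boolean {
  return value === true || value === 'true'
}

// e.g. if (isTruthyFlag(params.includeArchived)) result.includeArchived = true
```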
```diff
@@ -46,6 +46,10 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
   { label: 'Get User Presence', id: 'get_user_presence' },
   { label: 'Edit Canvas', id: 'edit_canvas' },
   { label: 'Create Channel Canvas', id: 'create_channel_canvas' },
+  { label: 'Get Canvas Info', id: 'get_canvas' },
+  { label: 'List Canvases', id: 'list_canvases' },
+  { label: 'Lookup Canvas Sections', id: 'lookup_canvas_sections' },
+  { label: 'Delete Canvas', id: 'delete_canvas' },
   { label: 'Create Conversation', id: 'create_conversation' },
   { label: 'Invite to Conversation', id: 'invite_to_conversation' },
   { label: 'Open View', id: 'open_view' },
@@ -146,6 +150,9 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
       'get_user',
       'get_user_presence',
       'edit_canvas',
+      'get_canvas',
+      'lookup_canvas_sections',
+      'delete_canvas',
       'create_conversation',
       'open_view',
       'update_view',
@@ -160,7 +167,11 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
       },
     }
   },
-  required: true,
+  required: {
+    field: 'operation',
+    value: 'list_canvases',
+    not: true,
+  },
 },
 {
   id: 'manualChannel',
@@ -182,6 +193,9 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
       'get_user',
       'get_user_presence',
       'edit_canvas',
+      'get_canvas',
+      'lookup_canvas_sections',
+      'delete_canvas',
       'create_conversation',
       'open_view',
       'update_view',
@@ -196,7 +210,11 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
       },
     }
   },
-  required: true,
+  required: {
+    field: 'operation',
+    value: 'list_canvases',
+    not: true,
+  },
 },
 {
   id: 'dmUserId',
@@ -820,6 +838,121 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
     value: 'create_channel_canvas',
   },
 },
+// Get Canvas specific fields
+{
+  id: 'getCanvasId',
+  title: 'Canvas ID',
+  type: 'short-input',
+  placeholder: 'Enter canvas ID (e.g., F1234ABCD)',
+  condition: {
+    field: 'operation',
+    value: 'get_canvas',
+  },
+  required: true,
+},
+// List Canvases specific fields
+{
+  id: 'canvasListCount',
+  title: 'Canvas Limit',
+  type: 'short-input',
+  placeholder: '100',
+  condition: {
+    field: 'operation',
+    value: 'list_canvases',
+  },
+  mode: 'advanced',
+},
+{
+  id: 'canvasListPage',
+  title: 'Page',
+  type: 'short-input',
+  placeholder: '1',
+  condition: {
+    field: 'operation',
+    value: 'list_canvases',
+  },
+  mode: 'advanced',
+},
+{
+  id: 'canvasListUser',
+  title: 'User ID',
+  type: 'short-input',
+  placeholder: 'Optional creator filter (e.g., U1234567890)',
+  condition: {
+    field: 'operation',
+    value: 'list_canvases',
+  },
+  mode: 'advanced',
+},
+{
+  id: 'canvasListTsFrom',
+  title: 'Created After',
+  type: 'short-input',
+  placeholder: 'Unix timestamp (e.g., 123456789)',
+  condition: {
+    field: 'operation',
+    value: 'list_canvases',
+  },
+  mode: 'advanced',
+},
+{
+  id: 'canvasListTsTo',
+  title: 'Created Before',
+  type: 'short-input',
+  placeholder: 'Unix timestamp (e.g., 123456789)',
+  condition: {
+    field: 'operation',
+    value: 'list_canvases',
+  },
+  mode: 'advanced',
+},
+{
+  id: 'canvasListTeamId',
+  title: 'Team ID',
+  type: 'short-input',
+  placeholder: 'Encoded team ID (org tokens only)',
+  condition: {
+    field: 'operation',
+    value: 'list_canvases',
+  },
+  mode: 'advanced',
+},
+// Lookup Canvas Sections specific fields
+{
+  id: 'lookupCanvasId',
+  title: 'Canvas ID',
+  type: 'short-input',
+  placeholder: 'Enter canvas ID (e.g., F1234ABCD)',
+  condition: {
+    field: 'operation',
+    value: 'lookup_canvas_sections',
+  },
+  required: true,
+},
+{
+  id: 'sectionCriteria',
+  title: 'Section Criteria',
+  type: 'code',
+  language: 'json',
+  placeholder: '{"section_types":["h1"],"contains_text":"Roadmap"}',
+  condition: {
+    field: 'operation',
+    value: 'lookup_canvas_sections',
+  },
+  required: true,
+},
+// Delete Canvas specific fields
+{
+  id: 'deleteCanvasId',
+  title: 'Canvas ID',
+  type: 'short-input',
+  placeholder: 'Enter canvas ID (e.g., F1234ABCD)',
+  condition: {
+    field: 'operation',
+    value: 'delete_canvas',
+  },
+  required: true,
+},
 // Create Conversation specific fields
 {
   id: 'conversationName',
@@ -1058,6 +1191,10 @@ Do not include any explanations, markdown formatting, or other text outside the
   'slack_get_user_presence',
   'slack_edit_canvas',
   'slack_create_channel_canvas',
+  'slack_get_canvas',
+  'slack_list_canvases',
+  'slack_lookup_canvas_sections',
+  'slack_delete_canvas',
   'slack_create_conversation',
   'slack_invite_to_conversation',
   'slack_open_view',
@@ -1106,6 +1243,14 @@ Do not include any explanations, markdown formatting, or other text outside the
     return 'slack_edit_canvas'
   case 'create_channel_canvas':
     return 'slack_create_channel_canvas'
+  case 'get_canvas':
+    return 'slack_get_canvas'
+  case 'list_canvases':
+    return 'slack_list_canvases'
+  case 'lookup_canvas_sections':
+    return 'slack_lookup_canvas_sections'
+  case 'delete_canvas':
+    return 'slack_delete_canvas'
   case 'create_conversation':
     return 'slack_create_conversation'
   case 'invite_to_conversation':
@@ -1164,6 +1309,16 @@ Do not include any explanations, markdown formatting, or other text outside the
   canvasTitle,
   channelCanvasTitle,
   channelCanvasContent,
+  getCanvasId,
+  canvasListCount,
+  canvasListPage,
+  canvasListUser,
+  canvasListTsFrom,
+  canvasListTsTo,
+  canvasListTeamId,
+  lookupCanvasId,
+  sectionCriteria,
+  deleteCanvasId,
   conversationName,
   isPrivate,
   teamId,
@@ -1343,6 +1498,46 @@ Do not include any explanations, markdown formatting, or other text outside the
     }
     break

+  case 'get_canvas':
+    baseParams.canvasId = getCanvasId
+    break
+
+  case 'list_canvases':
+    if (canvasListCount) {
+      const parsedCount = Number.parseInt(canvasListCount, 10)
+      if (!Number.isNaN(parsedCount) && parsedCount > 0) {
+        baseParams.count = parsedCount
+      }
+    }
+    if (canvasListPage) {
+      const parsedPage = Number.parseInt(canvasListPage, 10)
+      if (!Number.isNaN(parsedPage) && parsedPage > 0) {
+        baseParams.page = parsedPage
+      }
+    }
+    if (canvasListUser) {
+      baseParams.user = String(canvasListUser).trim()
+    }
+    if (canvasListTsFrom) {
+      baseParams.tsFrom = String(canvasListTsFrom).trim()
+    }
+    if (canvasListTsTo) {
+      baseParams.tsTo = String(canvasListTsTo).trim()
+    }
+    if (canvasListTeamId) {
+      baseParams.teamId = String(canvasListTeamId).trim()
+    }
+    break
+
+  case 'lookup_canvas_sections':
+    baseParams.canvasId = lookupCanvasId
+    baseParams.criteria = sectionCriteria
+    break
+
+  case 'delete_canvas':
+    baseParams.canvasId = deleteCanvasId
+    break
+
   case 'create_conversation':
     baseParams.name = conversationName
     baseParams.isPrivate = isPrivate === 'true'
@@ -1461,6 +1656,23 @@ Do not include any explanations, markdown formatting, or other text outside the
   // Create Channel Canvas inputs
   channelCanvasTitle: { type: 'string', description: 'Title for channel canvas' },
   channelCanvasContent: { type: 'string', description: 'Content for channel canvas' },
+  // Canvas management inputs
+  getCanvasId: { type: 'string', description: 'Canvas ID to retrieve' },
+  canvasListCount: { type: 'string', description: 'Maximum number of canvases to return' },
+  canvasListPage: { type: 'string', description: 'Canvas list page number' },
+  canvasListUser: { type: 'string', description: 'Optional canvas creator user filter' },
+  canvasListTsFrom: {
+    type: 'string',
+    description: 'Filter canvases created after this timestamp',
+  },
+  canvasListTsTo: {
+    type: 'string',
+    description: 'Filter canvases created before this timestamp',
+  },
+  canvasListTeamId: { type: 'string', description: 'Encoded team ID for org tokens' },
+  lookupCanvasId: { type: 'string', description: 'Canvas ID to search for sections' },
+  sectionCriteria: { type: 'json', description: 'Canvas section lookup criteria' },
+  deleteCanvasId: { type: 'string', description: 'Canvas ID to delete' },
   // Create Conversation inputs
   conversationName: { type: 'string', description: 'Name for the new channel' },
   isPrivate: { type: 'string', description: 'Create as private channel (true/false)' },
@@ -1511,6 +1723,26 @@ Do not include any explanations, markdown formatting, or other text outside the
   // slack_canvas outputs
   canvas_id: { type: 'string', description: 'Canvas identifier for created canvases' },
   title: { type: 'string', description: 'Canvas title' },
+  canvas: {
+    type: 'json',
+    description: 'Canvas file metadata returned by Slack',
+  },
+  canvases: {
+    type: 'json',
+    description: 'Array of canvas file objects returned by Slack',
+  },
+  paging: {
+    type: 'json',
+    description: 'Pagination information for listed canvases',
+  },
+  sections: {
+    type: 'json',
+    description: 'Canvas section IDs returned by Slack section lookup',
+  },
+  ok: {
+    type: 'boolean',
+    description: 'Whether Slack completed the canvas operation successfully',
+  },

   // slack_message_reader outputs (read operation)
   messages: {
```
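The `list_canvases` branch above only forwards `count` and `page` when they parse as positive integers. The same guard extracted into a helper (names are illustrative):

```typescript
// Validate a numeric text input before adding it to the request params,
// mirroring the repeated Number.parseInt checks in the diff above.
function parsePositiveInt(raw: string | undefined): number | undefined {
  if (!raw) return undefined
  const parsed = Number.parseInt(raw, 10)
  return Number.isNaN(parsed) || parsed <= 0 ? undefined : parsed
}

// Usage sketch:
// const count = parsePositiveInt(canvasListCount)
// if (count) baseParams.count = count
```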
@@ -34,6 +34,7 @@ import {
|
||||
type ExecutionContext,
|
||||
getNextExecutionOrder,
|
||||
type NormalizedBlockOutput,
|
||||
type StreamingExecution,
|
||||
} from '@/executor/types'
|
||||
import { streamingResponseFormatProcessor } from '@/executor/utils'
|
||||
import { buildBlockExecutionError, normalizeError } from '@/executor/utils/errors'
|
||||
@@ -140,7 +141,7 @@ export class BlockExecutor {
|
||||
|
||||
let normalizedOutput: NormalizedBlockOutput
|
||||
if (isStreamingExecution) {
|
||||
const streamingExec = output as { stream: ReadableStream; execution: any }
|
||||
const streamingExec = output as StreamingExecution
|
||||
|
||||
if (ctx.onStream) {
|
||||
await this.handleStreamingExecution(
|
||||
@@ -602,7 +603,7 @@ export class BlockExecutor {
|
||||
ctx: ExecutionContext,
|
||||
node: DAGNode,
|
||||
block: SerializedBlock,
|
||||
streamingExec: { stream: ReadableStream; execution: any },
|
||||
streamingExec: StreamingExecution,
|
||||
resolvedInputs: Record<string, any>,
|
||||
selectedOutputs: string[]
|
||||
): Promise<void> {
|
||||
@@ -613,56 +614,39 @@ export class BlockExecutor {
|
||||
(block.config?.params as Record<string, any> | undefined)?.responseFormat ??
|
||||
(block.config as Record<string, any> | undefined)?.responseFormat
|
||||
|
||||
const stream = streamingExec.stream
|
||||
if (typeof stream.tee !== 'function') {
|
||||
await this.forwardStream(ctx, blockId, streamingExec, stream, responseFormat, selectedOutputs)
|
||||
return
|
||||
}
|
||||
const sourceReader = streamingExec.stream.getReader()
|
||||
const decoder = new TextDecoder()
|
||||
const accumulated: string[] = []
|
||||
let drainError: unknown
|
||||
let sourceFullyDrained = false
|
||||
|
||||
const [clientStream, executorStream] = stream.tee()
|
||||
const clientSource = new ReadableStream<Uint8Array>({
|
||||
async pull(controller) {
|
||||
try {
|
||||
const { done, value } = await sourceReader.read()
|
||||
if (done) {
|
||||
const tail = decoder.decode()
|
||||
if (tail) accumulated.push(tail)
|
||||
sourceFullyDrained = true
|
||||
controller.close()
|
||||
return
|
||||
}
|
||||
accumulated.push(decoder.decode(value, { stream: true }))
|
||||
controller.enqueue(value)
|
||||
} catch (error) {
|
||||
drainError = error
|
||||
controller.error(error)
|
||||
}
|
||||
},
|
||||
async cancel(reason) {
|
||||
try {
|
||||
await sourceReader.cancel(reason)
|
||||
} catch {}
|
||||
},
|
||||
})
|
||||
|
||||
const processedClientStream = streamingResponseFormatProcessor.processStream(
|
||||
clientStream,
|
||||
blockId,
|
||||
selectedOutputs,
|
||||
responseFormat
|
||||
)
|
||||
|
||||
const clientStreamingExec = {
|
||||
...streamingExec,
|
||||
stream: processedClientStream,
|
||||
}
    const executorConsumption = this.consumeExecutorStream(
      executorStream,
      streamingExec,
      blockId,
      responseFormat
    )

    const clientConsumption = (async () => {
      try {
        await ctx.onStream?.(clientStreamingExec)
      } catch (error) {
        this.execLogger.error('Error in onStream callback', { blockId, error })
        // Cancel the client stream to release the tee'd buffer
        await processedClientStream.cancel().catch(() => {})
      }
    })()

    await Promise.all([clientConsumption, executorConsumption])
  }

  private async forwardStream(
    ctx: ExecutionContext,
    blockId: string,
    streamingExec: { stream: ReadableStream; execution: any },
    stream: ReadableStream,
    responseFormat: any,
    selectedOutputs: string[]
  ): Promise<void> {
    const processedStream = streamingResponseFormatProcessor.processStream(
      stream,
      clientSource,
      blockId,
      selectedOutputs,
      responseFormat
@@ -670,72 +654,75 @@ export class BlockExecutor {

    try {
      await ctx.onStream?.({
        ...streamingExec,
        stream: processedStream,
        stream: processedClientStream,
        execution: streamingExec.execution,
      })
    } catch (error) {
      this.execLogger.error('Error in onStream callback', { blockId, error })
      await processedStream.cancel().catch(() => {})
    }
  }

  private async consumeExecutorStream(
    stream: ReadableStream,
    streamingExec: { execution: any },
    blockId: string,
    responseFormat: any
  ): Promise<void> {
    const reader = stream.getReader()
    const decoder = new TextDecoder()
    const chunks: string[] = []

    try {
      while (true) {
        const { done, value } = await reader.read()
        if (done) break
        chunks.push(decoder.decode(value, { stream: true }))
      }
      const tail = decoder.decode()
      if (tail) chunks.push(tail)
    } catch (error) {
      this.execLogger.error('Error reading executor stream for block', { blockId, error })
      await processedClientStream.cancel().catch(() => {})
    } finally {
      try {
        await reader.cancel().catch(() => {})
        sourceReader.releaseLock()
      } catch {}
    }

    const fullContent = chunks.join('')
    if (drainError) {
      this.execLogger.error('Error reading stream for block', { blockId, error: drainError })
      return
    }

    // If the onStream consumer exited before the source drained (e.g. it caught
    // an internal error and returned normally), `accumulated` holds a truncated
    // response. Persisting that to memory or setting it as the block output
    // would corrupt downstream state — skip and log instead.
    if (!sourceFullyDrained) {
      this.execLogger.warn(
        'Stream consumer exited before source drained; skipping content persistence',
        {
          blockId,
        }
      )
      return
    }

    const fullContent = accumulated.join('')
    if (!fullContent) {
      return
    }

    const executionOutput = streamingExec.execution?.output
    if (!executionOutput || typeof executionOutput !== 'object') {
      return
    }

    if (responseFormat) {
      try {
        const parsed = JSON.parse(fullContent.trim())

        streamingExec.execution.output = {
          ...parsed,
          tokens: executionOutput.tokens,
          toolCalls: executionOutput.toolCalls,
          providerTiming: executionOutput.providerTiming,
          cost: executionOutput.cost,
          model: executionOutput.model,
    if (executionOutput && typeof executionOutput === 'object') {
      let parsedForFormat = false
      if (responseFormat) {
        try {
          const parsed = JSON.parse(fullContent.trim())
          streamingExec.execution.output = {
            ...parsed,
            tokens: executionOutput.tokens,
            toolCalls: executionOutput.toolCalls,
            providerTiming: executionOutput.providerTiming,
            cost: executionOutput.cost,
            model: executionOutput.model,
          }
          parsedForFormat = true
        } catch (error) {
          this.execLogger.warn('Failed to parse streamed content for response format', {
            blockId,
            error,
          })
        }
        return
      } catch (error) {
        this.execLogger.warn('Failed to parse streamed content for response format', {
          blockId,
          error,
        })
      }
      if (!parsedForFormat) {
        executionOutput.content = fullContent
      }
    }

    executionOutput.content = fullContent
    if (streamingExec.onFullContent) {
      try {
        await streamingExec.onFullContent(fullContent)
      } catch (error) {
        this.execLogger.error('onFullContent callback failed', { blockId, error })
      }
    }
  }
}
@@ -958,8 +958,16 @@ export class AgentBlockHandler implements BlockHandler {
    streamingExec: StreamingExecution
  ): StreamingExecution {
    return {
      stream: memoryService.wrapStreamForPersistence(streamingExec.stream, ctx, inputs),
      stream: streamingExec.stream,
      execution: streamingExec.execution,
      onFullContent: async (content: string) => {
        if (!content.trim()) return
        try {
          await memoryService.appendToMemory(ctx, inputs, { role: 'assistant', content })
        } catch (error) {
          logger.error('Failed to persist streaming response:', error)
        }
      },
    }
  }

@@ -111,35 +111,6 @@ export class Memory {
    })
  }

  wrapStreamForPersistence(
    stream: ReadableStream<Uint8Array>,
    ctx: ExecutionContext,
    inputs: AgentInputs
  ): ReadableStream<Uint8Array> {
    const chunks: string[] = []
    const decoder = new TextDecoder()

    const transformStream = new TransformStream<Uint8Array, Uint8Array>({
      transform: (chunk, controller) => {
        controller.enqueue(chunk)
        const decoded = decoder.decode(chunk, { stream: true })
        chunks.push(decoded)
      },

      flush: () => {
        const content = chunks.join('')
        if (content.trim()) {
          this.appendToMemory(ctx, inputs, {
            role: 'assistant',
            content,
          }).catch((error) => logger.error('Failed to persist streaming response:', error))
        }
      },
    })

    return stream.pipeThrough(transformStream)
  }

  private requireWorkspaceId(ctx: ExecutionContext): string {
    if (!ctx.workspaceId) {
      throw new Error('workspaceId is required for memory operations')
@@ -359,6 +359,12 @@ export interface ExecutionResult {
export interface StreamingExecution {
  stream: ReadableStream
  execution: ExecutionResult & { isStreaming?: boolean }
  /**
   * Invoked with the assembled response text after the stream drains. Lets agent
   * blocks persist the full response without interposing a TransformStream on a
   * fetch-backed source — that pattern amplifies memory on Bun via #28035.
   */
  onFullContent?: (content: string) => void | Promise<void>
}
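
A minimal sketch of a producer satisfying this contract. `upstreamStream`, `baseExecutionResult`, and `persistResponse` are illustrative stand-ins, not names from this diff:

```ts
const streamingExec: StreamingExecution = {
  stream: upstreamStream, // assumed: the ReadableStream returned by the provider fetch
  execution: baseExecutionResult, // assumed: an ExecutionResult assembled earlier
  // Runs once after the executor drains the stream, so no TransformStream
  // ever sits on the fetch-backed source.
  onFullContent: async (content) => {
    if (!content.trim()) return
    await persistResponse(content) // hypothetical persistence helper
  },
}
```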

export interface BlockExecutor {

@@ -38,11 +38,14 @@ interface TableRowsParams {
  offset: number
  filter?: Filter | null
  sort?: Sort | null
  /** When `false`, skip the server-side `COUNT(*)` and receive `totalCount: null`. */
  includeTotal?: boolean
}

interface TableRowsResponse {
  rows: TableRow[]
  totalCount: number
  /** `null` when the request opted out of the count via `includeTotal: false`. */
  totalCount: number | null
}

interface RowMutationContext {
@@ -64,12 +67,14 @@ function createRowsParamsKey({
  offset,
  filter,
  sort,
  includeTotal,
}: Omit<TableRowsParams, 'workspaceId' | 'tableId'>): string {
  return JSON.stringify({
    limit,
    offset,
    filter: filter ?? null,
    sort: sort ?? null,
    includeTotal: includeTotal ?? true,
  })
}

@@ -98,6 +103,7 @@ async function fetchTableRows({
  offset,
  filter,
  sort,
  includeTotal,
  signal,
}: TableRowsParams & { signal?: AbortSignal }): Promise<TableRowsResponse> {
  const searchParams = new URLSearchParams({
@@ -114,6 +120,10 @@ async function fetchTableRows({
    searchParams.set('sort', JSON.stringify(sort))
  }

  if (includeTotal === false) {
    searchParams.set('includeTotal', 'false')
  }

  const res = await fetch(`/api/table/${tableId}/rows?${searchParams}`, { signal })
  if (!res.ok) {
    const error = await res.json().catch(() => ({}))
@@ -121,15 +131,15 @@
  }

  const json: {
    data?: { rows: TableRow[]; totalCount: number }
    data?: { rows: TableRow[]; totalCount: number | null }
    rows?: TableRow[]
    totalCount?: number
    totalCount?: number | null
  } = await res.json()

  const data = json.data || json
  return {
    rows: (data.rows || []) as TableRow[],
    totalCount: data.totalCount || 0,
    totalCount: data.totalCount ?? null,
  }
}

@@ -209,9 +219,10 @@ export function useTableRows({
  offset,
  filter,
  sort,
  includeTotal,
  enabled = true,
}: TableRowsParams & { enabled?: boolean }) {
  const paramsKey = createRowsParamsKey({ limit, offset, filter, sort })
  const paramsKey = createRowsParamsKey({ limit, offset, filter, sort, includeTotal })

  return useQuery({
    queryKey: tableKeys.rows(tableId, paramsKey),
@@ -223,6 +234,7 @@ export function useTableRows({
      offset,
      filter,
      sort,
      includeTotal,
      signal,
    }),
    enabled: Boolean(workspaceId && tableId) && enabled,
@@ -393,7 +405,11 @@ export function useCreateTableRow({ workspaceId, tableId }: RowMutationContext)
        r.position >= row.position ? { ...r, position: r.position + 1 } : r
      )
      const rows: TableRow[] = [...shifted, row].sort((a, b) => a.position - b.position)
      return { ...old, rows, totalCount: old.totalCount + 1 }
      return {
        ...old,
        rows,
        totalCount: old.totalCount === null ? null : old.totalCount + 1,
      }
    }
  )
},
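
A sketch of how a caller might opt out of the count. The surrounding component and `page` state are hypothetical, but the parameter names follow `TableRowsParams` above:

```ts
// Skip the server-side COUNT(*) past the first page of an infinite scroll.
const { data } = useTableRows({
  workspaceId,
  tableId,
  limit: 50,
  offset: page * 50,
  includeTotal: page === 0, // totalCount is a number on page 0, null afterwards
})
```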

@@ -18,6 +18,20 @@ import type { SerializableExecutionState } from '@/executor/execution/types'

const logger = createLogger('useExecutionStream')

export class ExecutionStreamHttpError extends Error {
  constructor(
    message: string,
    public readonly httpStatus: number
  ) {
    super(message)
    this.name = 'ExecutionStreamHttpError'
  }
}

export function isExecutionStreamHttpError(error: unknown): error is ExecutionStreamHttpError {
  return error instanceof ExecutionStreamHttpError
}
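
A hedged sketch of the intended call-site pattern. `startExecution` and `showUpgradePrompt` are hypothetical; the guard and the `httpStatus` field come from the class above:

```ts
try {
  await startExecution() // assumed wrapper around the execution fetch
} catch (error) {
  // Branch on the HTTP status without casting to `any`.
  if (isExecutionStreamHttpError(error) && error.httpStatus === 402) {
    showUpgradePrompt() // hypothetical usage-limit UI
  } else {
    throw error
  }
}
```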

/**
 * Detects errors caused by the browser killing a fetch (page refresh, navigation, tab close).
 * These should be treated as clean disconnects, not execution errors.
@@ -205,11 +219,13 @@ export function useExecutionStream() {

      if (!response.ok) {
        const errorResponse = await response.json()
        const error = new Error(errorResponse.error || 'Failed to start execution')
        const error = new ExecutionStreamHttpError(
          errorResponse.error || 'Failed to start execution',
          response.status
        )
        if (errorResponse && typeof errorResponse === 'object') {
          Object.assign(error, { executionResult: errorResponse })
        }
        Object.assign(error, { httpStatus: response.status })
        throw error
      }

@@ -279,15 +295,18 @@ export function useExecutionStream() {
      try {
        errorResponse = await response.json()
      } catch {
        const error = new Error(`Server error (${response.status}): ${response.statusText}`)
        Object.assign(error, { httpStatus: response.status })
        throw error
        throw new ExecutionStreamHttpError(
          `Server error (${response.status}): ${response.statusText}`,
          response.status
        )
      }
      const error = new Error(errorResponse.error || 'Failed to start execution')
      const error = new ExecutionStreamHttpError(
        errorResponse.error || 'Failed to start execution',
        response.status
      )
      if (errorResponse && typeof errorResponse === 'object') {
        Object.assign(error, { executionResult: errorResponse })
      }
      Object.assign(error, { httpStatus: response.status })
      throw error
    }

@@ -335,7 +354,9 @@ export function useExecutionStream() {
      `/api/workflows/${workflowId}/executions/${executionId}/stream?from=${fromEventId}`,
      { signal: abortController.signal }
    )
    if (!response.ok) throw new Error(`Reconnect failed (${response.status})`)
    if (!response.ok) {
      throw new ExecutionStreamHttpError(`Reconnect failed (${response.status})`, response.status)
    }
    if (!response.body) throw new Error('No response body')

    await processSSEStream(response.body.getReader(), callbacks, 'Reconnect')
@@ -1,6 +1,6 @@
'use client'

import { useEffect, useRef, useState } from 'react'
import { useEffect, useLayoutEffect, useRef, useState } from 'react'

interface ProgressiveListOptions {
  /** Number of items to render in the initial batch (most recent items) */
@@ -14,15 +14,31 @@ const DEFAULTS = {
  batchSize: 5,
} satisfies Required<ProgressiveListOptions>

interface ProgressiveListState {
  key: string
  count: number
  caughtUp: boolean
}

function createInitialState(
  key: string,
  itemCount: number,
  initialBatch: number
): ProgressiveListState {
  const count = Math.min(itemCount, initialBatch)
  return {
    key,
    count,
    caughtUp: itemCount > 0 && count >= itemCount,
  }
}

/**
 * Progressively renders a list of items so that first paint is fast.
 *
 * On mount (or when `key` changes), only the most recent `initialBatch`
 * items are rendered. The rest are added in `batchSize` increments via
 * `requestAnimationFrame` so the browser never blocks on a large DOM mount.
 *
 * Once staging completes for a given key it never re-stages -- new items
 * appended to the list are rendered immediately.
 * `requestAnimationFrame`.
 *
 * @param items Full list of items to render.
 * @param key   A session/conversation identifier. When it changes,
@@ -35,67 +51,83 @@ export function useProgressiveList<T>(
  key: string,
  options?: ProgressiveListOptions
): { staged: T[]; isStaging: boolean } {
  const initialBatch = options?.initialBatch ?? DEFAULTS.initialBatch
  const batchSize = options?.batchSize ?? DEFAULTS.batchSize
  const initialBatch = Math.max(0, options?.initialBatch ?? DEFAULTS.initialBatch)
  const batchSize = Math.max(1, options?.batchSize ?? DEFAULTS.batchSize)
  const [state, setState] = useState(() => createInitialState(key, items.length, initialBatch))
  const latestItemCountRef = useRef(items.length)

  const completedKeysRef = useRef(new Set<string>())
  const prevKeyRef = useRef(key)
  const stagingCountRef = useRef(initialBatch)
  const [count, setCount] = useState(() => {
    if (items.length <= initialBatch) return items.length
    return initialBatch
  })
  useLayoutEffect(() => {
    latestItemCountRef.current = items.length
  }, [items.length])

  const renderState =
    state.key === key && (state.count > 0 || items.length === 0 || state.caughtUp)
      ? state
      : createInitialState(key, items.length, initialBatch)

  useEffect(() => {
    if (completedKeysRef.current.has(key)) {
      setCount(items.length)
      return
    }

    if (items.length <= initialBatch) {
      setCount(items.length)
      completedKeysRef.current.add(key)
      return
    }

    let current = Math.max(stagingCountRef.current, initialBatch)
    setCount(current)

    let frame: number | undefined

    const step = () => {
      const total = items.length
      current = Math.min(total, current + batchSize)
      stagingCountRef.current = current
      setCount(current)
      if (current >= total) {
        completedKeysRef.current.add(key)
        frame = undefined
        return
    setState((prev) => {
      if (prev.key !== key) {
        return createInitialState(key, items.length, initialBatch)
      }
      frame = requestAnimationFrame(step)

      if (items.length === 0) {
        if (prev.count === 0 && !prev.caughtUp) {
          return prev
        }
        return { key, count: 0, caughtUp: false }
      }

      if (prev.caughtUp) {
        if (prev.count === items.length) {
          return prev
        }
        return { key, count: items.length, caughtUp: true }
      }

      const minimumCount = Math.min(items.length, initialBatch)
      if (prev.count >= minimumCount && prev.count <= items.length) {
        return prev
      }

      const count = Math.min(items.length, Math.max(prev.count, minimumCount))
      return {
        key,
        count,
        caughtUp: count >= items.length,
      }
    })
  }, [key, items.length, initialBatch])

  useEffect(() => {
    if (state.key !== key || state.caughtUp || state.count >= items.length) {
      return
    }

    frame = requestAnimationFrame(step)
    const frame = requestAnimationFrame(() => {
      setState((prev) => {
        if (prev.key !== key || prev.caughtUp) {
          return prev
        }

    return () => {
      if (frame !== undefined) cancelAnimationFrame(frame)
    }
  }, [key, items.length, initialBatch, batchSize])
        const itemCount = latestItemCountRef.current
        const count = Math.min(itemCount, prev.count + batchSize)
        return {
          key,
          count,
          caughtUp: count >= itemCount,
        }
      })
    })

  let effectiveCount = count
  if (prevKeyRef.current !== key) {
    effectiveCount = items.length <= initialBatch ? items.length : initialBatch
    stagingCountRef.current = initialBatch
  }
  prevKeyRef.current = key
    return () => cancelAnimationFrame(frame)
  }, [state.key, state.count, state.caughtUp, key, items.length, batchSize])

  const isCompleted = completedKeysRef.current.has(key)
  const isStaging = !isCompleted && effectiveCount < items.length
  const staged =
    isCompleted || effectiveCount >= items.length
      ? items
      : items.slice(Math.max(0, items.length - effectiveCount))
  const effectiveCount = renderState.caughtUp
    ? items.length
    : Math.min(renderState.count, items.length)
  const staged = items.slice(Math.max(0, items.length - effectiveCount))
  const isStaging = effectiveCount < items.length

  return { staged, isStaging }
}
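
A sketch of consuming the hook from a chat view. `Message`, `MessageRow`, and `Spinner` are hypothetical components; the hook signature matches the implementation above:

```tsx
function MessageList({ messages, conversationId }: { messages: Message[]; conversationId: string }) {
  // Render the most recent 20 messages immediately; backfill 5 per frame.
  const { staged, isStaging } = useProgressiveList(messages, conversationId, {
    initialBatch: 20,
    batchSize: 5,
  })
  return (
    <>
      {isStaging && <Spinner />}
      {staged.map((m) => (
        <MessageRow key={m.id} message={m} />
      ))}
    </>
  )
}
```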

@@ -7,6 +7,55 @@ const logger = createLogger('BatchDelete')

export const DEFAULT_BATCH_SIZE = 2000
export const DEFAULT_MAX_BATCHES_PER_TABLE = 10
/**
 * Split workspaceIds into this-sized groups before running SELECT/DELETE. Large
 * IN lists combined with `started_at < X` force Postgres to probe every
 * workspace range in the composite index, which blows the 90s statement timeout
 * at the scale of the full free tier.
 */
export const DEFAULT_WORKSPACE_CHUNK_SIZE = 50

export function chunkArray<T>(arr: T[], size: number): T[][] {
  const out: T[][] = []
  for (let i = 0; i < arr.length; i += size) out.push(arr.slice(i, i + size))
  return out
}

export interface SelectByIdChunksOptions {
  /** Cap on rows returned across all chunks. Defaults to a full per-table cleanup budget. */
  overallLimit?: number
  chunkSize?: number
}

/**
 * Run a SELECT query once per ID chunk and concatenate results up to
 * `overallLimit`. Each chunk's query is passed the remaining row budget so the
 * total never exceeds the cap. Use this when you need the selected row set
 * (e.g. to drive S3 or copilot-backend cleanup alongside the DB delete).
 *
 * Works for any large ID set — workspace IDs, workflow IDs, etc. Avoids
 * sending one massive `IN (...)` list that would blow Postgres's statement
 * timeout.
 */
export async function selectRowsByIdChunks<T>(
  ids: string[],
  query: (chunkIds: string[], chunkLimit: number) => Promise<T[]>,
  {
    overallLimit = DEFAULT_BATCH_SIZE * DEFAULT_MAX_BATCHES_PER_TABLE,
    chunkSize = DEFAULT_WORKSPACE_CHUNK_SIZE,
  }: SelectByIdChunksOptions = {}
): Promise<T[]> {
  if (ids.length === 0) return []

  const rows: T[] = []
  for (const chunkIds of chunkArray(ids, chunkSize)) {
    if (rows.length >= overallLimit) break
    const remaining = overallLimit - rows.length
    const chunkRows = await query(chunkIds, remaining)
    rows.push(...chunkRows)
  }
  return rows
}
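
An illustrative call, assuming a Drizzle table `executionLogs` with `id`, `fileKey`, `workflowId`, and `createdAt` columns (stand-ins, not tables from this diff):

```ts
// Gather expired rows (and their S3 keys) across many workflow IDs without
// issuing one giant IN (...) list.
const expired = await selectRowsByIdChunks(
  workflowIds,
  (chunkIds, chunkLimit) =>
    db
      .select({ id: executionLogs.id, fileKey: executionLogs.fileKey })
      .from(executionLogs)
      .where(and(inArray(executionLogs.workflowId, chunkIds), lt(executionLogs.createdAt, retentionDate)))
      .limit(chunkLimit),
  { overallLimit: 5_000 }
)
```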

export interface TableCleanupResult {
  table: string
@@ -14,6 +63,111 @@ export interface TableCleanupResult {
  failed: number
}

export interface ChunkedBatchDeleteOptions<TRow extends { id: string }> {
  tableDef: PgTable
  workspaceIds: string[]
  tableName: string
  /** SELECT eligible rows for one workspace chunk. The result must include `id`. */
  selectChunk: (chunkIds: string[], limit: number) => Promise<TRow[]>
  /** Runs between SELECT and DELETE; receives the just-selected rows. */
  onBatch?: (rows: TRow[]) => Promise<void>
  batchSize?: number
  /** Max batches per workspace chunk. */
  maxBatches?: number
  /**
   * Hard cap on rows processed (deleted + failed) across all chunks per call.
   * Defaults to `DEFAULT_BATCH_SIZE * DEFAULT_MAX_BATCHES_PER_TABLE`. Cron
   * runs frequently enough to catch up the backlog over multiple invocations.
   */
  totalRowLimit?: number
  workspaceChunkSize?: number
}

/**
 * Inner loop primitive for cleanup jobs.
 *
 * For each workspace chunk: SELECT a batch of eligible rows → run optional
 * `onBatch` hook (e.g. to delete S3 files) → DELETE those rows by ID. Repeats
 * until exhausted or `maxBatches` is hit, then moves to the next chunk. Stops
 * the whole call once `totalRowLimit` rows have been processed.
 *
 * Workspace IDs are chunked before the SELECT — see
 * `DEFAULT_WORKSPACE_CHUNK_SIZE` for why.
 */
export async function chunkedBatchDelete<TRow extends { id: string }>({
  tableDef,
  workspaceIds,
  tableName,
  selectChunk,
  onBatch,
  batchSize = DEFAULT_BATCH_SIZE,
  maxBatches = DEFAULT_MAX_BATCHES_PER_TABLE,
  totalRowLimit = DEFAULT_BATCH_SIZE * DEFAULT_MAX_BATCHES_PER_TABLE,
  workspaceChunkSize = DEFAULT_WORKSPACE_CHUNK_SIZE,
}: ChunkedBatchDeleteOptions<TRow>): Promise<TableCleanupResult> {
  const result: TableCleanupResult = { table: tableName, deleted: 0, failed: 0 }

  if (workspaceIds.length === 0) {
    logger.info(`[${tableName}] Skipped — no workspaces in scope`)
    return result
  }

  const chunks = chunkArray(workspaceIds, workspaceChunkSize)
  let stoppedEarly = false

  for (const [chunkIdx, chunkIds] of chunks.entries()) {
    if (result.deleted + result.failed >= totalRowLimit) {
      stoppedEarly = true
      break
    }

    let batchesProcessed = 0
    let hasMore = true

    while (
      hasMore &&
      batchesProcessed < maxBatches &&
      result.deleted + result.failed < totalRowLimit
    ) {
      let rows: TRow[] = []
      try {
        rows = await selectChunk(chunkIds, batchSize)

        if (rows.length === 0) {
          hasMore = false
          break
        }

        if (onBatch) await onBatch(rows)

        const ids = rows.map((r) => r.id)
        const deleted = await db
          .delete(tableDef)
          .where(inArray(sql`id`, ids))
          .returning({ id: sql`id` })

        result.deleted += deleted.length
        hasMore = rows.length === batchSize
        batchesProcessed++
      } catch (error) {
        // Count rows we tried to delete; SELECT-stage errors leave rows=[].
        result.failed += rows.length
        logger.error(
          `[${tableName}] Batch failed (chunk ${chunkIdx + 1}/${chunks.length}, ${rows.length} rows):`,
          { error }
        )
        hasMore = false
      }
    }
  }

  logger.info(
    `[${tableName}] Complete: ${result.deleted} deleted, ${result.failed} failed across ${chunks.length} chunks${stoppedEarly ? ' (row-limit reached, remaining chunks deferred to next run)' : ''}`
  )

  return result
}
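
An illustrative cleanup job built on the primitive above; `fileRows`, its columns, and `deleteS3Objects` are hypothetical:

```ts
const result = await chunkedBatchDelete({
  tableDef: fileRows,
  workspaceIds: expiredWorkspaceIds,
  tableName: 'file_rows',
  selectChunk: (chunkIds, limit) =>
    db
      .select({ id: fileRows.id, s3Key: fileRows.s3Key })
      .from(fileRows)
      .where(and(inArray(fileRows.workspaceId, chunkIds), lt(fileRows.createdAt, retentionDate)))
      .limit(limit),
  // Side effect between SELECT and DELETE: remove the backing S3 objects.
  onBatch: async (rows) => {
    await deleteS3Objects(rows.map((r) => r.s3Key))
  },
})
logger.info(`cleaned ${result.deleted} rows (${result.failed} failed)`)
```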

export interface BatchDeleteOptions {
  tableDef: PgTable
  workspaceIdCol: PgColumn
@@ -25,13 +179,13 @@ export interface BatchDeleteOptions {
  requireTimestampNotNull?: boolean
  batchSize?: number
  maxBatches?: number
  workspaceChunkSize?: number
}

/**
 * Iteratively delete rows in a table matching a workspace + time-based predicate.
 *
 * Uses a SELECT-with-LIMIT → DELETE-by-ID pattern to keep each round bounded in
 * memory and I/O (PostgreSQL DELETE does not support LIMIT directly).
 * Convenience wrapper around `chunkedBatchDelete` for the common case: delete
 * rows where `workspaceId IN (...) AND timestamp < retentionDate`. Use this
 * when there's no per-row side effect (e.g. no S3 files to clean up alongside).
 */
export async function batchDeleteByWorkspaceAndTimestamp({
  tableDef,
@@ -41,56 +195,23 @@ export async function batchDeleteByWorkspaceAndTimestamp({
  retentionDate,
  tableName,
  requireTimestampNotNull = false,
  batchSize = DEFAULT_BATCH_SIZE,
  maxBatches = DEFAULT_MAX_BATCHES_PER_TABLE,
  ...rest
}: BatchDeleteOptions): Promise<TableCleanupResult> {
  const result: TableCleanupResult = { table: tableName, deleted: 0, failed: 0 }

  if (workspaceIds.length === 0) {
    logger.info(`[${tableName}] Skipped — no workspaces in scope`)
    return result
  }

  const predicates = [inArray(workspaceIdCol, workspaceIds), lt(timestampCol, retentionDate)]
  if (requireTimestampNotNull) predicates.push(isNotNull(timestampCol))
  const whereClause = and(...predicates)

  let batchesProcessed = 0
  let hasMore = true

  while (hasMore && batchesProcessed < maxBatches) {
    try {
      const batch = await db
  return chunkedBatchDelete({
    tableDef,
    workspaceIds,
    tableName,
    selectChunk: (chunkIds, limit) => {
      const predicates = [inArray(workspaceIdCol, chunkIds), lt(timestampCol, retentionDate)]
      if (requireTimestampNotNull) predicates.push(isNotNull(timestampCol))
      return db
        .select({ id: sql<string>`id` })
        .from(tableDef)
        .where(whereClause)
        .limit(batchSize)

      if (batch.length === 0) {
        logger.info(`[${tableName}] No expired rows found`)
        hasMore = false
        break
      }

      const ids = batch.map((r) => r.id)
      const deleted = await db
        .delete(tableDef)
        .where(inArray(sql`id`, ids))
        .returning({ id: sql`id` })

      result.deleted += deleted.length
      hasMore = batch.length === batchSize
      batchesProcessed++

      logger.info(`[${tableName}] Batch ${batchesProcessed}: deleted ${deleted.length} rows`)
    } catch (error) {
      result.failed++
      logger.error(`[${tableName}] Batch delete failed:`, { error })
      hasMore = false
    }
  }

  return result
        .where(and(...predicates))
        .limit(limit)
    },
    ...rest,
  })
}

/**

@@ -46,7 +46,11 @@ function toToolCallInfo(block: PersistedContentBlock): ToolCallInfo | undefined

function toDisplayBlock(block: PersistedContentBlock): ContentBlock | undefined {
  const displayed = toDisplayBlockBody(block)
  return displayed ? withBlockTiming(displayed, block) : undefined
  if (!displayed) return undefined
  if (block.parentToolCallId && displayed.parentToolCallId === undefined) {
    displayed.parentToolCallId = block.parentToolCallId
  }
  return withBlockTiming(displayed, block)
}

function toDisplayBlockBody(block: PersistedContentBlock): ContentBlock | undefined {
@@ -77,11 +77,16 @@ function appendTextBlock(
  content: string,
  options: {
    lane?: 'subagent'
    parentToolCallId?: string
  }
): void {
  if (!content) return
  const last = blocks[blocks.length - 1]
  if (last?.type === MothershipStreamV1EventType.text && last.lane === options.lane) {
  if (
    last?.type === MothershipStreamV1EventType.text &&
    last.lane === options.lane &&
    last.parentToolCallId === options.parentToolCallId
  ) {
    last.content = `${typeof last.content === 'string' ? last.content : ''}${content}`
    return
  }
@@ -89,6 +94,7 @@ function appendTextBlock(
  blocks.push({
    type: MothershipStreamV1EventType.text,
    ...(options.lane ? { lane: options.lane } : {}),
    ...(options.parentToolCallId ? { parentToolCallId: options.parentToolCallId } : {}),
    content,
  })
}
@@ -122,10 +128,24 @@ function buildLiveAssistantMessage(params: {
    return activeSubagent
  }

  const resolveParentForSubagentBlock = (
    subagent: string | undefined,
    scopedParent: string | undefined
  ): string | undefined => {
    if (!subagent) return undefined
    if (scopedParent) return scopedParent
    if (activeSubagent === subagent) return activeSubagentParentToolCallId
    for (const [parent, name] of subagentByParentToolCallId) {
      if (name === subagent) return parent
    }
    return undefined
  }

  const ensureToolBlock = (input: {
    toolCallId: string
    toolName: string
    calledBy?: string
    parentToolCallId?: string
    displayTitle?: string
    params?: Record<string, unknown>
    result?: { success: boolean; output?: unknown; error?: string }
@@ -155,6 +175,7 @@ function buildLiveAssistantMessage(params: {
        ? { display: existingToolCall.display }
        : {}),
    }
    if (input.parentToolCallId) existing.parentToolCallId = input.parentToolCallId
    return existing
  }

@@ -176,6 +197,7 @@ function buildLiveAssistantMessage(params: {
          }
        : {}),
      },
      ...(input.parentToolCallId ? { parentToolCallId: input.parentToolCallId } : {}),
    }
    toolIndexById.set(input.toolCallId, blocks.length)
    blocks.push(nextBlock)
@@ -219,8 +241,10 @@ function buildLiveAssistantMessage(params: {
        runningText.length > 0 &&
        !runningText.endsWith('\n')
      const normalizedChunk = needsBoundaryNewline ? `\n${chunk}` : chunk
      const parentForBlock = resolveParentForSubagentBlock(scopedSubagent, scopedParentToolCallId)
      appendTextBlock(blocks, normalizedChunk, {
        ...(scopedSubagent ? { lane: 'subagent' as const } : {}),
        ...(parentForBlock ? { parentToolCallId: parentForBlock } : {}),
      })
      runningText += normalizedChunk
      lastContentSource = contentSource
@@ -239,11 +263,14 @@ function buildLiveAssistantMessage(params: {
        continue
      }

      const parentForBlock = resolveParentForSubagentBlock(scopedSubagent, scopedParentToolCallId)

      if (payload.phase === MothershipStreamV1ToolPhase.result) {
        ensureToolBlock({
          toolCallId,
          toolName: payload.toolName,
          calledBy: scopedSubagent,
          ...(parentForBlock ? { parentToolCallId: parentForBlock } : {}),
          state: resolveStreamToolOutcome(payload),
          result: {
            success: payload.success,
@@ -258,6 +285,7 @@ function buildLiveAssistantMessage(params: {
          toolCallId,
          toolName: payload.toolName,
          calledBy: scopedSubagent,
          ...(parentForBlock ? { parentToolCallId: parentForBlock } : {}),
          displayTitle,
          params: isRecord(payload.arguments) ? payload.arguments : undefined,
          state: typeof payload.status === 'string' ? payload.status : 'executing',
@@ -270,9 +298,13 @@ function buildLiveAssistantMessage(params: {
      }

      const spanData = asPayloadRecord(parsed.payload.data)
      const parentToolCallId =
        scopedParentToolCallId ??
        (typeof spanData?.tool_call_id === 'string' ? spanData.tool_call_id : undefined)
      const parentToolCallIdFromData =
        typeof spanData?.tool_call_id === 'string'
          ? spanData.tool_call_id
          : typeof spanData?.toolCallId === 'string'
            ? spanData.toolCallId
            : undefined
      const parentToolCallId = scopedParentToolCallId ?? parentToolCallIdFromData
      const name = typeof parsed.payload.agent === 'string' ? parsed.payload.agent : scopedAgentId
      if (parsed.payload.event === MothershipStreamV1SpanLifecycleEvent.start && name) {
        if (parentToolCallId) {
@@ -285,6 +317,7 @@ function buildLiveAssistantMessage(params: {
          kind: MothershipStreamV1SpanPayloadKind.subagent,
          lifecycle: MothershipStreamV1SpanLifecycleEvent.start,
          content: name,
          ...(parentToolCallId ? { parentToolCallId } : {}),
        })
        continue
      }
@@ -308,6 +341,7 @@ function buildLiveAssistantMessage(params: {
          type: MothershipStreamV1EventType.span,
          kind: MothershipStreamV1SpanPayloadKind.subagent,
          lifecycle: MothershipStreamV1SpanLifecycleEvent.end,
          ...(parentToolCallId ? { parentToolCallId } : {}),
        })
      }
      continue
@@ -343,8 +377,10 @@ function buildLiveAssistantMessage(params: {
      }
      const prefix = runningText.length > 0 && !runningText.endsWith('\n') ? '\n' : ''
      const content = `${prefix}${tag}`
      const errorParent = resolveParentForSubagentBlock(scopedSubagent, scopedParentToolCallId)
      appendTextBlock(blocks, content, {
        ...(scopedSubagent ? { lane: 'subagent' as const } : {}),
        ...(errorParent ? { parentToolCallId: errorParent } : {}),
      })
      runningText += content
      continue

@@ -41,6 +41,7 @@ export interface PersistedContentBlock {
  toolCall?: PersistedToolCall
  timestamp?: number
  endedAt?: number
  parentToolCallId?: string
}

export interface PersistedFileAttachment {
@@ -101,9 +102,16 @@ export function withBlockTiming<T>(target: T, src: { timestamp?: number; endedAt
  return target
}

function withBlockParent<T>(target: T, src: { parentToolCallId?: string }): T {
  if (src.parentToolCallId) {
    ;(target as { parentToolCallId?: string }).parentToolCallId = src.parentToolCallId
  }
  return target
}

function mapContentBlock(block: ContentBlock): PersistedContentBlock {
  const persisted = mapContentBlockBody(block)
  return withBlockTiming(persisted, block)
  return withBlockParent(withBlockTiming(persisted, block), block)
}

function mapContentBlockBody(block: ContentBlock): PersistedContentBlock {
@@ -265,6 +273,7 @@ interface RawBlock {
  status?: string
  timestamp?: number
  endedAt?: number
  parentToolCallId?: string
  toolCall?: {
    id?: string
    name?: string
@@ -321,6 +330,7 @@ function normalizeCanonicalBlock(block: RawBlock): PersistedContentBlock {
  if (block.kind) result.kind = block.kind as MothershipStreamV1SpanPayloadKind
  if (block.lifecycle) result.lifecycle = block.lifecycle as MothershipStreamV1SpanLifecycleEvent
  if (block.status) result.status = block.status as MothershipStreamV1CompletionStatus
  if (block.parentToolCallId) result.parentToolCallId = block.parentToolCallId
  if (block.toolCall) {
    result.toolCall = {
      id: block.toolCall.id ?? '',
@@ -438,6 +448,9 @@ function normalizeBlock(block: RawBlock): PersistedContentBlock {
  if (typeof block.endedAt === 'number' && result.endedAt === undefined) {
    result.endedAt = block.endedAt
  }
  if (block.parentToolCallId && result.parentToolCallId === undefined) {
    result.parentToolCallId = block.parentToolCallId
  }
  return result
}
@@ -10,13 +10,24 @@ import {
  MothershipStreamV1ToolOutcome,
  MothershipStreamV1ToolPhase,
} from '@/lib/copilot/generated/mothership-stream-v1'

vi.mock('@/lib/copilot/request/session', async () => {
  const actual = await vi.importActual<typeof import('@/lib/copilot/request/session')>(
    '@/lib/copilot/request/session'
  )
  return {
    ...actual,
    hasAbortMarker: vi.fn().mockResolvedValue(false),
  }
})

import {
  buildPreviewContentUpdate,
  decodeJsonStringPrefix,
  extractEditContent,
  runStreamLoop,
} from '@/lib/copilot/request/go/stream'
import { createEvent } from '@/lib/copilot/request/session'
import { AbortReason, createEvent, hasAbortMarker } from '@/lib/copilot/request/session'
import { RequestTraceV1Outcome, TraceCollector } from '@/lib/copilot/request/trace'
import type { ExecutionContext, StreamingContext } from '@/lib/copilot/request/types'

@@ -194,6 +205,64 @@ describe('copilot go stream helpers', () => {
    )
  })

  it('does not retry transient backend statuses because stream requests are not idempotent', async () => {
    vi.mocked(fetch).mockResolvedValueOnce(new Response('bad gateway', { status: 502 }))

    const context = createStreamingContext()
    const execContext: ExecutionContext = {
      userId: 'user-1',
      workflowId: 'workflow-1',
    }

    await expect(
      runStreamLoop('https://example.com/mothership/stream', {}, context, execContext, {
        timeout: 1000,
      })
    ).rejects.toMatchObject({
      name: 'CopilotBackendError',
      status: 502,
      body: 'bad gateway',
    })

    expect(fetch).toHaveBeenCalledTimes(1)
  })

  it('does not retry non-transient backend statuses before the SSE stream opens', async () => {
    vi.mocked(fetch).mockResolvedValueOnce(new Response('limit reached', { status: 402 }))

    const context = createStreamingContext()
    const execContext: ExecutionContext = {
      userId: 'user-1',
      workflowId: 'workflow-1',
    }

    await expect(
      runStreamLoop('https://example.com/mothership/stream', {}, context, execContext, {
        timeout: 1000,
      })
    ).rejects.toThrow('Usage limit reached')

    expect(fetch).toHaveBeenCalledTimes(1)
  })

  it('does not retry network errors because Go may already be executing the request', async () => {
    vi.mocked(fetch).mockRejectedValueOnce(new TypeError('fetch failed'))

    const context = createStreamingContext()
    const execContext: ExecutionContext = {
      userId: 'user-1',
      workflowId: 'workflow-1',
    }

    await expect(
      runStreamLoop('https://example.com/mothership/stream', {}, context, execContext, {
        timeout: 1000,
      })
    ).rejects.toThrow('fetch failed')

    expect(fetch).toHaveBeenCalledTimes(1)
  })

  it('fails closed when the shared stream ends before a terminal event', async () => {
    const textEvent = createEvent({
      streamId: 'stream-1',
@@ -227,6 +296,137 @@ describe('copilot go stream helpers', () => {
    ).toBe(true)
  })

  it('reclassifies as aborted when the body closes without terminal but the abort marker is set', async () => {
    const textEvent = createEvent({
      streamId: 'stream-1',
      cursor: '1',
      seq: 1,
      requestId: 'req-1',
      type: MothershipStreamV1EventType.text,
      payload: {
        channel: 'assistant',
        text: 'partial response',
      },
    })

    vi.mocked(fetch).mockResolvedValueOnce(createSseResponse([textEvent]))
    vi.mocked(hasAbortMarker).mockResolvedValueOnce(true)

    const context = createStreamingContext()
    const execContext: ExecutionContext = {
      userId: 'user-1',
      workflowId: 'workflow-1',
    }

    await runStreamLoop('https://example.com/mothership/stream', {}, context, execContext, {
      timeout: 1000,
    })

    expect(hasAbortMarker).toHaveBeenCalledWith(context.messageId)
    expect(context.wasAborted).toBe(true)
    expect(
      context.errors.some((message) =>
        message.includes('Copilot backend stream ended before a terminal event')
      )
    ).toBe(false)
  })

  it('invokes onAbortObserved with MarkerObservedAtBodyClose when reclassifying via the abort marker', async () => {
    const textEvent = createEvent({
      streamId: 'stream-1',
      cursor: '1',
      seq: 1,
      requestId: 'req-1',
      type: MothershipStreamV1EventType.text,
      payload: {
        channel: 'assistant',
        text: 'partial response',
      },
    })

    vi.mocked(fetch).mockResolvedValueOnce(createSseResponse([textEvent]))
    vi.mocked(hasAbortMarker).mockResolvedValueOnce(true)

    const context = createStreamingContext()
    const execContext: ExecutionContext = {
      userId: 'user-1',
      workflowId: 'workflow-1',
    }
    const onAbortObserved = vi.fn()

    await runStreamLoop('https://example.com/mothership/stream', {}, context, execContext, {
      timeout: 1000,
      onAbortObserved,
    })

    expect(onAbortObserved).toHaveBeenCalledTimes(1)
    expect(onAbortObserved).toHaveBeenCalledWith(AbortReason.MarkerObservedAtBodyClose)
    expect(context.wasAborted).toBe(true)
  })

  it('does not invoke onAbortObserved when no abort marker is present at body close', async () => {
    const textEvent = createEvent({
      streamId: 'stream-1',
      cursor: '1',
      seq: 1,
      requestId: 'req-1',
      type: MothershipStreamV1EventType.text,
      payload: {
        channel: 'assistant',
        text: 'partial response',
      },
    })

    vi.mocked(fetch).mockResolvedValueOnce(createSseResponse([textEvent]))
    vi.mocked(hasAbortMarker).mockResolvedValueOnce(false)

    const context = createStreamingContext()
    const execContext: ExecutionContext = {
      userId: 'user-1',
      workflowId: 'workflow-1',
    }
    const onAbortObserved = vi.fn()

    await expect(
      runStreamLoop('https://example.com/mothership/stream', {}, context, execContext, {
        timeout: 1000,
        onAbortObserved,
      })
    ).rejects.toThrow('Copilot backend stream ended before a terminal event')

    expect(onAbortObserved).not.toHaveBeenCalled()
  })

  it('still fails closed when the body closes without terminal and the abort marker check throws', async () => {
    const textEvent = createEvent({
      streamId: 'stream-1',
      cursor: '1',
      seq: 1,
      requestId: 'req-1',
      type: MothershipStreamV1EventType.text,
      payload: {
        channel: 'assistant',
        text: 'partial response',
      },
    })

    vi.mocked(fetch).mockResolvedValueOnce(createSseResponse([textEvent]))
    vi.mocked(hasAbortMarker).mockRejectedValueOnce(new Error('redis unavailable'))

    const context = createStreamingContext()
    const execContext: ExecutionContext = {
      userId: 'user-1',
      workflowId: 'workflow-1',
    }

    await expect(
      runStreamLoop('https://example.com/mothership/stream', {}, context, execContext, {
        timeout: 1000,
      })
    ).rejects.toThrow('Copilot backend stream ended before a terminal event')
    expect(context.wasAborted).toBe(false)
  })

  it('fails closed when the shared stream receives an invalid event', async () => {
    vi.mocked(fetch).mockResolvedValueOnce(
      createSseResponse([
@@ -30,7 +30,9 @@ import {
} from '@/lib/copilot/request/handlers/types'
import { getCopilotTracer } from '@/lib/copilot/request/otel'
import {
  AbortReason,
  eventToStreamEvent,
  hasAbortMarker,
  isSubagentSpanStreamEvent,
  parsePersistedStreamEventEnvelope,
} from '@/lib/copilot/request/session'
@@ -134,17 +136,27 @@ export async function runStreamLoop(
    requestBodyBytes,
  })
  const fetchStart = performance.now()
  const response = await fetchGo(fetchUrl, {
    ...fetchOptions,
    signal: abortSignal,
    otelContext: options.otelContext,
    spanName: `sim → go ${pathname}`,
    operation: 'stream',
    attributes: {
      [TraceAttr.CopilotStream]: true,
      ...(requestBodyBytes ? { [TraceAttr.HttpRequestContentLength]: requestBodyBytes } : {}),
    },
  })
  let response: Response
  try {
    response = await fetchGo(fetchUrl, {
      ...fetchOptions,
      signal: abortSignal,
      otelContext: options.otelContext,
      spanName: `sim → go ${pathname}`,
      operation: 'stream',
      attributes: {
        [TraceAttr.CopilotStream]: true,
        ...(requestBodyBytes ? { [TraceAttr.HttpRequestContentLength]: requestBodyBytes } : {}),
      },
    })
  } catch (error) {
    fetchSpan.attributes = {
      ...(fetchSpan.attributes ?? {}),
      headersMs: Math.round(performance.now() - fetchStart),
    }
    context.trace.endSpan(fetchSpan, abortSignal?.aborted ? 'cancelled' : 'error')
    throw error
  }
  const headersElapsedMs = Math.round(performance.now() - fetchStart)
  fetchSpan.attributes = {
    ...(fetchSpan.attributes ?? {}),
@@ -349,28 +361,29 @@ export async function runStreamLoop(
        flushSubagentThinkingBlock(context)
        flushThinkingBlock(context)
        if (spanEvt === MothershipStreamV1SpanLifecycleEvent.start) {
          const lastParent = context.subAgentParentStack[context.subAgentParentStack.length - 1]
          const lastBlock = context.contentBlocks[context.contentBlocks.length - 1]
          if (toolCallId) {
            if (lastParent !== toolCallId) {
              if (!context.subAgentParentStack.includes(toolCallId)) {
                context.subAgentParentStack.push(toolCallId)
              }
              context.subAgentParentToolCallId = toolCallId
              context.subAgentContent[toolCallId] ??= ''
              context.subAgentToolCalls[toolCallId] ??= []
            }
          if (
            subagentName &&
            !(
              lastParent === toolCallId &&
              lastBlock?.type === 'subagent' &&
              lastBlock.content === subagentName
            )
          ) {
            context.contentBlocks.push({
              type: 'subagent',
              content: subagentName,
              timestamp: Date.now(),
          if (toolCallId && subagentName) {
            const openParents = (context.openSubagentParents ??= new Set<string>())
            if (!openParents.has(toolCallId)) {
              openParents.add(toolCallId)
              context.contentBlocks.push({
                type: 'subagent',
                content: subagentName,
                parentToolCallId: toolCallId,
                timestamp: Date.now(),
              })
            }
          } else {
            logger.warn('subagent start missing toolCallId or agent name', {
              hasToolCallId: Boolean(toolCallId),
              hasSubagentName: Boolean(subagentName),
            })
          }
          return
@@ -379,27 +392,33 @@ export async function runStreamLoop(
          if (isPendingPause) {
            return
          }
          if (context.subAgentParentStack.length > 0) {
            context.subAgentParentStack.pop()
          if (toolCallId) {
            const idx = context.subAgentParentStack.lastIndexOf(toolCallId)
            if (idx >= 0) {
              context.subAgentParentStack.splice(idx, 1)
            } else {
              logger.warn('subagent end without matching start', { toolCallId })
            }
          } else {
            logger.warn('subagent end without matching start')
            logger.warn('subagent end missing toolCallId')
          }
          context.subAgentParentToolCallId =
            context.subAgentParentStack.length > 0
              ? context.subAgentParentStack[context.subAgentParentStack.length - 1]
              : undefined
          if (subagentName) {
          if (toolCallId) {
            for (let i = context.contentBlocks.length - 1; i >= 0; i--) {
              const b = context.contentBlocks[i]
              if (
                b.type === 'subagent' &&
                b.content === subagentName &&
                b.endedAt === undefined
                b.endedAt === undefined &&
                b.parentToolCallId === toolCallId
              ) {
                b.endedAt = Date.now()
                break
              }
            }
            context.openSubagentParents?.delete(toolCallId)
          }
          return
        }
@@ -426,16 +445,32 @@ export async function runStreamLoop(
    })

    if (!context.streamComplete && !abortSignal?.aborted && !context.wasAborted) {
      const streamPath = new URL(fetchUrl).pathname
      const message = `Copilot backend stream ended before a terminal event on ${streamPath}`
      context.errors.push(message)
      logger.error('Copilot backend stream ended before a terminal event', {
        path: streamPath,
        requestId: context.requestId,
        messageId: context.messageId,
      })
      endedOn = CopilotSseCloseReason.ClosedNoTerminal
      throw new CopilotBackendError(message, { status: 503 })
      let abortRequested = false
      try {
        abortRequested = await hasAbortMarker(context.messageId)
      } catch (error) {
        logger.warn('Failed to read abort marker at body close', {
          streamId: context.messageId,
          error: error instanceof Error ? error.message : String(error),
        })
      }

      if (abortRequested) {
        options.onAbortObserved?.(AbortReason.MarkerObservedAtBodyClose)
        context.wasAborted = true
        endedOn = CopilotSseCloseReason.Aborted
      } else {
        const streamPath = new URL(fetchUrl).pathname
        const message = `Copilot backend stream ended before a terminal event on ${streamPath}`
        context.errors.push(message)
        logger.error('Copilot backend stream ended before a terminal event', {
          path: streamPath,
          requestId: context.requestId,
          messageId: context.messageId,
        })
        endedOn = CopilotSseCloseReason.ClosedNoTerminal
        throw new CopilotBackendError(message, { status: 503 })
      }
    }
  } catch (error) {
    if (error instanceof FatalSseEventError && !context.errors.includes(error.message)) {
@@ -561,14 +596,14 @@ function stampSseReadLoopSpan(
  const nowWall = Date.now()
  const startWall = nowWall - (nowPerf - startPerfMs)

  const terminalEventSeen = counters.eventsByType.complete > 0
  const terminalEventSeen = counters.eventsByType.complete > 0 || counters.eventsByType.error > 0
  // `terminal_event_missing` is the single-attribute dashboard signal
  // for the "disappeared response" bug class: the caller considered
  // this leg to be the final one (`context.streamComplete === true`)
  // but no `complete` event arrived on the wire. Tool-pause legs have
  // expectedTerminal=false and never trip this, so dashboards can
  // filter on `{ .copilot.sse.terminal_event_missing = true }` without
  // false positives.
  // but no terminal `complete` or `error` event arrived on the wire.
  // Tool-pause legs have expectedTerminal=false and never trip this, so
  // dashboards can filter on `{ .copilot.sse.terminal_event_missing = true }`
  // without false positives.
  const terminalEventMissing = opts.expectedTerminal && !terminalEventSeen

  const tracer = getCopilotTracer()
@@ -22,10 +22,17 @@ export function handleTextEvent(scope: ToolScope): StreamHandler {
    const parentToolCallId = getScopedParentToolCallId(event, context)
    if (!parentToolCallId) return
    if (event.payload.channel === MothershipStreamV1TextChannel.thinking) {
      if (
        context.currentSubagentThinkingBlock &&
        context.currentSubagentThinkingBlock.parentToolCallId !== parentToolCallId
      ) {
        flushSubagentThinkingBlock(context)
      }
      if (!context.currentSubagentThinkingBlock) {
        context.currentSubagentThinkingBlock = {
          type: 'subagent_thinking',
          content: '',
          parentToolCallId,
          timestamp: Date.now(),
        }
      }
@@ -40,7 +47,7 @@ export function handleTextEvent(scope: ToolScope): StreamHandler {
      }
      context.subAgentContent[parentToolCallId] =
        (context.subAgentContent[parentToolCallId] || '') + chunk
      addContentBlock(context, { type: 'subagent_text', content: chunk })
      addContentBlock(context, { type: 'subagent_text', content: chunk, parentToolCallId })
      return
    }

@@ -340,6 +340,7 @@ function registerSubagentToolCall(
      type: 'tool_call',
      toolCall,
      calledBy: parentToolCall?.name,
      parentToolCallId,
    })
  }
}
@@ -53,10 +53,10 @@ export async function runHeadlessCopilotLifecycle(
      simRequestId,
      otelContext,
    })
    outcome = options.abortSignal?.aborted
      ? RequestTraceV1Outcome.cancelled
      : result.success
        ? RequestTraceV1Outcome.success
    outcome = result.success
      ? RequestTraceV1Outcome.success
      : options.abortSignal?.aborted || result.cancelled
        ? RequestTraceV1Outcome.cancelled
        : RequestTraceV1Outcome.error
    return result
  } catch (error) {
@@ -6,7 +6,10 @@ import { propagation, trace } from '@opentelemetry/api'
import { W3CTraceContextPropagator } from '@opentelemetry/core'
import { BasicTracerProvider } from '@opentelemetry/sdk-trace-base'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { MothershipStreamV1EventType } from '@/lib/copilot/generated/mothership-stream-v1'
import {
  MothershipStreamV1CompletionStatus,
  MothershipStreamV1EventType,
} from '@/lib/copilot/generated/mothership-stream-v1'

const {
  runCopilotLifecycle,
@@ -60,6 +63,7 @@ vi.mock('@/lib/copilot/request/session', () => ({
  registerActiveStream: vi.fn(),
  unregisterActiveStream: vi.fn(),
  startAbortPoller: vi.fn().mockReturnValue(setInterval(() => {}, 999999)),
  isExplicitStopReason: vi.fn().mockReturnValue(false),
  SSE_RESPONSE_HEADERS: {},
  StreamWriter: vi.fn().mockImplementation(() => ({
    attach: vi.fn().mockImplementation((ctrl: ReadableStreamDefaultController) => {
@@ -211,6 +215,46 @@ describe('createSSEStream terminal error handling', () => {
    expect(scheduleBufferCleanup).toHaveBeenCalledWith('stream-1')
  })

  it('publishes a cancelled completion (not an error) when the orchestrator reports cancelled without abortSignal aborted', async () => {
    runCopilotLifecycle.mockResolvedValue({
      success: false,
      cancelled: true,
      content: '',
      contentBlocks: [],
      toolCalls: [],
    })

    const stream = createSSEStream({
      requestPayload: { message: 'hello' },
      userId: 'user-1',
      streamId: 'stream-1',
      executionId: 'exec-1',
      runId: 'run-1',
      currentChat: null,
      isNewChat: false,
      message: 'hello',
      titleModel: 'gpt-5.4',
      requestId: 'req-cancelled',
      orchestrateOptions: {},
    })

    await drainStream(stream)

    expect(appendEvent).not.toHaveBeenCalledWith(
      expect.objectContaining({
        type: MothershipStreamV1EventType.error,
      })
    )
    expect(appendEvent).toHaveBeenCalledWith(
      expect.objectContaining({
        type: MothershipStreamV1EventType.complete,
        payload: expect.objectContaining({
          status: MothershipStreamV1CompletionStatus.cancelled,
        }),
      })
    )
  })

  it('passes an OTel context into the streaming lifecycle', async () => {
    let lifecycleTraceparent = ''
    runCopilotLifecycle.mockImplementation(async (_payload, options) => {
@@ -210,6 +210,7 @@ export function createSSEStream(params: StreamingOrchestrationParams): ReadableS

  const abortPoller = startAbortPoller(streamId, abortController, {
    requestId,
    chatId,
  })
  publisher.startKeepalive()

@@ -248,6 +249,11 @@ export function createSSEStream(params: StreamingOrchestrationParams): ReadableS
      onEvent: async (event) => {
        await publisher.publish(event)
      },
      onAbortObserved: (reason) => {
        if (!abortController.signal.aborted) {
          abortController.abort(reason)
        }
      },
    })

    lifecycleResult = result
@@ -265,7 +271,7 @@ export function createSSEStream(params: StreamingOrchestrationParams): ReadableS
    // 3. Otherwise → error.
    outcome = result.success
      ? RequestTraceV1Outcome.success
      : abortController.signal.aborted || publisher.clientDisconnected
      : result.cancelled || abortController.signal.aborted || publisher.clientDisconnected
        ? RequestTraceV1Outcome.cancelled
        : RequestTraceV1Outcome.error
    if (outcome === RequestTraceV1Outcome.cancelled) {

@@ -22,6 +22,12 @@ export const AbortReason = {
   * that the node that DID receive it wrote, and aborts on the poll.
   */
  RedisPoller: 'redis_abort_marker:poller',
  /**
   * Cross-process stop: same root cause as `RedisPoller`, but observed
   * by `runStreamLoop` at body close (the Go body ended before the
   * 250ms poller's next tick) rather than by the polling timer.
   */
  MarkerObservedAtBodyClose: 'redis_abort_marker:body_close',
  /** Internal timeout on the outbound explicit-abort fetch to Go. */
  ExplicitAbortFetchTimeout: 'timeout:go_explicit_abort_fetch',
} as const
@@ -38,5 +44,9 @@ export type AbortReasonValue = (typeof AbortReason)[keyof typeof AbortReason]
 * stops, mirroring `requestctx.IsExplicitUserStop` on the Go side.
 */
export function isExplicitStopReason(reason: unknown): boolean {
  return reason === AbortReason.UserStop || reason === AbortReason.RedisPoller
  return (
    reason === AbortReason.UserStop ||
    reason === AbortReason.RedisPoller ||
    reason === AbortReason.MarkerObservedAtBodyClose
  )
}
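
A sketch of the consumer-side branch this guard enables; `reportStreamFailure` is hypothetical, while the guard itself is the function above:

```ts
abortController.signal.addEventListener('abort', () => {
  const reason = abortController.signal.reason
  if (isExplicitStopReason(reason)) {
    // User-initiated stop: log quietly, no error surfaced.
    logger.info('stream stopped by user', { reason })
  } else {
    reportStreamFailure(reason) // hypothetical error reporter
  }
})
```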

apps/sim/lib/copilot/request/session/abort.test.ts (new file, 161 lines)
@@ -0,0 +1,161 @@
/**
 * @vitest-environment node
 */

import { redisConfigMock, redisConfigMockFns } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

const { mockHasAbortMarker, mockClearAbortMarker, mockWriteAbortMarker } = vi.hoisted(() => ({
  mockHasAbortMarker: vi.fn().mockResolvedValue(false),
  mockClearAbortMarker: vi.fn().mockResolvedValue(undefined),
  mockWriteAbortMarker: vi.fn().mockResolvedValue(undefined),
}))

vi.mock('@/lib/core/config/redis', () => redisConfigMock)
vi.mock('@/lib/copilot/request/session/buffer', () => ({
  hasAbortMarker: mockHasAbortMarker,
  clearAbortMarker: mockClearAbortMarker,
  writeAbortMarker: mockWriteAbortMarker,
}))
vi.mock('@/lib/copilot/request/otel', () => ({
  withCopilotSpan: (_span: unknown, _attrs: unknown, fn: (span: unknown) => unknown) =>
    fn({ setAttribute: vi.fn() }),
}))

import { startAbortPoller } from '@/lib/copilot/request/session/abort'

describe('startAbortPoller heartbeat', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    vi.useFakeTimers()
    mockHasAbortMarker.mockResolvedValue(false)
    redisConfigMockFns.mockExtendLock.mockResolvedValue(true)
  })

  afterEach(() => {
    vi.useRealTimers()
  })

  it('extends the chat stream lock approximately every heartbeat interval', async () => {
    const controller = new AbortController()
    const streamId = 'stream-heartbeat-1'
    const chatId = 'chat-heartbeat-1'

    const interval = startAbortPoller(streamId, controller, { chatId })

    try {
      await vi.advanceTimersByTimeAsync(15_000)
      expect(redisConfigMockFns.mockExtendLock).not.toHaveBeenCalled()

      await vi.advanceTimersByTimeAsync(6_000)

      expect(redisConfigMockFns.mockExtendLock).toHaveBeenCalledTimes(1)
      expect(redisConfigMockFns.mockExtendLock).toHaveBeenLastCalledWith(
        `copilot:chat-stream-lock:${chatId}`,
        streamId,
        60
      )

      await vi.advanceTimersByTimeAsync(20_000)
      expect(redisConfigMockFns.mockExtendLock).toHaveBeenCalledTimes(2)

      await vi.advanceTimersByTimeAsync(20_000)
      expect(redisConfigMockFns.mockExtendLock).toHaveBeenCalledTimes(3)
    } finally {
      clearInterval(interval)
    }
  })

  it('does not extend the lock when no chatId is passed (backward compat)', async () => {
    const controller = new AbortController()
    const interval = startAbortPoller('stream-no-chat', controller, {})

    try {
      await vi.advanceTimersByTimeAsync(90_000)
      expect(redisConfigMockFns.mockExtendLock).not.toHaveBeenCalled()
    } finally {
      clearInterval(interval)
    }
  })

  it('retries on the next tick when extendLock throws (no 20s backoff)', async () => {
    const controller = new AbortController()
    const streamId = 'stream-retry'
    const chatId = 'chat-retry'

    redisConfigMockFns.mockExtendLock.mockRejectedValueOnce(new Error('redis down'))

    const interval = startAbortPoller(streamId, controller, { chatId })

    try {
      await vi.advanceTimersByTimeAsync(20_000)
      expect(redisConfigMockFns.mockExtendLock).toHaveBeenCalledTimes(1)

      await vi.advanceTimersByTimeAsync(1_000)
      expect(redisConfigMockFns.mockExtendLock).toHaveBeenCalledTimes(2)
    } finally {
      clearInterval(interval)
    }
  })

  it('aborts the controller before clearing the marker so the marker is never observable as cleared while the signal is still unaborted', async () => {
    const controller = new AbortController()
    const streamId = 'stream-order-1'

    let signalAbortedWhenMarkerCleared: boolean | null = null
    mockClearAbortMarker.mockImplementationOnce(async () => {
      signalAbortedWhenMarkerCleared = controller.signal.aborted
    })
    mockHasAbortMarker.mockResolvedValueOnce(true)

    const interval = startAbortPoller(streamId, controller, {})

    try {
      await vi.advanceTimersByTimeAsync(300)

      expect(mockClearAbortMarker).toHaveBeenCalledWith(streamId)
      expect(signalAbortedWhenMarkerCleared).toBe(true)
      expect(controller.signal.aborted).toBe(true)
    } finally {
      clearInterval(interval)
    }
  })

  it('does not clear the marker when the signal is already aborted (no double abort)', async () => {
    const controller = new AbortController()
    controller.abort('preexisting')
    const streamId = 'stream-order-2'

    mockHasAbortMarker.mockResolvedValueOnce(true)

    const interval = startAbortPoller(streamId, controller, {})

    try {
      await vi.advanceTimersByTimeAsync(300)

      expect(mockClearAbortMarker).not.toHaveBeenCalled()
    } finally {
      clearInterval(interval)
    }
  })

  it('stops heartbeating after ownership is lost', async () => {
    const controller = new AbortController()
    const streamId = 'stream-lost'
    const chatId = 'chat-lost'

    redisConfigMockFns.mockExtendLock.mockResolvedValueOnce(false)

    const interval = startAbortPoller(streamId, controller, { chatId })

    try {
      await vi.advanceTimersByTimeAsync(21_000)
      expect(redisConfigMockFns.mockExtendLock).toHaveBeenCalledTimes(1)

      await vi.advanceTimersByTimeAsync(60_000)
      expect(redisConfigMockFns.mockExtendLock).toHaveBeenCalledTimes(1)
    } finally {
      clearInterval(interval)
    }
  })
})
@@ -5,7 +5,7 @@ import { AbortBackend } from '@/lib/copilot/generated/trace-attribute-values-v1'
 import { TraceAttr } from '@/lib/copilot/generated/trace-attributes-v1'
 import { TraceSpan } from '@/lib/copilot/generated/trace-spans-v1'
 import { withCopilotSpan } from '@/lib/copilot/request/otel'
-import { acquireLock, getRedisClient, releaseLock } from '@/lib/core/config/redis'
+import { acquireLock, extendLock, getRedisClient, releaseLock } from '@/lib/core/config/redis'
 import { AbortReason } from './abort-reason'
 import { clearAbortMarker, hasAbortMarker, writeAbortMarker } from './buffer'

@@ -17,8 +17,23 @@ const pendingChatStreams = new Map<
   { promise: Promise<void>; resolve: () => void; streamId: string }
 >()

-const DEFAULT_ABORT_POLL_MS = 1000
-const CHAT_STREAM_LOCK_TTL_SECONDS = 2 * 60 * 60
+const DEFAULT_ABORT_POLL_MS = 250
+
+/**
+ * TTL for the per-chat stream lock. Kept short so that if the Sim pod
+ * holding the lock dies (SIGKILL, OOM, a SIGTERM drain that doesn't
+ * reach the release path), the lock self-heals inside a minute rather
+ * than stranding the chat for hours. A live stream keeps the lock alive
+ * via `CHAT_STREAM_LOCK_HEARTBEAT_INTERVAL_MS` heartbeats.
+ */
+const CHAT_STREAM_LOCK_TTL_SECONDS = 60
+
+/**
+ * Heartbeat cadence for extending the per-chat stream lock. Set to a
+ * third of the TTL so one missed beat still leaves room for recovery
+ * before the lock expires under a still-live stream.
+ */
+const CHAT_STREAM_LOCK_HEARTBEAT_INTERVAL_MS = 20_000

 function registerPendingChatStream(chatId: string, streamId: string): void {
   let resolve!: () => void

@@ -262,10 +277,14 @@ const pollingStreams = new Set<string>()
 export function startAbortPoller(
   streamId: string,
   abortController: AbortController,
-  options?: { pollMs?: number; requestId?: string }
+  options?: { pollMs?: number; requestId?: string; chatId?: string }
 ): ReturnType<typeof setInterval> {
   const pollMs = options?.pollMs ?? DEFAULT_ABORT_POLL_MS
   const requestId = options?.requestId
+  const chatId = options?.chatId
+
+  let lastHeartbeatAt = Date.now()
+  let heartbeatOwnershipLost = false

   return setInterval(() => {
     if (pollingStreams.has(streamId)) return

@@ -287,6 +306,33 @@ export function startAbortPoller(
       } finally {
         pollingStreams.delete(streamId)
       }
+
+      if (!chatId || heartbeatOwnershipLost) return
+      if (Date.now() - lastHeartbeatAt < CHAT_STREAM_LOCK_HEARTBEAT_INTERVAL_MS) return
+
+      try {
+        const owned = await extendLock(
+          getChatStreamLockKey(chatId),
+          streamId,
+          CHAT_STREAM_LOCK_TTL_SECONDS
+        )
+        lastHeartbeatAt = Date.now()
+        if (!owned) {
+          heartbeatOwnershipLost = true
+          logger.warn('Lost ownership of chat stream lock — stopping heartbeat', {
+            chatId,
+            streamId,
+            ...(requestId ? { requestId } : {}),
+          })
+        }
+      } catch (error) {
+        logger.warn('Failed to extend chat stream lock TTL', {
+          chatId,
+          streamId,
+          ...(requestId ? { requestId } : {}),
+          error: toError(error).message,
+        })
+      }
     })()
   }, pollMs)
 }
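A minimal usage sketch of the poller as this diff defines it — the surrounding handler is hypothetical, but the signature (`streamId`, an `AbortController`, optional `pollMs`/`requestId`/`chatId`) and the `clearInterval` cleanup mirror the tests above:

```ts
import { startAbortPoller } from '@/lib/copilot/request/session/abort'

async function runWithAbortPolling(streamId: string, chatId: string): Promise<void> {
  const controller = new AbortController()
  // Polls the Redis abort marker every 250ms; because chatId is provided,
  // it also extends the 60s chat-stream lock roughly every 20s (TTL / 3).
  const poller = startAbortPoller(streamId, controller, { chatId })
  try {
    // ... drive the stream, honoring controller.signal.aborted ...
  } finally {
    clearInterval(poller) // the poller is a plain interval; the caller owns cleanup
  }
}
```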
@@ -165,6 +165,7 @@ function isStreamRef(value: unknown): value is MothershipStreamV1StreamRef {
   return (
     isRecord(value) &&
     typeof value.streamId === 'string' &&
     value.streamId.length > 0 &&
+    isOptionalString(value.chatId) &&
     isOptionalString(value.cursor)
   )
@@ -56,6 +56,7 @@ export interface ContentBlock {
   calledBy?: string
   timestamp: number
   endedAt?: number
+  parentToolCallId?: string
 }

 export interface StreamingContext {

@@ -86,6 +87,7 @@ export interface StreamingContext {
   subAgentParentStack: string[]
   subAgentContent: Record<string, string>
   subAgentToolCalls: Record<string, ToolCallState[]>
+  openSubagentParents?: Set<string>
   pendingContent: string
   streamComplete: boolean
   wasAborted: boolean

@@ -136,10 +138,24 @@ export interface OrchestratorOptions {
   onComplete?: (result: OrchestratorResult) => void | Promise<void>
   onError?: (error: Error) => void | Promise<void>
   abortSignal?: AbortSignal
+  onAbortObserved?: (reason: string) => void
   interactive?: boolean
 }

 export interface OrchestratorResult {
   success: boolean
+  /**
+   * True iff the non-success outcome was a user-initiated cancel
+   * (abort signal fired or client disconnected). Lets callers treat
+   * cancels differently from actual errors — notably, `buildOnComplete`
+   * must NOT finalize the chat row on cancel, because the browser's
+   * `/api/copilot/chat/stop` POST owns writing the partial assistant
+   * content and clearing `conversationId` in one UPDATE. Finalizing
+   * here would race and clear `conversationId` first, making the stop
+   * UPDATE match zero rows and the partial content vanish on refetch.
+   *
+   * Always false when `success=true`.
+   */
+  cancelled?: boolean
   content: string
   contentBlocks: ContentBlock[]
@@ -3,10 +3,11 @@ import { credential } from '@sim/db/schema'
 import { toError } from '@sim/utils/errors'
 import { eq } from 'drizzle-orm'
 import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/request/types'
+import { getCredentialActorContext } from '@/lib/credentials/access'

 export function executeManageCredential(
   rawParams: Record<string, unknown>,
-  _context: ExecutionContext
+  context: ExecutionContext
 ): Promise<ToolCallResult> {
   const params = rawParams as {
     operation: string

@@ -17,26 +18,30 @@ export function executeManageCredential(
   const { operation, displayName } = params
   return (async () => {
     try {
+      if (!context?.userId) {
+        return { success: false, error: 'Authentication required' }
+      }
+
       switch (operation) {
         case 'rename': {
           const credentialId = params.credentialId
           if (!credentialId) return { success: false, error: 'credentialId is required for rename' }
           if (!displayName) return { success: false, error: 'displayName is required for rename' }
-          const [row] = await db
-            .select({
-              id: credential.id,
-              type: credential.type,
-              displayName: credential.displayName,
-            })
-            .from(credential)
-            .where(eq(credential.id, credentialId))
-            .limit(1)
-          if (!row) return { success: false, error: 'Credential not found' }
-          if (row.type !== 'oauth')
+          const actor = await getCredentialActorContext(credentialId, context.userId)
+          if (!actor.credential || !actor.hasWorkspaceAccess) {
+            return { success: false, error: 'Credential not found' }
+          }
+          if (actor.credential.type !== 'oauth') {
             return {
               success: false,
               error: 'Only OAuth credentials can be managed with this tool.',
             }
+          }
+          if (!actor.canWriteWorkspace && !actor.isAdmin) {
+            return { success: false, error: 'Write access required to rename this credential' }
+          }

           await db
             .update(credential)
             .set({ displayName, updatedAt: new Date() })

@@ -53,12 +58,16 @@ export function executeManageCredential(
         const failed: string[] = []

         for (const id of ids) {
-          const [row] = await db
-            .select({ id: credential.id, type: credential.type })
-            .from(credential)
-            .where(eq(credential.id, id))
-            .limit(1)
-          if (!row || row.type !== 'oauth') {
+          const actor = await getCredentialActorContext(id, context.userId)
+          if (
+            !actor.credential ||
+            !actor.hasWorkspaceAccess ||
+            actor.credential.type !== 'oauth'
+          ) {
             failed.push(id)
             continue
           }
+          if (!actor.canWriteWorkspace && !actor.isAdmin) {
+            failed.push(id)
+            continue
+          }
@@ -1,6 +1,9 @@
 import { db } from '@sim/db'
+import { knowledgeBase } from '@sim/db/schema'
+import { createLogger } from '@sim/logger'
 import { toError } from '@sim/utils/errors'
 import { generateId } from '@sim/utils/id'
+import { eq } from 'drizzle-orm'
 import type { ExecutionContext, ToolCallResult } from '@/lib/copilot/request/types'
 import { restoreKnowledgeBase } from '@/lib/knowledge/service'
 import { getTableById, restoreTable } from '@/lib/table/service'

@@ -10,6 +13,8 @@ import {
 } from '@/lib/uploads/contexts/workspace/workspace-file-manager'
 import { restoreWorkflow } from '@/lib/workflows/lifecycle'
 import { performRestoreFolder } from '@/lib/workflows/orchestration/folder-lifecycle'
 import { getWorkflowById } from '@/lib/workflows/utils'
+import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'

+const logger = createLogger('RestoreResource')

@@ -33,10 +38,25 @@ export async function executeRestoreResource(
   }

   const requestId = generateId().slice(0, 8)
+  const callerWorkspaceId = context.workspaceId
+
+  const hasWriteAccess = async (resourceWorkspaceId: string | null | undefined) => {
+    if (!resourceWorkspaceId || resourceWorkspaceId !== callerWorkspaceId) return false
+    const permission = await getUserEntityPermissions(
+      context.userId,
+      'workspace',
+      resourceWorkspaceId
+    )
+    return permission === 'write' || permission === 'admin'
+  }

   try {
     switch (type) {
       case 'workflow': {
+        const existing = await getWorkflowById(id, { includeArchived: true })
+        if (!existing || !(await hasWriteAccess(existing.workspaceId))) {
+          return { success: false, error: 'Workflow not found' }
+        }
         const result = await restoreWorkflow(id, { requestId })
         if (!result.restored) {
           return { success: false, error: 'Workflow not found or not archived' }

@@ -50,9 +70,13 @@ export async function executeRestoreResource(
       }

       case 'table': {
+        const existing = await getTableById(id, { includeArchived: true })
+        if (!existing || !(await hasWriteAccess(existing.workspaceId))) {
+          return { success: false, error: 'Table not found' }
+        }
         await restoreTable(id, requestId)
         const table = await getTableById(id)
-        const tableName = table?.name || id
+        const tableName = table?.name || existing.name
         logger.info('Table restored via copilot', { tableId: id, name: tableName })
         return {
           success: true,

@@ -62,6 +86,9 @@ export async function executeRestoreResource(
       }

       case 'file': {
+        if (!(await hasWriteAccess(context.workspaceId))) {
+          return { success: false, error: 'File not found' }
+        }
         await restoreWorkspaceFile(context.workspaceId, id)
         const fileRecord = await getWorkspaceFile(context.workspaceId, id)
         const fileName = fileRecord?.name || id

@@ -74,6 +101,14 @@ export async function executeRestoreResource(
       }

       case 'knowledgebase': {
+        const [existing] = await db
+          .select({ workspaceId: knowledgeBase.workspaceId })
+          .from(knowledgeBase)
+          .where(eq(knowledgeBase.id, id))
+          .limit(1)
+        if (!existing || !(await hasWriteAccess(existing.workspaceId))) {
+          return { success: false, error: 'Knowledge base not found' }
+        }
         await restoreKnowledgeBase(id, requestId)
         logger.info('Knowledge base restored via copilot', { knowledgeBaseId: id })
         return {

@@ -83,6 +118,9 @@ export async function executeRestoreResource(
       }

       case 'folder': {
+        if (!(await hasWriteAccess(context.workspaceId))) {
+          return { success: false, error: 'Folder not found' }
+        }
         const result = await performRestoreFolder({
           folderId: id,
           workspaceId: context.workspaceId,
@@ -28,6 +28,7 @@ import {
   setWorkflowVariables,
   updateFolderRecord,
   updateWorkflowRecord,
+  verifyFolderWorkspace,
 } from '@/lib/workflows/utils'
 import { hasExecutionResult } from '@/executor/utils/errors'
 import type { BlockState, WorkflowState } from '@/stores/workflows/workflow/types'

@@ -522,7 +523,13 @@ export async function executeMoveWorkflow(

   for (const workflowId of workflowIds) {
     try {
-      await ensureWorkflowAccess(workflowId, context.userId, 'write')
+      const { workspaceId } = await ensureWorkflowAccess(workflowId, context.userId, 'write')
+      if (folderId) {
+        if (!workspaceId || !(await verifyFolderWorkspace(folderId, workspaceId))) {
+          failed.push(workflowId)
+          continue
+        }
+      }
       assertWorkflowMutationNotAborted(context)
       await updateWorkflowRecord(workflowId, { folderId })
       moved.push(workflowId)

@@ -562,6 +569,14 @@ export async function executeMoveFolder(

   const workspaceId = context.workspaceId || (await getDefaultWorkspaceId(context.userId))
   await ensureWorkspaceAccess(workspaceId, context.userId, 'write')

+  if (!(await verifyFolderWorkspace(folderId, workspaceId))) {
+    return { success: false, error: 'Folder not found' }
+  }
+  if (parentId && !(await verifyFolderWorkspace(parentId, workspaceId))) {
+    return { success: false, error: 'Parent folder not found' }
+  }
+
   assertWorkflowMutationNotAborted(context)
   await updateFolderRecord(folderId, { parentId })

@@ -1007,6 +1022,11 @@ export async function executeRenameFolder(

   const workspaceId = context.workspaceId || (await getDefaultWorkspaceId(context.userId))
   await ensureWorkspaceAccess(workspaceId, context.userId, 'write')

+  if (!(await verifyFolderWorkspace(folderId, workspaceId))) {
+    return { success: false, error: 'Folder not found' }
+  }
+
   assertWorkflowMutationNotAborted(context)
   await updateFolderRecord(folderId, { name })
@@ -105,11 +105,12 @@ export const getJobLogsServerTool: BaseServerTool<GetJobLogsArgs, JobLogEntry[]>
   }

   const wsId = workspaceId || context.workspaceId
-  if (wsId) {
-    const access = await checkWorkspaceAccess(wsId, context.userId)
-    if (!access.hasAccess) {
-      throw new Error('Unauthorized workspace access')
-    }
+  if (!wsId) {
+    throw new Error('Workspace context required')
   }
+  const access = await checkWorkspaceAccess(wsId, context.userId)
+  if (!access.hasAccess) {
+    throw new Error('Unauthorized workspace access')
+  }

   const clampedLimit = Math.min(Math.max(1, limit), 5)

@@ -121,7 +122,10 @@ export const getJobLogsServerTool: BaseServerTool<GetJobLogsArgs, JobLogEntry[]>
     includeDetails,
   })

-  const conditions = [eq(jobExecutionLogs.scheduleId, jobId)]
+  const conditions = [
+    eq(jobExecutionLogs.scheduleId, jobId),
+    eq(jobExecutionLogs.workspaceId, wsId),
+  ]
   if (executionId) {
     conditions.push(eq(jobExecutionLogs.executionId, executionId))
   }
@@ -37,6 +37,11 @@ import {
 import { StorageService } from '@/lib/uploads'
 import { resolveWorkspaceFileReference } from '@/lib/uploads/contexts/workspace/workspace-file-manager'
 import { getQueryStrategy, handleVectorOnlySearch } from '@/app/api/knowledge/search/utils'
+import {
+  checkDocumentWriteAccess,
+  checkKnowledgeBaseAccess,
+  checkKnowledgeBaseWriteAccess,
+} from '@/app/api/knowledge/utils'

 const logger = createLogger('KnowledgeBaseServerTool')

@@ -141,6 +146,14 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
       }
     }

+    const access = await checkKnowledgeBaseAccess(args.knowledgeBaseId, context.userId)
+    if (!access.hasAccess) {
+      return {
+        success: false,
+        message: `Knowledge base with ID "${args.knowledgeBaseId}" not found`,
+      }
+    }
+
     const knowledgeBase = await getKnowledgeBaseById(args.knowledgeBaseId)
     if (!knowledgeBase) {
       return {

@@ -187,6 +200,14 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
       }
     }

+    const access = await checkKnowledgeBaseAccess(args.knowledgeBaseId, context.userId)
+    if (!access.hasAccess) {
+      return {
+        success: false,
+        message: `Knowledge base with ID "${args.knowledgeBaseId}" not found`,
+      }
+    }
+
     const kb = await getKnowledgeBaseById(args.knowledgeBaseId)
     if (!kb) {
       return {

@@ -257,6 +278,17 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
       }
     }

+    const writeAccess = await checkKnowledgeBaseWriteAccess(
+      args.knowledgeBaseId,
+      context.userId
+    )
+    if (!writeAccess.hasAccess) {
+      return {
+        success: false,
+        message: `Knowledge base with ID "${args.knowledgeBaseId}" not found`,
+      }
+    }
+
     const targetKb = await getKnowledgeBaseById(args.knowledgeBaseId)
     if (!targetKb || !targetKb.workspaceId) {
       return {

@@ -363,6 +395,17 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
       }
     }

+    const writeAccess = await checkKnowledgeBaseWriteAccess(
+      args.knowledgeBaseId,
+      context.userId
+    )
+    if (!writeAccess.hasAccess) {
+      return {
+        success: false,
+        message: `Knowledge base with ID "${args.knowledgeBaseId}" not found`,
+      }
+    }
+
     const requestId = generateId().slice(0, 8)
     assertNotAborted()
     const updatedKb = await updateKnowledgeBase(args.knowledgeBaseId, updates, requestId)

@@ -400,6 +443,12 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
       const notFound: string[] = []

       for (const kbId of kbIds) {
+        const writeAccess = await checkKnowledgeBaseWriteAccess(kbId, context.userId)
+        if (!writeAccess.hasAccess) {
+          notFound.push(kbId)
+          continue
+        }
+
         const kbToDelete = await getKnowledgeBaseById(kbId)
         if (!kbToDelete) {
           notFound.push(kbId)

@@ -444,8 +493,17 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
       const failed: string[] = []

       for (const docId of docIds) {
-        const requestId = generateId().slice(0, 8)
-        assertNotAborted()
+        const docAccess = await checkDocumentWriteAccess(
+          args.knowledgeBaseId,
+          docId,
+          context.userId
+        )
+        if (!docAccess.hasAccess) {
+          failed.push(docId)
+          continue
+        }
+        const requestId = generateId().slice(0, 8)
         const result = await deleteDocument(docId, requestId)
         if (result.success) {
           deleted.push(docId)

@@ -481,6 +539,17 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
           message: 'At least one of filename or enabled is required for update_document',
         }
       }
+      const docAccess = await checkDocumentWriteAccess(
+        args.knowledgeBaseId,
+        args.documentId,
+        context.userId
+      )
+      if (!docAccess.hasAccess) {
+        return {
+          success: false,
+          message: `Document with ID "${args.documentId}" not found`,
+        }
+      }
       const requestId = generateId().slice(0, 8)
       assertNotAborted()
       await updateDocument(args.documentId, updateData, requestId)

@@ -503,6 +572,14 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
       }
     }

+    const access = await checkKnowledgeBaseAccess(args.knowledgeBaseId, context.userId)
+    if (!access.hasAccess) {
+      return {
+        success: false,
+        message: `Knowledge base with ID "${args.knowledgeBaseId}" not found`,
+      }
+    }
+
     const tagDefinitions = await getDocumentTagDefinitions(args.knowledgeBaseId)

     logger.info('Tag definitions listed via copilot', {

@@ -537,6 +614,18 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
           message: 'tagDisplayName is required for create_tag operation',
         }
       }

+      const writeAccess = await checkKnowledgeBaseWriteAccess(
+        args.knowledgeBaseId,
+        context.userId
+      )
+      if (!writeAccess.hasAccess) {
+        return {
+          success: false,
+          message: `Knowledge base with ID "${args.knowledgeBaseId}" not found`,
+        }
+      }
+
      const fieldType = args.tagFieldType || 'text'

      const tagSlot = await getNextAvailableSlot(args.knowledgeBaseId, fieldType)

@@ -606,6 +695,17 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
       }
     }

+      const writeAccess = await checkKnowledgeBaseWriteAccess(
+        existingTag.knowledgeBaseId,
+        context.userId
+      )
+      if (!writeAccess.hasAccess) {
+        return {
+          success: false,
+          message: `Tag definition with ID "${args.tagDefinitionId}" not found`,
+        }
+      }
+
      const requestId = generateId().slice(0, 8)
      assertNotAborted()
      const updatedTag = await updateTagDefinition(args.tagDefinitionId, updateData, requestId)

@@ -643,6 +743,17 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
       }
     }

+      const writeAccess = await checkKnowledgeBaseWriteAccess(
+        args.knowledgeBaseId,
+        context.userId
+      )
+      if (!writeAccess.hasAccess) {
+        return {
+          success: false,
+          message: `Knowledge base with ID "${args.knowledgeBaseId}" not found`,
+        }
+      }
+
      const requestId = generateId().slice(0, 8)
      assertNotAborted()
      const deleted = await deleteTagDefinition(

@@ -677,6 +788,14 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
       }
     }

+    const access = await checkKnowledgeBaseAccess(args.knowledgeBaseId, context.userId)
+    if (!access.hasAccess) {
+      return {
+        success: false,
+        message: `Knowledge base with ID "${args.knowledgeBaseId}" not found`,
+      }
+    }
+
     const requestId = generateId().slice(0, 8)
     const stats = await getTagUsageStats(args.knowledgeBaseId, requestId)

@@ -702,6 +821,17 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
       }
     }

+      const writeAccess = await checkKnowledgeBaseWriteAccess(
+        args.knowledgeBaseId,
+        context.userId
+      )
+      if (!writeAccess.hasAccess) {
+        return {
+          success: false,
+          message: `Knowledge base with ID "${args.knowledgeBaseId}" not found`,
+        }
+      }
+
     const createBody: Record<string, unknown> = {
       connectorType: args.connectorType,
       sourceConfig: args.sourceConfig ?? {},

@@ -762,6 +892,11 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
         return { success: false, message: `Connector "${args.connectorId}" not found` }
       }

+      const writeAccess = await checkKnowledgeBaseWriteAccess(kbId, context.userId)
+      if (!writeAccess.hasAccess) {
+        return { success: false, message: `Connector "${args.connectorId}" not found` }
+      }
+
       const updateBody: Record<string, unknown> = {}
       if (args.sourceConfig !== undefined) updateBody.sourceConfig = args.sourceConfig
       if (args.syncIntervalMinutes !== undefined)

@@ -810,6 +945,11 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
         return { success: false, message: `Connector "${args.connectorId}" not found` }
       }

+      const writeAccess = await checkKnowledgeBaseWriteAccess(deleteKbId, context.userId)
+      if (!writeAccess.hasAccess) {
+        return { success: false, message: `Connector "${args.connectorId}" not found` }
+      }
+
       assertNotAborted()
       const deleteRes = await connectorApiCall(
         context.userId,

@@ -843,6 +983,11 @@ export const knowledgeBaseServerTool: BaseServerTool<KnowledgeBaseArgs, Knowledg
         return { success: false, message: `Connector "${args.connectorId}" not found` }
       }

+      const writeAccess = await checkKnowledgeBaseWriteAccess(syncKbId, context.userId)
+      if (!writeAccess.hasAccess) {
+        return { success: false, message: `Connector "${args.connectorId}" not found` }
+      }
+
       assertNotAborted()
       const syncRes = await connectorApiCall(
         context.userId,
@@ -223,9 +223,12 @@ export const userTableServerTool: BaseServerTool<UserTableArgs, UserTableResult>
       if (!args.tableId) {
         return { success: false, message: 'Table ID is required' }
       }
+      if (!workspaceId) {
+        return { success: false, message: 'Workspace ID is required' }
+      }

       const table = await getTableById(args.tableId)
-      if (!table) {
+      if (!table || table.workspaceId !== workspaceId) {
         return { success: false, message: `Table not found: ${args.tableId}` }
       }

@@ -240,9 +243,12 @@ export const userTableServerTool: BaseServerTool<UserTableArgs, UserTableResult>
       if (!args.tableId) {
         return { success: false, message: 'Table ID is required' }
       }
+      if (!workspaceId) {
+        return { success: false, message: 'Workspace ID is required' }
+      }

       const table = await getTableById(args.tableId)
-      if (!table) {
+      if (!table || table.workspaceId !== workspaceId) {
         return { success: false, message: `Table not found: ${args.tableId}` }
       }

@@ -816,6 +822,9 @@ export const userTableServerTool: BaseServerTool<UserTableArgs, UserTableResult>
       if (!args.tableId) {
         return { success: false, message: 'Table ID is required' }
       }
+      if (!workspaceId) {
+        return { success: false, message: 'Workspace ID is required' }
+      }
       const col = (args as Record<string, unknown>).column as
         | {
             name: string

@@ -830,6 +839,10 @@ export const userTableServerTool: BaseServerTool<UserTableArgs, UserTableResult>
             message: 'column with name and type is required for add_column',
           }
         }
+      const tableForAdd = await getTableById(args.tableId)
+      if (!tableForAdd || tableForAdd.workspaceId !== workspaceId) {
+        return { success: false, message: `Table not found: ${args.tableId}` }
+      }
       const requestId = generateId().slice(0, 8)
       assertNotAborted()
       const updated = await addTableColumn(args.tableId, col, requestId)

@@ -844,11 +857,18 @@ export const userTableServerTool: BaseServerTool<UserTableArgs, UserTableResult>
       if (!args.tableId) {
         return { success: false, message: 'Table ID is required' }
       }
+      if (!workspaceId) {
+        return { success: false, message: 'Workspace ID is required' }
+      }
       const colName = (args as Record<string, unknown>).columnName as string | undefined
       const newColName = (args as Record<string, unknown>).newName as string | undefined
       if (!colName || !newColName) {
         return { success: false, message: 'columnName and newName are required' }
       }
+      const tableForRename = await getTableById(args.tableId)
+      if (!tableForRename || tableForRename.workspaceId !== workspaceId) {
+        return { success: false, message: `Table not found: ${args.tableId}` }
+      }
       const requestId = generateId().slice(0, 8)
       assertNotAborted()
       const updated = await renameColumn(

@@ -866,12 +886,19 @@ export const userTableServerTool: BaseServerTool<UserTableArgs, UserTableResult>
       if (!args.tableId) {
         return { success: false, message: 'Table ID is required' }
       }
+      if (!workspaceId) {
+        return { success: false, message: 'Workspace ID is required' }
+      }
       const colName = (args as Record<string, unknown>).columnName as string | undefined
       const colNames = (args as Record<string, unknown>).columnNames as string[] | undefined
       const names = colNames ?? (colName ? [colName] : null)
       if (!names || names.length === 0) {
         return { success: false, message: 'columnName or columnNames is required' }
       }
+      const tableForDelete = await getTableById(args.tableId)
+      if (!tableForDelete || tableForDelete.workspaceId !== workspaceId) {
+        return { success: false, message: `Table not found: ${args.tableId}` }
+      }
       const requestId = generateId().slice(0, 8)
       if (names.length === 1) {
         assertNotAborted()

@@ -901,6 +928,9 @@ export const userTableServerTool: BaseServerTool<UserTableArgs, UserTableResult>
       if (!args.tableId) {
         return { success: false, message: 'Table ID is required' }
       }
+      if (!workspaceId) {
+        return { success: false, message: 'Workspace ID is required' }
+      }
       const colName = (args as Record<string, unknown>).columnName as string | undefined
       if (!colName) {
         return { success: false, message: 'columnName is required' }

@@ -913,6 +943,10 @@ export const userTableServerTool: BaseServerTool<UserTableArgs, UserTableResult>
             message: 'At least one of newType or unique must be provided',
           }
         }
+      const tableForUpdate = await getTableById(args.tableId)
+      if (!tableForUpdate || tableForUpdate.workspaceId !== workspaceId) {
+        return { success: false, message: `Table not found: ${args.tableId}` }
+      }
       const requestId = generateId().slice(0, 8)
       let result: TableDefinition | undefined
       if (newType !== undefined) {
@@ -15,6 +15,7 @@ vi.mock('ioredis', () => ({

 import {
   closeRedisConnection,
+  extendLock,
   getRedisClient,
   onRedisReconnect,
   resetForTesting,

@@ -120,6 +121,48 @@ describe('redis config', () => {
     })
   })

+  describe('extendLock', () => {
+    const lockKey = 'copilot:chat-stream-lock:chat-1'
+    const value = 'stream-abc'
+    const ttlSeconds = 60
+
+    it('returns true when the caller still owns the lock and EXPIRE succeeds', async () => {
+      mockRedisInstance.eval.mockResolvedValueOnce(1)
+
+      const extended = await extendLock(lockKey, value, ttlSeconds)
+
+      expect(extended).toBe(true)
+      expect(mockRedisInstance.eval).toHaveBeenCalledWith(
+        expect.stringContaining('expire'),
+        1,
+        lockKey,
+        value,
+        ttlSeconds
+      )
+    })
+
+    it('returns false when the value does not match (lock owned by another)', async () => {
+      mockRedisInstance.eval.mockResolvedValueOnce(0)
+
+      const extended = await extendLock(lockKey, value, ttlSeconds)
+
+      expect(extended).toBe(false)
+    })
+
+    it('returns true as a no-op when Redis is unavailable', async () => {
+      vi.resetModules()
+      vi.doMock('@/lib/core/config/env', () =>
+        createEnvMock({ REDIS_URL: undefined as unknown as string })
+      )
+      const { extendLock: extendLockNoRedis } = await import('@/lib/core/config/redis')
+
+      const extended = await extendLockNoRedis(lockKey, value, ttlSeconds)
+
+      expect(extended).toBe(true)
+      vi.doUnmock('@/lib/core/config/env')
+    })
+  })
+
   describe('retryStrategy', () => {
     function captureRetryStrategy(): (times: number) => number {
       let capturedConfig: Record<string, unknown> = {}
@@ -136,6 +136,21 @@ else
   end
 `

+/**
+ * Lua script for safe lock TTL extension.
+ * Only refreshes the expiry if the value matches (ownership verification),
+ * so a stale heartbeat from a prior owner cannot extend a lock currently
+ * held by someone else after a TTL eviction.
+ * Returns 1 if the TTL was extended, 0 if not (value mismatch or key gone).
+ */
+const EXTEND_LOCK_SCRIPT = `
+if redis.call("get", KEYS[1]) == ARGV[1] then
+  return redis.call("expire", KEYS[1], ARGV[2])
+else
+  return 0
+end
+`
+
 /**
  * Acquire a distributed lock using Redis SET NX.
  * Returns true if lock acquired, false if already held.

@@ -175,6 +190,29 @@ export async function releaseLock(lockKey: string, value: string): Promise<boole
   return result === 1
 }

+/**
+ * Extend the TTL of a distributed lock if still owned by the caller.
+ * Returns true if the caller still owns the lock and the TTL was refreshed,
+ * false if the lock has been taken over by another owner or has expired.
+ *
+ * When Redis is not available, returns true (no-op) to match the behavior
+ * of `acquireLock` / `releaseLock`: single-replica deployments without
+ * Redis never held a real lock, so heartbeat success is implicit.
+ */
+export async function extendLock(
+  lockKey: string,
+  value: string,
+  expirySeconds: number
+): Promise<boolean> {
+  const redis = getRedisClient()
+  if (!redis) {
+    return true
+  }
+
+  const result = await redis.eval(EXTEND_LOCK_SCRIPT, 1, lockKey, value, expirySeconds)
+  return result === 1
+}
+
 /**
  * Close the Redis connection.
  * Use for graceful shutdown.
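Together, `acquireLock`, `extendLock`, and `releaseLock` give an ownership-checked lock: the random `value` proves ownership on every call. A sketch of the intended acquire → heartbeat → release lifecycle; `acquireLock(lockKey, value, ttl)` is assumed to mirror `releaseLock`'s shape, since its signature is not shown in this diff:

```ts
import { acquireLock, extendLock, releaseLock } from '@/lib/core/config/redis'

async function withHeartbeatLock(
  lockKey: string,
  value: string,
  work: () => Promise<void>
): Promise<boolean> {
  const TTL_SECONDS = 60
  // Assumed signature, mirroring releaseLock(lockKey, value) — not shown in this diff.
  if (!(await acquireLock(lockKey, value, TTL_SECONDS))) return false

  // Re-extend at TTL / 3 so one missed beat still leaves recovery headroom.
  const heartbeat = setInterval(async () => {
    const owned = await extendLock(lockKey, value, TTL_SECONDS)
    if (!owned) clearInterval(heartbeat) // lock expired or was taken over; stop beating
  }, (TTL_SECONDS / 3) * 1000)

  try {
    await work()
    return true
  } finally {
    clearInterval(heartbeat)
    await releaseLock(lockKey, value)
  }
}
```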
@@ -704,7 +704,7 @@ export const OAUTH_PROVIDERS: Record<string, OAuthProviderConfig> = {
   services: {
     slack: {
       name: 'Slack',
-      description: 'Send messages using a bot for Slack.',
+      description: 'Use Slack messaging, files, reactions, views, and canvases.',
       providerId: 'slack',
       icon: SlackIcon,
       baseProviderIcon: SlackIcon,

@@ -722,6 +722,7 @@ export const OAUTH_PROVIDERS: Record<string, OAuthProviderConfig> = {
         // TODO: Add 'users:read.email' once Slack app review is approved
         'files:write',
         'files:read',
+        'canvases:read',
         'canvases:write',
         'reactions:write',
       ],
@@ -278,7 +278,8 @@ export const SCOPE_DESCRIPTIONS: Record<string, string> = {
   'users:read.email': 'View user email addresses',
   'files:write': 'Upload files',
   'files:read': 'Download and read files',
-  'canvases:write': 'Create canvas documents',
+  'canvases:read': 'Read canvas sections',
+  'canvases:write': 'Create, edit, and delete canvas documents',
   'reactions:write': 'Add emoji reactions to messages',

   // Webflow scopes
@@ -3,11 +3,58 @@
  */
 import { dbChainMock, dbChainMockFns, resetDbChainMock } from '@sim/testing'
 import { beforeEach, describe, expect, it, vi } from 'vitest'
-import { updateRow } from '@/lib/table/service'
+import {
+  batchInsertRows,
+  deleteColumn,
+  insertRow,
+  renameColumn,
+  replaceTableRows,
+  updateRow,
+  upsertRow,
+} from '@/lib/table/service'
 import type { TableDefinition } from '@/lib/table/types'
+import { getUniqueColumns } from '@/lib/table/validation'

 vi.mock('@sim/db', () => dbChainMock)

+vi.mock('@/lib/table/validation', () => ({
+  validateRowSize: vi.fn(() => ({ valid: true, errors: [] })),
+  validateRowAgainstSchema: vi.fn(() => ({ valid: true, errors: [] })),
+  validateTableName: vi.fn(() => ({ valid: true, errors: [] })),
+  validateTableSchema: vi.fn(() => ({ valid: true, errors: [] })),
+  getUniqueColumns: vi.fn(() => []),
+  checkUniqueConstraintsDb: vi.fn(async () => ({ valid: true, errors: [] })),
+  checkBatchUniqueConstraintsDb: vi.fn(async () => ({ valid: true, errors: [] })),
+}))
+
+/**
+ * Inspects the queued `trx.execute(...)` calls for SQL containing `substring`.
+ * Works with both `sql\`...\`` (produces `{ strings, values }`) and `sql.raw(...)`
+ * (produces `{ rawSql }`) from the global drizzle mock.
+ */
+function findExecutedSqlContaining(substring: string): boolean {
+  return dbChainMockFns.execute.mock.calls.some(([arg]) => {
+    if (!arg || typeof arg !== 'object') return false
+    const a = arg as Record<string, unknown>
+    if (Array.isArray(a.strings)) {
+      return (a.strings as string[]).some((s) => typeof s === 'string' && s.includes(substring))
+    }
+    if (typeof a.rawSql === 'string') {
+      return (a.rawSql as string).includes(substring)
+    }
+    return false
+  })
+}
+
+function findExecutedRawSql(substring: string): string | undefined {
+  for (const [arg] of dbChainMockFns.execute.mock.calls) {
+    if (!arg || typeof arg !== 'object') continue
+    const raw = (arg as { rawSql?: unknown }).rawSql
+    if (typeof raw === 'string' && raw.includes(substring)) return raw
+  }
+  return undefined
+}
+
 const EXISTING_ROW = {
   id: 'row-1',
   tableId: 'tbl-1',

@@ -106,3 +153,289 @@ describe('updateRow — partial merge', () => {
     ).rejects.toThrow('Row not found')
   })
 })
+
+describe('insertRow — position race safety (migration 0198 + advisory lock)', () => {
+  beforeEach(() => {
+    vi.resetAllMocks()
+    resetDbChainMock()
+    vi.mocked(getUniqueColumns).mockReturnValue([])
+  })
+
+  it('auto-position inserts acquire the per-table advisory lock before reading max(position)', async () => {
+    await expect(
+      insertRow({ tableId: 'tbl-1', data: { name: 'a' }, workspaceId: 'ws-1' }, TABLE, 'req-1')
+    ).rejects.toBeDefined()
+
+    expect(findExecutedSqlContaining('pg_advisory_xact_lock')).toBe(true)
+    expect(findExecutedSqlContaining('hashtextextended')).toBe(true)
+  })
+
+  it('explicit-position inserts also acquire the advisory lock to serialize position shifts', async () => {
+    dbChainMockFns.limit.mockResolvedValueOnce([])
+    dbChainMockFns.returning.mockResolvedValueOnce([
+      {
+        id: 'row-1',
+        tableId: 'tbl-1',
+        workspaceId: 'ws-1',
+        data: { name: 'a' },
+        position: 5,
+        createdAt: new Date(),
+        updatedAt: new Date(),
+      },
+    ])
+
+    await insertRow(
+      { tableId: 'tbl-1', data: { name: 'a' }, workspaceId: 'ws-1', position: 5 },
+      TABLE,
+      'req-1'
+    )
+
+    // `(table_id, position)` index is non-unique, so concurrent explicit-position
+    // inserts at the same slot could both skip the shift and duplicate — lock
+    // serializes them.
+    expect(findExecutedSqlContaining('pg_advisory_xact_lock')).toBe(true)
+  })
+
+  it('batchInsertRows acquires the advisory lock (always auto-positioned)', async () => {
+    await expect(
+      batchInsertRows(
+        { tableId: 'tbl-1', rows: [{ name: 'a' }, { name: 'b' }], workspaceId: 'ws-1' },
+        TABLE,
+        'req-1'
+      )
+    ).rejects.toBeDefined()
+
+    expect(findExecutedSqlContaining('pg_advisory_xact_lock')).toBe(true)
+  })
+
+  it('batchInsertRows with explicit positions acquires the advisory lock', async () => {
+    dbChainMockFns.returning.mockResolvedValueOnce([
+      {
+        id: 'row-1',
+        tableId: 'tbl-1',
+        workspaceId: 'ws-1',
+        data: { name: 'a' },
+        position: 3,
+        createdAt: new Date(),
+        updatedAt: new Date(),
+      },
+      {
+        id: 'row-2',
+        tableId: 'tbl-1',
+        workspaceId: 'ws-1',
+        data: { name: 'b' },
+        position: 4,
+        createdAt: new Date(),
+        updatedAt: new Date(),
+      },
+    ])
+
+    await batchInsertRows(
+      {
+        tableId: 'tbl-1',
+        rows: [{ name: 'a' }, { name: 'b' }],
+        workspaceId: 'ws-1',
+        positions: [3, 4],
+      },
+      TABLE,
+      'req-1'
+    )
+
+    expect(findExecutedSqlContaining('pg_advisory_xact_lock')).toBe(true)
+  })
+
+  it('upsertRow skips the advisory lock on the update path (match found)', async () => {
+    vi.mocked(getUniqueColumns).mockReturnValue([{ name: 'name', type: 'string', unique: true }])
+    dbChainMockFns.limit.mockResolvedValueOnce([
+      {
+        id: 'row-1',
+        tableId: 'tbl-1',
+        workspaceId: 'ws-1',
+        data: { name: 'Alice', age: 30 },
+        position: 0,
+        createdAt: new Date(),
+        updatedAt: new Date(),
+      },
+    ])
+    dbChainMockFns.returning.mockResolvedValueOnce([
+      {
+        id: 'row-1',
+        tableId: 'tbl-1',
+        workspaceId: 'ws-1',
+        data: { name: 'Alice', age: 31 },
+        position: 0,
+        createdAt: new Date(),
+        updatedAt: new Date(),
+      },
+    ])
+
+    await upsertRow(
+      {
+        tableId: 'tbl-1',
+        workspaceId: 'ws-1',
+        data: { name: 'Alice', age: 31 },
+        conflictTarget: 'name',
+      },
+      TABLE,
+      'req-1'
+    )
+
+    expect(findExecutedSqlContaining('pg_advisory_xact_lock')).toBe(false)
+  })
+
+  it('upsertRow acquires the advisory lock on the insert path (no match)', async () => {
+    vi.mocked(getUniqueColumns).mockReturnValue([{ name: 'name', type: 'string', unique: true }])
+    // Initial existing-row check + post-lock re-check both find no match.
+    dbChainMockFns.limit.mockResolvedValueOnce([])
+    dbChainMockFns.limit.mockResolvedValueOnce([])
+
+    await expect(
+      upsertRow(
+        {
+          tableId: 'tbl-1',
+          workspaceId: 'ws-1',
+          data: { name: 'Bob', age: 25 },
+          conflictTarget: 'name',
+        },
+        TABLE,
+        'req-1'
+      )
+    ).rejects.toBeDefined()
+
+    expect(findExecutedSqlContaining('pg_advisory_xact_lock')).toBe(true)
+  })
+
+  it('upsertRow re-checks after acquiring the lock and switches to UPDATE when a racing tx inserted the row', async () => {
+    vi.mocked(getUniqueColumns).mockReturnValue([{ name: 'name', type: 'string', unique: true }])
+    // Initial existing-row check: no match (another tx has not committed yet).
+    dbChainMockFns.limit.mockResolvedValueOnce([])
+    // Post-lock re-check: a racing tx just inserted the row.
+    const racedRow = {
+      id: 'row-raced',
+      tableId: 'tbl-1',
+      workspaceId: 'ws-1',
+      data: { name: 'Bob', age: 25 },
+      position: 0,
+      createdAt: new Date(),
+      updatedAt: new Date(),
+    }
+    dbChainMockFns.limit.mockResolvedValueOnce([racedRow])
+    // UPDATE returning the patched row.
+    dbChainMockFns.returning.mockResolvedValueOnce([
+      { ...racedRow, data: { name: 'Bob', age: 26 } },
+    ])
+
+    const result = await upsertRow(
+      {
+        tableId: 'tbl-1',
+        workspaceId: 'ws-1',
+        data: { name: 'Bob', age: 26 },
+        conflictTarget: 'name',
+      },
+      TABLE,
+      'req-1'
+    )
+
+    expect(findExecutedSqlContaining('pg_advisory_xact_lock')).toBe(true)
+    expect(result.operation).toBe('update')
+    expect(result.row.id).toBe('row-raced')
+    expect(dbChainMockFns.update).toHaveBeenCalled()
+    expect(dbChainMockFns.insert).not.toHaveBeenCalled()
+  })
+})
+
+describe('mutation paths — SET LOCAL timeouts', () => {
+  beforeEach(() => {
+    vi.clearAllMocks()
+    resetDbChainMock()
+  })
+
+  it('insertRow sets the default 10s/3s/5s timeouts', async () => {
+    await expect(
+      insertRow({ tableId: 'tbl-1', data: { name: 'a' }, workspaceId: 'ws-1' }, TABLE, 'req-1')
+    ).rejects.toBeDefined()
+
+    expect(findExecutedRawSql("SET LOCAL statement_timeout = '10000ms'")).toBeDefined()
+    expect(findExecutedRawSql("SET LOCAL lock_timeout = '3000ms'")).toBeDefined()
+    expect(
+      findExecutedRawSql("SET LOCAL idle_in_transaction_session_timeout = '5000ms'")
+    ).toBeDefined()
+  })
+
+  it('batchInsertRows raises statement_timeout to 60s', async () => {
+    await expect(
+      batchInsertRows(
+        { tableId: 'tbl-1', rows: [{ name: 'a' }], workspaceId: 'ws-1' },
+        TABLE,
+        'req-1'
+      )
+    ).rejects.toBeDefined()
+
+    expect(findExecutedRawSql("SET LOCAL statement_timeout = '60000ms'")).toBeDefined()
+  })
+
+  it('replaceTableRows scales statement_timeout with (existing + new) row count', async () => {
+    const bigTable: TableDefinition = { ...TABLE, rowCount: 100_000, maxRows: 1_000_000 }
+    const payload = Array.from({ length: 50_000 }, (_, i) => ({ name: `row-${i}` }))

+    await replaceTableRows(
+      { tableId: 'tbl-1', workspaceId: 'ws-1', rows: payload },
+      bigTable,
+      'req-1'
+    )
+
+    // (100_000 + 50_000) × 3ms/row = 450_000ms; above 120_000 floor, below 600_000 cap
+    expect(findExecutedRawSql("SET LOCAL statement_timeout = '450000ms'")).toBeDefined()
+  })
+
+  it('replaceTableRows caps scaled timeout at 10 minutes for very large tables', async () => {
+    const hugeTable: TableDefinition = { ...TABLE, rowCount: 10_000_000, maxRows: 20_000_000 }
+
+    await replaceTableRows({ tableId: 'tbl-1', workspaceId: 'ws-1', rows: [] }, hugeTable, 'req-1')
+
+    // 10M × 3ms = 30M ms, capped at 600_000ms (10 min)
+    expect(findExecutedRawSql("SET LOCAL statement_timeout = '600000ms'")).toBeDefined()
+  })
+
+  it('replaceTableRows uses the 120s floor on small tables', async () => {
+    const smallTable: TableDefinition = { ...TABLE, rowCount: 10 }
+
+    await replaceTableRows(
+      { tableId: 'tbl-1', workspaceId: 'ws-1', rows: [{ name: 'a' }, { name: 'b' }] },
+      smallTable,
+      'req-1'
+    )
+
+    // 12 × 3ms = 36ms → floored at 120_000ms
+    expect(findExecutedRawSql("SET LOCAL statement_timeout = '120000ms'")).toBeDefined()
+  })
+
+  it('renameColumn scales statement_timeout with table.rowCount', async () => {
+    dbChainMockFns.limit.mockResolvedValueOnce([{ ...TABLE, rowCount: 500_000 }])
+
+    await renameColumn({ tableId: 'tbl-1', oldName: 'name', newName: 'full_name' }, 'req-1')
+
+    // 500_000 × 2ms = 1_000_000 → capped at 600_000
+    expect(findExecutedRawSql("SET LOCAL statement_timeout = '600000ms'")).toBeDefined()
+  })
+
+  it('deleteColumn uses the 60s floor on small tables', async () => {
+    dbChainMockFns.limit.mockResolvedValueOnce([{ ...TABLE, rowCount: 100 }])
+
+    await deleteColumn({ tableId: 'tbl-1', columnName: 'age' }, 'req-1')
+
+    // 100 × 2ms = 200ms → floored at 60_000ms
+    expect(findExecutedRawSql("SET LOCAL statement_timeout = '60000ms'")).toBeDefined()
+  })
+
+  it('replaceTableRows acquires the per-table advisory lock to serialize concurrent replaces', async () => {
+    await replaceTableRows(
+      { tableId: 'tbl-1', workspaceId: 'ws-1', rows: [{ name: 'a' }] },
+      { ...TABLE, rowCount: 5 },
+      'req-1'
+    )
+
+    expect(findExecutedSqlContaining('pg_advisory_xact_lock')).toBe(true)
+    expect(findExecutedSqlContaining('hashtextextended')).toBe(true)
+  })
+})
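The tests above pin the ordering the service code in the next diff must follow: `SET LOCAL` timeouts first, then the per-table advisory lock, then the `max(position)` read. A condensed sketch of that auto-positioned write path, reusing the helper names this PR defines below (`setTableTxTimeouts`, `acquireTablePositionLock`, `nextAutoPosition` are module-private in the service, so this sketch presumes it lives in the same file); the insert itself is abbreviated, and the real `insertRow` also validates the row and enforces unique constraints first:

```ts
import { db } from '@sim/db'
import { userTableRows } from '@sim/db/schema' // assumed export, per the service's own queries

// Hypothetical condensed flow using the PR's module-private helpers below.
async function insertAutoPositioned(
  tableId: string,
  workspaceId: string,
  data: Record<string, unknown>
) {
  return db.transaction(async (trx) => {
    await setTableTxTimeouts(trx) // SET LOCAL statement/lock/idle timeouts
    await acquireTablePositionLock(trx, tableId) // serialize max(position)+1 readers
    const position = await nextAutoPosition(trx, tableId)
    const [row] = await trx
      .insert(userTableRows)
      .values({ id: `row_${Date.now()}`, tableId, workspaceId, data, position })
      .returning()
    return row // advisory lock and SET LOCALs clear at COMMIT/ROLLBACK
  })
}
```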
@@ -64,6 +64,80 @@ export class TableConflictError extends Error {
|
||||
|
||||
export type TableScope = 'active' | 'archived' | 'all'
|
||||
|
||||
type DbTransaction = Parameters<Parameters<typeof db.transaction>[0]>[0]
|
||||
|
||||
/**
|
||||
* Sets per-transaction Postgres timeouts via `SET LOCAL`.
|
||||
*
|
||||
* `lock_timeout` is the critical one: without it, a waiter inherits the full
|
||||
* `statement_timeout` clock, so one stuck writer can drain the pool.
|
||||
*
|
||||
* Safe under pgBouncer transaction pooling — `SET LOCAL` is transaction-scoped
|
||||
* and cleared at COMMIT/ROLLBACK before the session returns to the pool.
|
||||
*/
|
||||
async function setTableTxTimeouts(
|
||||
trx: DbTransaction,
|
||||
opts?: { statementMs?: number; lockMs?: number; idleMs?: number }
|
||||
) {
|
||||
const s = opts?.statementMs ?? 10_000
|
||||
const l = opts?.lockMs ?? 3_000
|
||||
const i = opts?.idleMs ?? 5_000
|
||||
await trx.execute(sql.raw(`SET LOCAL statement_timeout = '${s}ms'`))
|
||||
await trx.execute(sql.raw(`SET LOCAL lock_timeout = '${l}ms'`))
|
||||
await trx.execute(sql.raw(`SET LOCAL idle_in_transaction_session_timeout = '${i}ms'`))
|
||||
}
|
||||
|
||||
/**
|
||||
* Serializes writers that compute `max(position) + 1` for the same table.
|
||||
*
|
||||
* The row-count trigger (migration 0198) serializes capacity via a row lock on
|
||||
* `user_table_definitions` — but it fires AFTER INSERT, so two concurrent
|
||||
* auto-positioned inserts can read the same snapshot and assign the same
|
||||
* position (the `(table_id, position)` index is non-unique). This advisory
|
||||
* lock restores the pre-trigger serialization scoped to a single table, with
|
||||
* no cross-table contention. Released automatically at COMMIT/ROLLBACK.
|
||||
*/
|
||||
async function acquireTablePositionLock(trx: DbTransaction, tableId: string) {
|
||||
await trx.execute(
|
||||
sql`SELECT pg_advisory_xact_lock(hashtextextended(${`user_table_rows_pos:${tableId}`}, 0))`
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the next auto-assigned `position` for a table (max(position) + 1, or 0
|
||||
* if empty). Callers must hold `acquireTablePositionLock` to avoid two concurrent
|
||||
* writers computing the same value against the same snapshot.
|
||||
*/
|
||||
async function nextAutoPosition(trx: DbTransaction, tableId: string): Promise<number> {
|
||||
const [{ maxPos }] = await trx
|
||||
.select({
|
||||
maxPos: sql<number>`coalesce(max(${userTableRows.position}), -1)`.mapWith(Number),
|
||||
})
|
||||
.from(userTableRows)
|
||||
.where(eq(userTableRows.tableId, tableId))
|
||||
return maxPos + 1
|
||||
}

const TIMEOUT_CAP_MS = 10 * 60_000

/**
 * Scales `statement_timeout` to the expected row-count work.
 *
 * Bulk operations that rewrite JSONB or cascade row triggers (e.g.
 * `replaceTableRows`, `deleteColumn`, `renameColumn`) scale roughly linearly
 * with row count. A fixed cap would regress large-table users who never saw a
 * timeout before `SET LOCAL` was introduced. This helper picks
 * `max(baseMs, rowCount * perRowMs)`, capped at 10 minutes so a single
 * runaway transaction cannot indefinitely pin a pool connection.
 */
function scaledStatementTimeoutMs(
  rowCount: number,
  opts: { baseMs: number; perRowMs: number }
): number {
  const safeRowCount = Math.max(0, rowCount)
  return Math.min(TIMEOUT_CAP_MS, Math.max(opts.baseMs, safeRowCount * opts.perRowMs))
}
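
Concretely, with the constants at the call sites later in this diff (base 60s, 2ms/row for column operations; base 120s, 3ms/row for replaces), the scaling works out as follows:

```ts
// deleteColumn on a 100-row table: max(60_000, 100 * 2) = 60_000ms — the base
// floor wins. This is the case the "60s floor" test at the top of this diff asserts.
scaledStatementTimeoutMs(100, { baseMs: 60_000, perRowMs: 2 }) // => 60_000

// deleteColumn on a 1M-row table: max(60_000, 2_000_000) = 2_000_000,
// then capped at TIMEOUT_CAP_MS (10 minutes).
scaledStatementTimeoutMs(1_000_000, { baseMs: 60_000, perRowMs: 2 }) // => 600_000

// replaceTableRows counts existing + incoming rows as work (5 + 1 here):
scaledStatementTimeoutMs(6, { baseMs: 120_000, perRowMs: 3 }) // => 120_000
```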

/**
 * Gets a table by ID with full details.
 *
@@ -88,16 +162,14 @@ export async function getTableById(
      archivedAt: userTableDefinitions.archivedAt,
      createdAt: userTableDefinitions.createdAt,
      updatedAt: userTableDefinitions.updatedAt,
      rowCount: sql<number>`coalesce(${count(userTableRows.id)}, 0)`.mapWith(Number),
      rowCount: userTableDefinitions.rowCount,
    })
    .from(userTableDefinitions)
    .leftJoin(userTableRows, eq(userTableRows.tableId, userTableDefinitions.id))
    .where(
      includeArchived
        ? eq(userTableDefinitions.id, tableId)
        : and(eq(userTableDefinitions.id, tableId), isNull(userTableDefinitions.archivedAt))
    )
    .groupBy(userTableDefinitions.id)
    .limit(1)

  if (results.length === 0) return null

@@ -156,10 +228,9 @@ export async function listTables(
      archivedAt: userTableDefinitions.archivedAt,
      createdAt: userTableDefinitions.createdAt,
      updatedAt: userTableDefinitions.updatedAt,
      rowCount: sql<number>`coalesce(${count(userTableRows.id)}, 0)`.mapWith(Number),
      rowCount: userTableDefinitions.rowCount,
    })
    .from(userTableDefinitions)
    .leftJoin(userTableRows, eq(userTableRows.tableId, userTableDefinitions.id))
    .where(
      scope === 'all'
        ? eq(userTableDefinitions.workspaceId, workspaceId)
@@ -173,7 +244,6 @@ export async function listTables(
          isNull(userTableDefinitions.archivedAt)
        )
    )
    .groupBy(userTableDefinitions.id)
    .orderBy(userTableDefinitions.createdAt)

  return tables.map((t) => ({
@@ -240,6 +310,7 @@ export async function createTable(
  // to prevent TOCTOU race on the table count limit
  try {
    await db.transaction(async (trx) => {
      await setTableTxTimeouts(trx)
      await trx.execute(sql`SELECT 1 FROM workspace WHERE id = ${data.workspaceId} FOR UPDATE`)

      const [{ count: existingCount }] = await trx
@@ -510,6 +581,7 @@ export async function restoreTable(tableId: string, requestId: string): Promise<
  attemptedRestoreName = ''
  try {
    await db.transaction(async (tx) => {
      await setTableTxTimeouts(tx)
      await tx.execute(sql`SELECT 1 FROM user_table_definitions WHERE id = ${tableId} FOR UPDATE`)

      attemptedRestoreName = await generateRestoreName(table.name, async (candidate) => {
@@ -585,25 +657,22 @@ export async function insertRow(
  const rowId = `row_${generateId().replace(/-/g, '')}`
  const now = new Date()

  // Atomic capacity check + insert inside a transaction.
  // FOR UPDATE on the table definition row serializes concurrent inserts,
  // preventing the TOCTOU race where multiple requests pass the count check.
  // Capacity enforcement lives in the `increment_user_table_row_count` trigger
  // (migration 0198): a single conditional UPDATE on user_table_definitions
  // increments row_count iff row_count < max_rows, taking the row lock
  // atomically. No app-level FOR UPDATE / COUNT needed.
  const [row] = await db.transaction(async (trx) => {
    await trx.execute(
      sql`SELECT 1 FROM user_table_definitions WHERE id = ${data.tableId} FOR UPDATE`
    )

    const [{ count: currentCount }] = await trx
      .select({ count: count() })
      .from(userTableRows)
      .where(eq(userTableRows.tableId, data.tableId))

    if (Number(currentCount) >= table.maxRows) {
      throw new Error(`Table has reached maximum row limit (${table.maxRows})`)
    }
    await setTableTxTimeouts(trx)

    let targetPosition: number

    // The `(table_id, position)` index is non-unique, so we serialize all
    // position-aware writes (explicit and auto) through the per-table
    // advisory lock. Without this, two concurrent explicit-position inserts
    // at the same position can both observe an empty slot, both skip the
    // shift, and each INSERT a row with a duplicate `(table_id, position)`.
    await acquireTablePositionLock(trx, data.tableId)

    if (data.position !== undefined) {
      targetPosition = data.position

@@ -627,14 +696,7 @@ export async function insertRow(
        )
      }
    } else {
      const [{ maxPos }] = await trx
        .select({
          maxPos: sql<number>`coalesce(max(${userTableRows.position}), -1)`.mapWith(Number),
        })
        .from(userTableRows)
        .where(eq(userTableRows.tableId, data.tableId))

      targetPosition = maxPos + 1
      targetPosition = await nextAutoPosition(trx, data.tableId)
    }

    return trx
@@ -706,24 +768,12 @@ export async function batchInsertRows(

  const now = new Date()

  // Atomic capacity check + insert inside a transaction.
  // FOR UPDATE on the table definition row serializes concurrent inserts.
  // Capacity enforcement lives in the `increment_user_table_row_count` trigger
  // (migration 0198) — fires per row and raises `Maximum row limit (%) reached ...`
  // if the cap is hit mid-batch. The outer transaction means a partial batch
  // rolls back cleanly.
  const insertedRows = await db.transaction(async (trx) => {
    await trx.execute(
      sql`SELECT 1 FROM user_table_definitions WHERE id = ${data.tableId} FOR UPDATE`
    )

    const [{ count: currentCount }] = await trx
      .select({ count: count() })
      .from(userTableRows)
      .where(eq(userTableRows.tableId, data.tableId))

    const remainingCapacity = table.maxRows - Number(currentCount)
    if (remainingCapacity < data.rows.length) {
      throw new Error(
        `Insufficient capacity. Can only insert ${remainingCapacity} more rows (table has ${Number(currentCount)}/${table.maxRows} rows)`
      )
    }
    await setTableTxTimeouts(trx, { statementMs: 60_000 })

    const buildRow = (rowData: RowData, position: number) => ({
      id: `row_${generateId().replace(/-/g, '')}`,
@@ -736,6 +786,10 @@ export async function batchInsertRows(
      ...(data.userId ? { createdBy: data.userId } : {}),
    })

    // Serialize position-aware writes per-table. See `acquireTablePositionLock`
    // for why both explicit- and auto-position paths take this lock.
    await acquireTablePositionLock(trx, data.tableId)

    if (data.positions && data.positions.length > 0) {
      // Position-aware insert: shift existing rows to create gaps, then insert.
      // Process positions ascending so each shift preserves gaps created by prior shifts.
@@ -755,14 +809,8 @@ export async function batchInsertRows(
      return trx.insert(userTableRows).values(rowsToInsert).returning()
    }

    const [{ maxPos }] = await trx
      .select({
        maxPos: sql<number>`coalesce(max(${userTableRows.position}), -1)`.mapWith(Number),
      })
      .from(userTableRows)
      .where(eq(userTableRows.tableId, data.tableId))

    const rowsToInsert = data.rows.map((rowData, i) => buildRow(rowData, maxPos + 1 + i))
    const startPos = await nextAutoPosition(trx, data.tableId)
    const rowsToInsert = data.rows.map((rowData, i) => buildRow(rowData, startPos + i))

    return trx.insert(userTableRows).values(rowsToInsert).returning()
  })

@@ -849,10 +897,21 @@ export async function replaceTableRows(

  const now = new Date()

  const totalRowWork = Math.max(0, table.rowCount ?? 0) + data.rows.length
  const statementMs = scaledStatementTimeoutMs(totalRowWork, {
    baseMs: 120_000,
    perRowMs: 3,
  })

  const result = await db.transaction(async (trx) => {
    await trx.execute(
      sql`SELECT 1 FROM user_table_definitions WHERE id = ${data.tableId} FOR UPDATE`
    )
    await setTableTxTimeouts(trx, { statementMs })

    // Serialize concurrent replaces (and concurrent auto-position inserts) on the
    // same table. Without this, two concurrent replaces each see their own MVCC
    // snapshot for the DELETE; the second's DELETE would not observe rows the
    // first inserted, so both transactions commit and the table ends up with
    // the union of both row sets instead of only the last caller's rows.
    await acquireTablePositionLock(trx, data.tableId)

    const deletedRows = await trx
      .delete(userTableRows)
@@ -897,8 +956,11 @@ export async function replaceTableRows(
 * column, otherwise inserts a new row.
 *
 * Uses a single unique column for matching (not OR across all unique columns) to avoid
 * ambiguous matches when multiple unique columns exist. Capacity checks run inside the
 * transaction with a FOR UPDATE lock to prevent TOCTOU races.
 * ambiguous matches when multiple unique columns exist. Capacity enforcement lives
 * in the `increment_user_table_row_count` trigger (migration 0198). On the insert
 * path we acquire the per-table advisory lock and re-check for an existing match
 * before inserting, so a concurrent upsert racing on the same conflict target
 * cannot produce a duplicate row.
 *
 * @param data - Upsert data including optional conflictTarget
 * @param table - Table definition
@@ -965,12 +1027,10 @@ export async function upsertRow(
      ? sql`${userTableRows.data}->>${sql.raw(`'${targetColumnName}'`)} = ${String(targetValue)}`
      : sql`(${userTableRows.data}->${sql.raw(`'${targetColumnName}'`)})::jsonb = ${JSON.stringify(targetValue)}::jsonb`

  // Entire upsert runs in a transaction with FOR UPDATE lock on the table definition.
  // This serializes concurrent upserts and prevents the TOCTOU race on row count.
  // Capacity enforcement for the insert path lives in the `increment_user_table_row_count`
  // trigger (migration 0198). The update path doesn't change row_count, so no check needed.
  const result = await db.transaction(async (trx) => {
    await trx.execute(
      sql`SELECT 1 FROM user_table_definitions WHERE id = ${data.tableId} FOR UPDATE`
    )
    await setTableTxTimeouts(trx)

    // Find existing row by single conflict target column
    const [existingRow] = await trx
@@ -998,14 +1058,33 @@ export async function upsertRow(

    const now = new Date()

    if (existingRow) {
    // Resolve which row (if any) we should update. If the initial SELECT missed,
    // acquire the lock and re-check — a concurrent upsert may have inserted the
    // matching row between our SELECT and the INSERT path, and without the
    // re-check both transactions would insert and produce a duplicate that
    // bypasses the app-level unique check.
    let matchedRowId = existingRow?.id
    if (!matchedRowId) {
      await acquireTablePositionLock(trx, data.tableId)
      const [racedRow] = await trx
        .select({ id: userTableRows.id })
        .from(userTableRows)
        .where(
          and(
            eq(userTableRows.tableId, data.tableId),
            eq(userTableRows.workspaceId, data.workspaceId),
            matchFilter
          )
        )
        .limit(1)
      matchedRowId = racedRow?.id
    }

    if (matchedRowId) {
      const [updatedRow] = await trx
        .update(userTableRows)
        .set({
          data: data.data,
          updatedAt: now,
        })
        .where(eq(userTableRows.id, existingRow.id))
        .set({ data: data.data, updatedAt: now })
        .where(eq(userTableRows.id, matchedRowId))
        .returning()

      return {
@@ -1020,23 +1099,6 @@ export async function upsertRow(
      }
    }

    // Check capacity atomically (inside the lock)
    const [{ count: currentCount }] = await trx
      .select({ count: count() })
      .from(userTableRows)
      .where(eq(userTableRows.tableId, data.tableId))

    if (Number(currentCount) >= table.maxRows) {
      throw new Error(`Table row limit reached (${table.maxRows} rows max)`)
    }

    const [{ maxPos }] = await trx
      .select({
        maxPos: sql<number>`coalesce(max(${userTableRows.position}), -1)`.mapWith(Number),
      })
      .from(userTableRows)
      .where(eq(userTableRows.tableId, data.tableId))

    const [insertedRow] = await trx
      .insert(userTableRows)
      .values({
@@ -1044,7 +1106,7 @@ export async function upsertRow(
        tableId: data.tableId,
        workspaceId: data.workspaceId,
        data: data.data,
        position: maxPos + 1,
        position: await nextAutoPosition(trx, data.tableId),
        createdAt: now,
        updatedAt: now,
        ...(data.userId ? { createdBy: data.userId } : {}),
@@ -1073,6 +1135,14 @@ export async function upsertRow(
/**
 * Queries rows from a table with filtering, sorting, and pagination.
 *
 * Filter cost model: equality filters (`$eq`, `$in`) compile to JSONB
 * containment (`@>`) and hit the GIN (jsonb_path_ops) index on
 * `user_table_rows.data`. Range operators (`$gt`, `$gte`, `$lt`, `$lte`) and
 * `$contains` compile to `data->>'field'` text extraction and bypass the GIN
 * index — they fall back to a sequential scan of the rows for the table
 * (bounded only by the btree on `table_id`). Prefer equality on hot paths; set
 * `includeTotal: false` when the caller does not need the `COUNT(*)`.
 *
 * @param tableId - Table ID to query
 * @param workspaceId - Workspace ID for access control
 * @param options - Query options (filter, sort, limit, offset)
@@ -1085,7 +1155,13 @@ export async function queryRows(
  options: QueryOptions,
  requestId: string
): Promise<QueryResult> {
  const { filter, sort, limit = TABLE_LIMITS.DEFAULT_QUERY_LIMIT, offset = 0 } = options
  const {
    filter,
    sort,
    limit = TABLE_LIMITS.DEFAULT_QUERY_LIMIT,
    offset = 0,
    includeTotal = true,
  } = options

  const tableName = USER_TABLE_ROWS_SQL_NAME

@@ -1103,13 +1179,14 @@ export async function queryRows(
    }
  }

  // Get total count
  const countResult = await db
    .select({ count: count() })
    .from(userTableRows)
    .where(whereClause ?? baseConditions)

  const totalCount = Number(countResult[0].count)
  let totalCount: number | null = null
  if (includeTotal) {
    const countResult = await db
      .select({ count: count() })
      .from(userTableRows)
      .where(whereClause ?? baseConditions)
    totalCount = Number(countResult[0].count)
  }

  // Build ORDER BY clause (default to position ASC for stable ordering)
  let orderByClause
@@ -1273,6 +1350,7 @@ export async function deleteRow(
  requestId: string
): Promise<void> {
  await db.transaction(async (trx) => {
    await setTableTxTimeouts(trx)
    const [deleted] = await trx
      .delete(userTableRows)
      .where(
@@ -1351,49 +1429,45 @@ export async function updateRowsByFilter(
  }

  const uniqueColumns = getUniqueColumns(table.schema)
  if (uniqueColumns.length > 0) {
    const uniqueColumnsInUpdate = uniqueColumns.filter((col) => col.name in data.data)
    if (uniqueColumnsInUpdate.length > 0) {
    if (matchingRows.length > 1) {
      const uniqueColumnsInUpdate = uniqueColumns.filter((col) => col.name in data.data)
      if (uniqueColumnsInUpdate.length > 0) {
        throw new Error(
          `Cannot set unique column values when updating multiple rows. ` +
            `Columns with unique constraint: ${uniqueColumnsInUpdate.map((c) => c.name).join(', ')}. ` +
            `Updating ${matchingRows.length} rows with the same value would violate uniqueness.`
        )
      }
      throw new Error(
        `Cannot set unique column values when updating multiple rows. ` +
          `Columns with unique constraint: ${uniqueColumnsInUpdate.map((c) => c.name).join(', ')}. ` +
          `Updating ${matchingRows.length} rows with the same value would violate uniqueness.`
      )
    }

    for (const row of matchingRows) {
      const existingData = row.data as RowData
      const mergedData = { ...existingData, ...data.data }
      const uniqueValidation = await checkUniqueConstraintsDb(
        data.tableId,
        mergedData,
        table.schema,
        row.id
      )
      if (!uniqueValidation.valid) {
        throw new Error(`Unique constraint violation: ${uniqueValidation.errors.join(', ')}`)
      }
    // Only one row — only the touched unique columns need re-checking.
    const row = matchingRows[0]
    const mergedData = { ...(row.data as RowData), ...data.data }
    const uniqueValidation = await checkUniqueConstraintsDb(
      data.tableId,
      mergedData,
      table.schema,
      row.id
    )
    if (!uniqueValidation.valid) {
      throw new Error(`Unique constraint violation: ${uniqueValidation.errors.join(', ')}`)
    }
  }

  const now = new Date()
  const ids = matchingRows.map((r) => r.id)
  const patchJson = JSON.stringify(data.data)

  await db.transaction(async (trx) => {
    for (let i = 0; i < matchingRows.length; i += TABLE_LIMITS.UPDATE_BATCH_SIZE) {
      const batch = matchingRows.slice(i, i + TABLE_LIMITS.UPDATE_BATCH_SIZE)
      const updatePromises = batch.map((row) => {
        const existingData = row.data as RowData
        return trx
          .update(userTableRows)
          .set({
            data: { ...existingData, ...data.data },
            updatedAt: now,
          })
          .where(eq(userTableRows.id, row.id))
      })
      await Promise.all(updatePromises)
    await setTableTxTimeouts(trx, { statementMs: 60_000 })
    for (let i = 0; i < ids.length; i += TABLE_LIMITS.UPDATE_BATCH_SIZE) {
      const batchIds = ids.slice(i, i + TABLE_LIMITS.UPDATE_BATCH_SIZE)
      await trx
        .update(userTableRows)
        .set({
          data: sql`${userTableRows.data} || ${patchJson}::jsonb`,
          updatedAt: now,
        })
        .where(inArray(userTableRows.id, batchIds))
    }
  })
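
The single-statement rewrite above leans on JSONB concatenation, whose semantics match the old per-row spread. A quick sketch of the equivalence (values illustrative):

```ts
// Postgres evaluates: '{"name":"a","age":1}'::jsonb || '{"age":2}'::jsonb
//   => {"name":"a","age":2}
// i.e. a shallow top-level merge with right-hand precedence — exactly what the
// removed per-row path computed in JS:
const existingData = { name: 'a', age: 1 }
const patch = { age: 2 }
const merged = { ...existingData, ...patch } // => { name: 'a', age: 2 }
// The new code pushes that merge into Postgres, one UPDATE per batch of ids
// instead of one round-trip per row.
```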

@@ -1401,7 +1475,7 @@ export async function updateRowsByFilter(

  return {
    affectedCount: matchingRows.length,
    affectedRowIds: matchingRows.map((r) => r.id),
    affectedRowIds: ids,
  }
}

@@ -1473,6 +1547,7 @@ export async function batchUpdateRows(
  const now = new Date()

  await db.transaction(async (trx) => {
    await setTableTxTimeouts(trx, { statementMs: 60_000 })
    for (let i = 0; i < mergedUpdates.length; i += TABLE_LIMITS.UPDATE_BATCH_SIZE) {
      const batch = mergedUpdates.slice(i, i + TABLE_LIMITS.UPDATE_BATCH_SIZE)
      const updatePromises = batch.map(({ rowId, mergedData }) =>
@@ -1493,20 +1568,38 @@ export async function batchUpdateRows(
  }
}

type DbTransaction = Parameters<Parameters<typeof db.transaction>[0]>[0]

/**
 * Recompacts row positions to be contiguous (0, 1, 2, ...) after batch deletions.
 * Recompacts row positions to be contiguous after batch deletions.
 *
 * When `minDeletedPos` is provided, only rows with `position >= minDeletedPos`
 * are re-numbered (starting from `minDeletedPos`). Rows before the earliest
 * deleted position are untouched since their position is unaffected.
 *
 * If `minDeletedPos` is omitted, the whole table is recompacted from 0.
 * Single-row deletes use the more efficient `position - 1` shift in {@link deleteRow}.
 */
async function recompactPositions(tableId: string, trx: DbTransaction) {
async function recompactPositions(tableId: string, trx: DbTransaction, minDeletedPos?: number) {
  if (minDeletedPos === undefined) {
    await trx.execute(sql`
      UPDATE user_table_rows t
      SET position = r.new_pos
      FROM (
        SELECT id, ROW_NUMBER() OVER (ORDER BY position) - 1 AS new_pos
        FROM user_table_rows
        WHERE table_id = ${tableId}
      ) r
      WHERE t.id = r.id AND t.table_id = ${tableId} AND t.position != r.new_pos
    `)
    return
  }

  await trx.execute(sql`
    UPDATE user_table_rows t
    SET position = r.new_pos
    FROM (
      SELECT id, ROW_NUMBER() OVER (ORDER BY position) - 1 AS new_pos
      SELECT id, ${minDeletedPos}::int + ROW_NUMBER() OVER (ORDER BY position) - 1 AS new_pos
      FROM user_table_rows
      WHERE table_id = ${tableId}
      WHERE table_id = ${tableId} AND position >= ${minDeletedPos}
    ) r
    WHERE t.id = r.id AND t.table_id = ${tableId} AND t.position != r.new_pos
  `)
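
A worked example of the targeted recompaction (positions illustrative):

```ts
// Positions before: [0, 1, 2, 3, 4, 5]; the rows at 2 and 4 were deleted,
// leaving survivors at [0, 1, 3, 5] with minDeletedPos = 2.
// ROW_NUMBER() over survivors with position >= 2, ordered by position:
//   position 3 → new_pos = 2 + 1 - 1 = 2
//   position 5 → new_pos = 2 + 2 - 1 = 3
// Final positions: [0, 1, 2, 3]. Rows 0 and 1 are never rewritten, so their
// row versions (and any per-row trigger work) are skipped entirely.
```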

@@ -1538,7 +1631,7 @@ export async function deleteRowsByFilter(
  )

  let query = db
    .select({ id: userTableRows.id })
    .select({ id: userTableRows.id, position: userTableRows.position })
    .from(userTableRows)
    .where(and(baseConditions, filterClause))

@@ -1553,8 +1646,13 @@ export async function deleteRowsByFilter(
  }

  const rowIds = matchingRows.map((r) => r.id)
  const minDeletedPos = matchingRows.reduce(
    (min, r) => (r.position < min ? r.position : min),
    matchingRows[0].position
  )

  await db.transaction(async (trx) => {
    await setTableTxTimeouts(trx, { statementMs: 60_000 })
    for (let i = 0; i < rowIds.length; i += TABLE_LIMITS.DELETE_BATCH_SIZE) {
      const batch = rowIds.slice(i, i + TABLE_LIMITS.DELETE_BATCH_SIZE)
      await trx.delete(userTableRows).where(
@@ -1569,7 +1667,7 @@ export async function deleteRowsByFilter(
      )
    }

    await recompactPositions(data.tableId, trx)
    await recompactPositions(data.tableId, trx, minDeletedPos)
  })

  logger.info(`[${requestId}] Deleted ${matchingRows.length} rows from table ${data.tableId}`)
@@ -1594,7 +1692,8 @@ export async function deleteRowsByIds(
  const uniqueRequestedRowIds = Array.from(new Set(data.rowIds))

  const deletedRows = await db.transaction(async (trx) => {
    const deleted: { id: string }[] = []
    await setTableTxTimeouts(trx, { statementMs: 60_000 })
    const deleted: { id: string; position: number }[] = []
    for (let i = 0; i < uniqueRequestedRowIds.length; i += TABLE_LIMITS.DELETE_BATCH_SIZE) {
      const batch = uniqueRequestedRowIds.slice(i, i + TABLE_LIMITS.DELETE_BATCH_SIZE)
      const rows = await trx
@@ -1609,11 +1708,17 @@ export async function deleteRowsByIds(
          )}])`
        )
      )
        .returning({ id: userTableRows.id })
        .returning({ id: userTableRows.id, position: userTableRows.position })
      deleted.push(...rows)
    }

    await recompactPositions(data.tableId, trx)
    if (deleted.length > 0) {
      const minDeletedPos = deleted.reduce(
        (min, r) => (r.position < min ? r.position : min),
        deleted[0].position
      )
      await recompactPositions(data.tableId, trx, minDeletedPos)
    }

    return deleted
  })
@@ -1691,8 +1796,13 @@ export async function renameColumn(
  }

  const now = new Date()
  const statementMs = scaledStatementTimeoutMs(table.rowCount ?? 0, {
    baseMs: 60_000,
    perRowMs: 2,
  })

  await db.transaction(async (trx) => {
    await setTableTxTimeouts(trx, { statementMs })
    await trx
      .update(userTableDefinitions)
      .set({ schema: updatedSchema, metadata: updatedMetadata, updatedAt: now })
@@ -1752,8 +1862,13 @@ export async function deleteColumn(
  }

  const now = new Date()
  const statementMs = scaledStatementTimeoutMs(table.rowCount ?? 0, {
    baseMs: 60_000,
    perRowMs: 2,
  })

  await db.transaction(async (trx) => {
    await setTableTxTimeouts(trx, { statementMs })
    await trx
      .update(userTableDefinitions)
      .set({ schema: updatedSchema, metadata: updatedMetadata, updatedAt: now })
@@ -1815,8 +1930,13 @@ export async function deleteColumns(
  }

  const now = new Date()
  const statementMs = scaledStatementTimeoutMs(table.rowCount ?? 0, {
    baseMs: 60_000,
    perRowMs: 2 * namesToDelete.size,
  })

  await db.transaction(async (trx) => {
    await setTableTxTimeouts(trx, { statementMs })
    await trx
      .update(userTableDefinitions)
      .set({ schema: updatedSchema, metadata: updatedMetadata, updatedAt: now })

@@ -30,6 +30,14 @@ const ALLOWED_OPERATORS = new Set([
 * Builds a WHERE clause from a filter object.
 * Recursively processes logical operators ($or, $and) and field conditions.
 *
 * Index behavior: equality ($eq, $in) uses the JSONB containment operator (@>) and
 * can leverage the GIN index on `user_table_rows.data` (jsonb_path_ops). Range
 * operators ($gt, $gte, $lt, $lte) and pattern match ($contains) fall back to
 * text extraction via `data->>'field'`, which defeats the GIN index and produces
 * a sequential scan over the table's rows (bounded by a btree prefix on
 * `table_id`). Prefer equality filters on hot paths; assume range filters are
 * O(rows per table) until a per-column expression index is added.
 *
 * @param filter - Filter object with field conditions and logical operators
 * @param tableName - Table name for the query (e.g., 'user_table_rows')
 * @returns SQL WHERE clause or undefined if no filter specified

@@ -139,12 +139,18 @@ export interface QueryOptions {
  sort?: Sort
  limit?: number
  offset?: number
  /**
   * When true (default), runs a `COUNT(*)` and returns `totalCount` as a number.
   * Pass `false` to skip the count query (grid UI doesn't need it); `totalCount`
   * is returned as `null` to signal it was not computed.
   */
  includeTotal?: boolean
}

export interface QueryResult {
  rows: TableRow[]
  rowCount: number
  totalCount: number
  totalCount: number | null
  limit: number
  offset: number
}
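
A hypothetical caller that opts out of the count (ids are illustrative):

```ts
// Grid pagination fetch: skip COUNT(*) entirely.
const page = await queryRows(
  'tbl-1',
  'ws-1',
  {
    filter: { status: { $eq: 'active' } }, // equality → GIN-indexable containment
    limit: 50,
    offset: 0,
    includeTotal: false,
  },
  'req-1'
)
// page.totalCount === null → the count query never ran
// page.rows / page.rowCount still describe the returned page
```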

apps/sim/lib/uploads/core/upload-token.ts (new file, 87 lines)
@@ -0,0 +1,87 @@
import { safeCompare } from '@sim/security/compare'
import { hmacSha256Base64 } from '@sim/security/hmac'
import { env } from '@/lib/core/config/env'
import type { StorageContext } from '@/lib/uploads/shared/types'

export interface UploadTokenPayload {
  uploadId: string
  key: string
  userId: string
  workspaceId: string
  context: StorageContext
}

interface SignedPayload extends UploadTokenPayload {
  exp: number
  v: 1
}

const toBase64Url = (input: string): string => Buffer.from(input, 'utf8').toString('base64url')

const fromBase64Url = (input: string): string => Buffer.from(input, 'base64url').toString('utf8')

const sign = (payload: string): string => hmacSha256Base64(payload, env.INTERNAL_API_SECRET)

/**
 * Sign an upload session token binding (uploadId, key, userId, workspaceId, context).
 * Used to prevent IDOR on multipart upload follow-up calls (get-part-urls, complete, abort).
 */
export function signUploadToken(payload: UploadTokenPayload, expiresInSeconds = 60 * 60): string {
  const signed: SignedPayload = {
    ...payload,
    exp: Math.floor(Date.now() / 1000) + expiresInSeconds,
    v: 1,
  }
  const encoded = toBase64Url(JSON.stringify(signed))
  return `${encoded}.${sign(encoded)}`
}

export type UploadTokenVerification =
  | { valid: true; payload: UploadTokenPayload }
  | { valid: false }

export function verifyUploadToken(token: string): UploadTokenVerification {
  if (typeof token !== 'string') {
    return { valid: false }
  }
  const parts = token.split('.')
  if (parts.length !== 2) return { valid: false }
  const [encoded, signature] = parts
  if (!encoded || !signature) return { valid: false }

  const expected = sign(encoded)
  if (!safeCompare(signature, expected)) {
    return { valid: false }
  }

  let parsed: SignedPayload
  try {
    parsed = JSON.parse(fromBase64Url(encoded)) as SignedPayload
  } catch {
    return { valid: false }
  }

  if (
    parsed.v !== 1 ||
    typeof parsed.exp !== 'number' ||
    parsed.exp < Math.floor(Date.now() / 1000) ||
    typeof parsed.uploadId !== 'string' ||
    typeof parsed.key !== 'string' ||
    typeof parsed.userId !== 'string' ||
    typeof parsed.workspaceId !== 'string' ||
    typeof parsed.context !== 'string'
  ) {
    return { valid: false }
  }

  return {
    valid: true,
    payload: {
      uploadId: parsed.uploadId,
      key: parsed.key,
      userId: parsed.userId,
      workspaceId: parsed.workspaceId,
      context: parsed.context as StorageContext,
    },
  }
}
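
A round-trip sketch of the token helpers (payload values illustrative; the actual `StorageContext` members live in `@/lib/uploads/shared/types`):

```ts
const token = signUploadToken(
  {
    uploadId: 'upl_123',
    key: 'ws-1/uploads/report.pdf',
    userId: 'user-1',
    workspaceId: 'ws-1',
    context: 'workspace' as StorageContext, // illustrative — see shared/types
  },
  15 * 60 // optional override of the 1h default expiry
)

// On get-part-urls / complete / abort, the server re-derives trust from the token:
const result = verifyUploadToken(token)
if (!result.valid) {
  // expired, tampered, or malformed → treat as unauthorized;
  // never fall back to ids taken from the request body
} else {
  // result.payload.uploadId / key / workspaceId are authenticated bindings
}
```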

@@ -2,16 +2,18 @@ import { createLogger } from '@sim/logger'
import { safeCompare } from '@sim/security/compare'
import { hmacSha256Hex } from '@sim/security/hmac'
import { generateId } from '@sim/utils/id'
import { NextResponse } from 'next/server'
import { getNotificationUrl, getProviderConfig } from '@/lib/webhooks/provider-subscription-utils'
import type {
  AuthContext,
  DeleteSubscriptionContext,
  EventMatchContext,
  FormatInputContext,
  FormatInputResult,
  SubscriptionContext,
  SubscriptionResult,
  WebhookProviderHandler,
} from '@/lib/webhooks/providers/types'
import { createHmacVerifier } from '@/lib/webhooks/providers/utils'

const logger = createLogger('WebhookProvider:Ashby')

@@ -48,17 +50,74 @@ export const ashbyHandler: WebhookProviderHandler = {
      input: {
        ...((b.data as Record<string, unknown>) || {}),
        action: b.action,
        data: b.data || {},
      },
    }
  },

  verifyAuth: createHmacVerifier({
    configKey: 'secretToken',
    headerName: 'ashby-signature',
    validateFn: validateAshbySignature,
    providerLabel: 'Ashby',
  }),
  verifyAuth({ request, rawBody, requestId, providerConfig }: AuthContext): NextResponse | null {
    const secretToken = (providerConfig.secretToken as string | undefined)?.trim()
    if (!secretToken) {
      logger.warn(
        `[${requestId}] Ashby webhook missing secretToken in providerConfig — rejecting request`
      )
      return new NextResponse(
        'Unauthorized - Ashby webhook signing secret is not configured. Re-save the trigger so a webhook can be registered.',
        { status: 401 }
      )
    }

    const signature = request.headers.get('ashby-signature')
    if (!signature) {
      logger.warn(`[${requestId}] Ashby webhook missing signature header`)
      return new NextResponse('Unauthorized - Missing Ashby signature', { status: 401 })
    }

    if (!validateAshbySignature(secretToken, signature, rawBody)) {
      logger.warn(`[${requestId}] Ashby signature verification failed`, {
        signatureLength: signature.length,
        secretLength: secretToken.length,
      })
      return new NextResponse('Unauthorized - Invalid Ashby signature', { status: 401 })
    }

    return null
  },

  async matchEvent({
    webhook,
    body,
    requestId,
    providerConfig,
  }: EventMatchContext): Promise<boolean> {
    const triggerId = providerConfig.triggerId as string | undefined
    const obj = body as Record<string, unknown>
    const action = typeof obj?.action === 'string' ? obj.action : ''

    if (action === 'ping') {
      logger.debug(`[${requestId}] Ashby ping event received. Skipping execution.`, {
        webhookId: webhook.id,
        triggerId,
      })
      return false
    }

    if (!triggerId) return true

    const { isAshbyEventMatch } = await import('@/triggers/ashby/utils')
    if (!isAshbyEventMatch(triggerId, action)) {
      logger.debug(
        `[${requestId}] Ashby event mismatch for trigger ${triggerId}. Action: ${action || '(missing)'}. Skipping execution.`,
        {
          webhookId: webhook.id,
          triggerId,
          receivedAction: action,
        }
      )
      return false
    }

    return true
  },

  async createSubscription(ctx: SubscriptionContext): Promise<SubscriptionResult | undefined> {
    try {
@@ -78,18 +137,12 @@ export const ashbyHandler: WebhookProviderHandler = {
      throw new Error('Trigger ID is required to create Ashby webhook.')
    }

    const webhookTypeMap: Record<string, string> = {
      ashby_application_submit: 'applicationSubmit',
      ashby_candidate_stage_change: 'candidateStageChange',
      ashby_candidate_hire: 'candidateHire',
      ashby_candidate_delete: 'candidateDelete',
      ashby_job_create: 'jobCreate',
      ashby_offer_create: 'offerCreate',
    }

    const webhookType = webhookTypeMap[triggerId]
    const { ASHBY_TRIGGER_ACTION_MAP } = await import('@/triggers/ashby/utils')
    const webhookType = ASHBY_TRIGGER_ACTION_MAP[triggerId]
    if (!webhookType) {
      throw new Error(`Unknown Ashby triggerId: ${triggerId}. Add it to webhookTypeMap.`)
      throw new Error(
        `Unknown Ashby triggerId: ${triggerId}. Add it to ASHBY_TRIGGER_ACTION_MAP.`
      )
    }

    const notificationUrl = getNotificationUrl(ctx.webhook)
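
The inline `webhookTypeMap` moves to `@/triggers/ashby/utils` so that `matchEvent` and `createSubscription` share one source of truth. Presumably the extracted map keeps the same pairs as the removed inline version (a sketch reconstructed from the lines removed above; the real export lives in `@/triggers/ashby/utils`):

```ts
// Sketch — assumed shape of the shared map, not the literal implementation.
export const ASHBY_TRIGGER_ACTION_MAP: Record<string, string> = {
  ashby_application_submit: 'applicationSubmit',
  ashby_candidate_stage_change: 'candidateStageChange',
  ashby_candidate_hire: 'candidateHire',
  ashby_candidate_delete: 'candidateDelete',
  ashby_job_create: 'jobCreate',
  ashby_offer_create: 'offerCreate',
}
```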

@@ -34,6 +34,7 @@ export const SUBBLOCK_ID_MIGRATIONS: Record<string, Record<string, string>> = {
  ashby: {
    emailType: '_removed_emailType',
    phoneType: '_removed_phoneType',
    filterCandidateId: '_removed_filterCandidateId',
  },
  rippling: {
    action: '_removed_action',

@@ -564,6 +564,18 @@ export async function updateFolderRecord(
  await db.update(workflowFolder).set(setData).where(eq(workflowFolder.id, folderId))
}

export async function verifyFolderWorkspace(
  folderId: string,
  workspaceId: string
): Promise<boolean> {
  const [row] = await db
    .select({ id: workflowFolder.id })
    .from(workflowFolder)
    .where(and(eq(workflowFolder.id, folderId), eq(workflowFolder.workspaceId, workspaceId)))
    .limit(1)
  return Boolean(row)
}
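
A hypothetical guard built on the new helper (handler shape illustrative):

```ts
// Before moving a workflow into `folderId`, confirm the folder belongs to the
// same workspace — reject rather than silently cross-linking workspaces.
if (folderId && !(await verifyFolderWorkspace(folderId, workspaceId))) {
  return new Response('Folder not found in this workspace', { status: 404 })
}
```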

export async function deleteFolderRecord(folderId: string): Promise<boolean> {
  const [folder] = await db
    .select({ parentId: workflowFolder.parentId })

@@ -183,6 +183,49 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
      contextWindow: 1047576,
      releaseDate: '2025-04-14',
    },
    // GPT-5.5 family
    {
      id: 'gpt-5.5-pro',
      pricing: {
        input: 30.0,
        output: 180.0,
        updatedAt: '2026-04-23',
      },
      capabilities: {
        nativeStructuredOutputs: true,
        reasoningEffort: {
          values: ['none', 'low', 'medium', 'high', 'xhigh'],
        },
        verbosity: {
          values: ['low', 'medium', 'high'],
        },
        maxOutputTokens: 128000,
      },
      contextWindow: 1050000,
      releaseDate: '2026-04-23',
    },
    {
      id: 'gpt-5.5',
      pricing: {
        input: 5.0,
        cachedInput: 0.5,
        output: 30.0,
        updatedAt: '2026-04-23',
      },
      capabilities: {
        nativeStructuredOutputs: true,
        reasoningEffort: {
          values: ['none', 'low', 'medium', 'high', 'xhigh'],
        },
        verbosity: {
          values: ['low', 'medium', 'high'],
        },
        maxOutputTokens: 128000,
      },
      contextWindow: 1050000,
      releaseDate: '2026-04-23',
      recommended: true,
    },
    // GPT-5.4 family
    {
      id: 'gpt-5.4-pro',
@@ -219,7 +262,6 @@ export const PROVIDER_DEFINITIONS: Record<string, ProviderDefinition> = {
      },
      contextWindow: 1050000,
      releaseDate: '2026-03-05',
      recommended: true,
    },
    {
      id: 'gpt-5.4-mini',

@@ -1,3 +1,5 @@
import type { AshbyCandidate } from '@/tools/ashby/types'
import { CANDIDATE_OUTPUTS, mapCandidate } from '@/tools/ashby/utils'
import type { ToolConfig, ToolResponse } from '@/tools/types'

interface AshbyAddCandidateTagParams {
@@ -7,9 +9,7 @@ interface AshbyAddCandidateTagParams {
}

interface AshbyAddCandidateTagResponse extends ToolResponse {
  output: {
    success: boolean
  }
  output: AshbyCandidate
}

export const addCandidateTagTool: ToolConfig<
@@ -18,7 +18,7 @@ export const addCandidateTagTool: ToolConfig<
> = {
  id: 'ashby_add_candidate_tag',
  name: 'Ashby Add Candidate Tag',
  description: 'Adds a tag to a candidate in Ashby.',
  description: 'Adds a tag to a candidate in Ashby and returns the updated candidate.',
  version: '1.0.0',

  params: {
@@ -50,8 +50,8 @@ export const addCandidateTagTool: ToolConfig<
      Authorization: `Basic ${btoa(`${params.apiKey}:`)}`,
    }),
    body: (params) => ({
      candidateId: params.candidateId,
      tagId: params.tagId,
      candidateId: params.candidateId.trim(),
      tagId: params.tagId.trim(),
    }),
  },

@@ -64,13 +64,9 @@ export const addCandidateTagTool: ToolConfig<

    return {
      success: true,
      output: {
        success: true,
      },
      output: mapCandidate(data.results),
    }
  },

  outputs: {
    success: { type: 'boolean', description: 'Whether the tag was successfully added' },
  },
  outputs: CANDIDATE_OUTPUTS,
}
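
The Ashby tools below all converge on the same pattern: shared `mapCandidate`/`mapApplication`/`mapJob` normalizers plus shared `CANDIDATE_OUTPUTS`/`APPLICATION_OUTPUTS`/`JOB_OUTPUTS` schemas in `@/tools/ashby/utils`, replacing per-tool ad-hoc mappings. A sketch of the assumed helper shape, inferred from the call sites in this diff (not the literal implementation):

```ts
// Assumed shape only — the real normalizer lives in '@/tools/ashby/utils'.
export function mapCandidate(r: Record<string, any>): AshbyCandidate {
  return {
    id: r.id ?? null,
    name: r.name ?? null,
    primaryEmailAddress: r.primaryEmailAddress ?? null,
    // ...remaining fields coerced/defaulted to the AshbyCandidate type,
    // so every tool returns the same normalized candidate object.
  } as AshbyCandidate
}
```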
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import type { AshbyApplication } from '@/tools/ashby/types'
|
||||
import { APPLICATION_OUTPUTS, mapApplication } from '@/tools/ashby/utils'
|
||||
import type { ToolConfig, ToolResponse } from '@/tools/types'
|
||||
|
||||
interface AshbyChangeApplicationStageParams {
|
||||
@@ -8,10 +10,7 @@ interface AshbyChangeApplicationStageParams {
|
||||
}
|
||||
|
||||
interface AshbyChangeApplicationStageResponse extends ToolResponse {
|
||||
output: {
|
||||
applicationId: string
|
||||
stageId: string | null
|
||||
}
|
||||
output: AshbyApplication
|
||||
}
|
||||
|
||||
export const changeApplicationStageTool: ToolConfig<
|
||||
@@ -61,10 +60,10 @@ export const changeApplicationStageTool: ToolConfig<
|
||||
}),
|
||||
body: (params) => {
|
||||
const body: Record<string, unknown> = {
|
||||
applicationId: params.applicationId,
|
||||
interviewStageId: params.interviewStageId,
|
||||
applicationId: params.applicationId.trim(),
|
||||
interviewStageId: params.interviewStageId.trim(),
|
||||
}
|
||||
if (params.archiveReasonId) body.archiveReasonId = params.archiveReasonId
|
||||
if (params.archiveReasonId) body.archiveReasonId = params.archiveReasonId.trim()
|
||||
return body
|
||||
},
|
||||
},
|
||||
@@ -76,19 +75,11 @@ export const changeApplicationStageTool: ToolConfig<
|
||||
throw new Error(data.errorInfo?.message || 'Failed to change application stage')
|
||||
}
|
||||
|
||||
const r = data.results
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
applicationId: r.id ?? null,
|
||||
stageId: r.currentInterviewStage?.id ?? null,
|
||||
},
|
||||
output: mapApplication(data.results),
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
applicationId: { type: 'string', description: 'Application UUID' },
|
||||
stageId: { type: 'string', description: 'New interview stage UUID' },
|
||||
},
|
||||
outputs: APPLICATION_OUTPUTS,
|
||||
}
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import type { AshbyApplication } from '@/tools/ashby/types'
|
||||
import { APPLICATION_OUTPUTS, mapApplication } from '@/tools/ashby/utils'
|
||||
import type { ToolConfig, ToolResponse } from '@/tools/types'
|
||||
|
||||
interface AshbyCreateApplicationParams {
|
||||
@@ -12,9 +14,7 @@ interface AshbyCreateApplicationParams {
|
||||
}
|
||||
|
||||
interface AshbyCreateApplicationResponse extends ToolResponse {
|
||||
output: {
|
||||
applicationId: string
|
||||
}
|
||||
output: AshbyApplication
|
||||
}
|
||||
|
||||
export const createApplicationTool: ToolConfig<
|
||||
@@ -88,13 +88,13 @@ export const createApplicationTool: ToolConfig<
|
||||
}),
|
||||
body: (params) => {
|
||||
const body: Record<string, unknown> = {
|
||||
candidateId: params.candidateId,
|
||||
jobId: params.jobId,
|
||||
candidateId: params.candidateId.trim(),
|
||||
jobId: params.jobId.trim(),
|
||||
}
|
||||
if (params.interviewPlanId) body.interviewPlanId = params.interviewPlanId
|
||||
if (params.interviewStageId) body.interviewStageId = params.interviewStageId
|
||||
if (params.sourceId) body.sourceId = params.sourceId
|
||||
if (params.creditedToUserId) body.creditedToUserId = params.creditedToUserId
|
||||
if (params.interviewPlanId) body.interviewPlanId = params.interviewPlanId.trim()
|
||||
if (params.interviewStageId) body.interviewStageId = params.interviewStageId.trim()
|
||||
if (params.sourceId) body.sourceId = params.sourceId.trim()
|
||||
if (params.creditedToUserId) body.creditedToUserId = params.creditedToUserId.trim()
|
||||
if (params.createdAt) body.createdAt = params.createdAt
|
||||
return body
|
||||
},
|
||||
@@ -107,17 +107,11 @@ export const createApplicationTool: ToolConfig<
|
||||
throw new Error(data.errorInfo?.message || 'Failed to create application')
|
||||
}
|
||||
|
||||
const r = data.results
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
applicationId: r.applicationId ?? null,
|
||||
},
|
||||
output: mapApplication(data.results),
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
applicationId: { type: 'string', description: 'Created application UUID' },
|
||||
},
|
||||
outputs: APPLICATION_OUTPUTS,
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import type { AshbyCreateCandidateParams, AshbyCreateCandidateResponse } from '@/tools/ashby/types'
|
||||
import { CANDIDATE_OUTPUTS, mapCandidate } from '@/tools/ashby/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import type { AshbyCreateCandidateParams, AshbyCreateCandidateResponse } from './types'
|
||||
|
||||
export const createCandidateTool: ToolConfig<
|
||||
AshbyCreateCandidateParams,
|
||||
@@ -25,7 +26,7 @@ export const createCandidateTool: ToolConfig<
|
||||
},
|
||||
email: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
required: false,
|
||||
visibility: 'user-or-llm',
|
||||
description: 'Primary email address for the candidate',
|
||||
},
|
||||
@@ -65,12 +66,12 @@ export const createCandidateTool: ToolConfig<
|
||||
body: (params) => {
|
||||
const body: Record<string, unknown> = {
|
||||
name: params.name,
|
||||
email: params.email,
|
||||
}
|
||||
if (params.email) body.email = params.email
|
||||
if (params.phoneNumber) body.phoneNumber = params.phoneNumber
|
||||
if (params.linkedInUrl) body.linkedInUrl = params.linkedInUrl
|
||||
if (params.githubUrl) body.githubUrl = params.githubUrl
|
||||
if (params.sourceId) body.sourceId = params.sourceId
|
||||
if (params.sourceId) body.sourceId = params.sourceId.trim()
|
||||
return body
|
||||
},
|
||||
},
|
||||
@@ -82,55 +83,11 @@ export const createCandidateTool: ToolConfig<
|
||||
throw new Error(data.errorInfo?.message || 'Failed to create candidate')
|
||||
}
|
||||
|
||||
const r = data.results
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
id: r.id ?? null,
|
||||
name: r.name ?? null,
|
||||
primaryEmailAddress: r.primaryEmailAddress
|
||||
? {
|
||||
value: r.primaryEmailAddress.value ?? '',
|
||||
type: r.primaryEmailAddress.type ?? 'Other',
|
||||
isPrimary: r.primaryEmailAddress.isPrimary ?? true,
|
||||
}
|
||||
: null,
|
||||
primaryPhoneNumber: r.primaryPhoneNumber
|
||||
? {
|
||||
value: r.primaryPhoneNumber.value ?? '',
|
||||
type: r.primaryPhoneNumber.type ?? 'Other',
|
||||
isPrimary: r.primaryPhoneNumber.isPrimary ?? true,
|
||||
}
|
||||
: null,
|
||||
createdAt: r.createdAt ?? null,
|
||||
},
|
||||
output: mapCandidate(data.results),
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
id: { type: 'string', description: 'Created candidate UUID' },
|
||||
name: { type: 'string', description: 'Full name' },
|
||||
primaryEmailAddress: {
|
||||
type: 'object',
|
||||
description: 'Primary email contact info',
|
||||
optional: true,
|
||||
properties: {
|
||||
value: { type: 'string', description: 'Email address' },
|
||||
type: { type: 'string', description: 'Contact type (Personal, Work, Other)' },
|
||||
isPrimary: { type: 'boolean', description: 'Whether this is the primary email' },
|
||||
},
|
||||
},
|
||||
primaryPhoneNumber: {
|
||||
type: 'object',
|
||||
description: 'Primary phone contact info',
|
||||
optional: true,
|
||||
properties: {
|
||||
value: { type: 'string', description: 'Phone number' },
|
||||
type: { type: 'string', description: 'Contact type (Personal, Work, Other)' },
|
||||
isPrimary: { type: 'boolean', description: 'Whether this is the primary phone' },
|
||||
},
|
||||
},
|
||||
createdAt: { type: 'string', description: 'ISO 8601 creation timestamp' },
|
||||
},
|
||||
outputs: CANDIDATE_OUTPUTS,
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import type { AshbyCreateNoteParams, AshbyCreateNoteResponse } from '@/tools/ashby/types'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
import type { AshbyCreateNoteParams, AshbyCreateNoteResponse } from './types'
|
||||
|
||||
export const createNoteTool: ToolConfig<AshbyCreateNoteParams, AshbyCreateNoteResponse> = {
|
||||
id: 'ashby_create_note',
|
||||
@@ -51,7 +51,7 @@ export const createNoteTool: ToolConfig<AshbyCreateNoteParams, AshbyCreateNoteRe
|
||||
}),
|
||||
body: (params) => {
|
||||
const body: Record<string, unknown> = {
|
||||
candidateId: params.candidateId,
|
||||
candidateId: params.candidateId.trim(),
|
||||
sendNotifications: params.sendNotifications ?? false,
|
||||
}
|
||||
if (params.noteType === 'text/html') {
|
||||
@@ -74,16 +74,42 @@ export const createNoteTool: ToolConfig<AshbyCreateNoteParams, AshbyCreateNoteRe
|
||||
}
|
||||
|
||||
const r = data.results
|
||||
const author = r.author
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
noteId: r.id ?? null,
|
||||
id: r.id ?? '',
|
||||
createdAt: r.createdAt ?? null,
|
||||
isPrivate: r.isPrivate ?? false,
|
||||
content: r.content ?? null,
|
||||
author: author
|
||||
? {
|
||||
id: author.id ?? '',
|
||||
firstName: author.firstName ?? null,
|
||||
lastName: author.lastName ?? null,
|
||||
email: author.email ?? null,
|
||||
}
|
||||
: null,
|
||||
},
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
noteId: { type: 'string', description: 'Created note UUID' },
|
||||
id: { type: 'string', description: 'Created note UUID' },
|
||||
createdAt: { type: 'string', description: 'ISO 8601 creation timestamp', optional: true },
|
||||
isPrivate: { type: 'boolean', description: 'Whether the note is private' },
|
||||
content: { type: 'string', description: 'Note content', optional: true },
|
||||
author: {
|
||||
type: 'object',
|
||||
description: 'Author of the note',
|
||||
optional: true,
|
||||
properties: {
|
||||
id: { type: 'string', description: 'Author user UUID' },
|
||||
firstName: { type: 'string', description: 'Author first name', optional: true },
|
||||
lastName: { type: 'string', description: 'Author last name', optional: true },
|
||||
email: { type: 'string', description: 'Author email', optional: true },
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import type { AshbyApplication } from '@/tools/ashby/types'
|
||||
import { APPLICATION_OUTPUTS, mapApplication } from '@/tools/ashby/utils'
|
||||
import type { ToolConfig, ToolResponse } from '@/tools/types'
|
||||
|
||||
interface AshbyGetApplicationParams {
|
||||
@@ -6,35 +8,7 @@ interface AshbyGetApplicationParams {
|
||||
}
|
||||
|
||||
interface AshbyGetApplicationResponse extends ToolResponse {
|
||||
output: {
|
||||
id: string
|
||||
status: string
|
||||
candidate: {
|
||||
id: string
|
||||
name: string
|
||||
}
|
||||
job: {
|
||||
id: string
|
||||
title: string
|
||||
}
|
||||
currentInterviewStage: {
|
||||
id: string
|
||||
title: string
|
||||
type: string
|
||||
} | null
|
||||
source: {
|
||||
id: string
|
||||
title: string
|
||||
} | null
|
||||
archiveReason: {
|
||||
id: string
|
||||
text: string
|
||||
reasonType: string
|
||||
} | null
|
||||
archivedAt: string | null
|
||||
createdAt: string
|
||||
updatedAt: string
|
||||
}
|
||||
output: AshbyApplication
|
||||
}
|
||||
|
||||
export const getApplicationTool: ToolConfig<
|
||||
@@ -69,7 +43,7 @@ export const getApplicationTool: ToolConfig<
|
||||
Authorization: `Basic ${btoa(`${params.apiKey}:`)}`,
|
||||
}),
|
||||
body: (params) => ({
|
||||
applicationId: params.applicationId,
|
||||
applicationId: params.applicationId.trim(),
|
||||
}),
|
||||
},
|
||||
|
||||
@@ -80,98 +54,11 @@ export const getApplicationTool: ToolConfig<
|
||||
throw new Error(data.errorInfo?.message || 'Failed to get application')
|
||||
}
|
||||
|
||||
const r = data.results
|
||||
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
id: r.id ?? null,
|
||||
status: r.status ?? null,
|
||||
candidate: {
|
||||
id: r.candidate?.id ?? null,
|
||||
name: r.candidate?.name ?? null,
|
||||
},
|
||||
job: {
|
||||
id: r.job?.id ?? null,
|
||||
title: r.job?.title ?? null,
|
||||
},
|
||||
currentInterviewStage: r.currentInterviewStage
|
||||
? {
|
||||
id: r.currentInterviewStage.id ?? null,
|
||||
title: r.currentInterviewStage.title ?? null,
|
||||
type: r.currentInterviewStage.type ?? null,
|
||||
}
|
||||
: null,
|
||||
source: r.source
|
||||
? {
|
||||
id: r.source.id ?? null,
|
||||
title: r.source.title ?? null,
|
||||
}
|
||||
: null,
|
||||
archiveReason: r.archiveReason
|
||||
? {
|
||||
id: r.archiveReason.id ?? null,
|
||||
text: r.archiveReason.text ?? null,
|
||||
reasonType: r.archiveReason.reasonType ?? null,
|
||||
}
|
||||
: null,
|
||||
archivedAt: r.archivedAt ?? null,
|
||||
createdAt: r.createdAt ?? null,
|
||||
updatedAt: r.updatedAt ?? null,
|
||||
},
|
||||
output: mapApplication(data.results),
|
||||
}
|
||||
},
|
||||
|
||||
outputs: {
|
||||
id: { type: 'string', description: 'Application UUID' },
|
||||
status: { type: 'string', description: 'Application status (Active, Hired, Archived, Lead)' },
|
||||
candidate: {
|
||||
type: 'object',
|
||||
description: 'Associated candidate',
|
||||
properties: {
|
||||
id: { type: 'string', description: 'Candidate UUID' },
|
||||
name: { type: 'string', description: 'Candidate name' },
|
||||
},
|
||||
},
|
||||
job: {
|
||||
type: 'object',
|
||||
description: 'Associated job',
|
||||
properties: {
|
||||
id: { type: 'string', description: 'Job UUID' },
|
||||
title: { type: 'string', description: 'Job title' },
|
||||
},
|
||||
},
|
||||
currentInterviewStage: {
|
||||
type: 'object',
|
||||
description: 'Current interview stage',
|
||||
optional: true,
|
||||
properties: {
|
||||
id: { type: 'string', description: 'Stage UUID' },
|
||||
title: { type: 'string', description: 'Stage title' },
|
||||
type: { type: 'string', description: 'Stage type' },
|
||||
},
|
||||
},
|
||||
source: {
|
||||
type: 'object',
|
||||
description: 'Application source',
|
||||
optional: true,
|
||||
properties: {
|
||||
id: { type: 'string', description: 'Source UUID' },
|
||||
title: { type: 'string', description: 'Source title' },
},
},
archiveReason: {
type: 'object',
description: 'Reason for archival',
optional: true,
properties: {
id: { type: 'string', description: 'Reason UUID' },
text: { type: 'string', description: 'Reason text' },
reasonType: { type: 'string', description: 'Reason type' },
},
},
archivedAt: { type: 'string', description: 'ISO 8601 archive timestamp', optional: true },
createdAt: { type: 'string', description: 'ISO 8601 creation timestamp' },
updatedAt: { type: 'string', description: 'ISO 8601 last update timestamp' },
},
outputs: APPLICATION_OUTPUTS,
}

@@ -1,5 +1,6 @@
import type { AshbyGetCandidateParams, AshbyGetCandidateResponse } from '@/tools/ashby/types'
import { CANDIDATE_OUTPUTS, mapCandidate } from '@/tools/ashby/utils'
import type { ToolConfig } from '@/tools/types'
import type { AshbyGetCandidateParams, AshbyGetCandidateResponse } from './types'

export const getCandidateTool: ToolConfig<AshbyGetCandidateParams, AshbyGetCandidateResponse> = {
id: 'ashby_get_candidate',
@@ -30,7 +31,7 @@ export const getCandidateTool: ToolConfig<AshbyGetCandidateParams, AshbyGetCandi
Authorization: `Basic ${btoa(`${params.apiKey}:`)}`,
}),
body: (params) => ({
candidateId: params.candidateId.trim(),
id: params.candidateId.trim(),
}),
},

@@ -41,94 +42,11 @@ export const getCandidateTool: ToolConfig<AshbyGetCandidateParams, AshbyGetCandi
throw new Error(data.errorInfo?.message || 'Failed to get candidate')
}

const r = data.results

return {
success: true,
output: {
id: r.id ?? null,
name: r.name ?? null,
primaryEmailAddress: r.primaryEmailAddress
? {
value: r.primaryEmailAddress.value ?? '',
type: r.primaryEmailAddress.type ?? 'Other',
isPrimary: r.primaryEmailAddress.isPrimary ?? true,
}
: null,
primaryPhoneNumber: r.primaryPhoneNumber
? {
value: r.primaryPhoneNumber.value ?? '',
type: r.primaryPhoneNumber.type ?? 'Other',
isPrimary: r.primaryPhoneNumber.isPrimary ?? true,
}
: null,
profileUrl: r.profileUrl ?? null,
position: r.position ?? null,
company: r.company ?? null,
linkedInUrl:
(r.socialLinks ?? []).find((l: { type: string }) => l.type === 'LinkedIn')?.url ?? null,
githubUrl:
(r.socialLinks ?? []).find((l: { type: string }) => l.type === 'GitHub')?.url ?? null,
tags: (r.tags ?? []).map((t: { id: string; title: string }) => ({
id: t.id,
title: t.title,
})),
applicationIds: r.applicationIds ?? [],
createdAt: r.createdAt ?? null,
updatedAt: r.updatedAt ?? null,
},
output: mapCandidate(data.results),
}
},

outputs: {
id: { type: 'string', description: 'Candidate UUID' },
name: { type: 'string', description: 'Full name' },
primaryEmailAddress: {
type: 'object',
description: 'Primary email contact info',
optional: true,
properties: {
value: { type: 'string', description: 'Email address' },
type: { type: 'string', description: 'Contact type (Personal, Work, Other)' },
isPrimary: { type: 'boolean', description: 'Whether this is the primary email' },
},
},
primaryPhoneNumber: {
type: 'object',
description: 'Primary phone contact info',
optional: true,
properties: {
value: { type: 'string', description: 'Phone number' },
type: { type: 'string', description: 'Contact type (Personal, Work, Other)' },
isPrimary: { type: 'boolean', description: 'Whether this is the primary phone' },
},
},
profileUrl: {
type: 'string',
description: 'URL to the candidate Ashby profile',
optional: true,
},
position: { type: 'string', description: 'Current position or title', optional: true },
company: { type: 'string', description: 'Current company', optional: true },
linkedInUrl: { type: 'string', description: 'LinkedIn profile URL', optional: true },
githubUrl: { type: 'string', description: 'GitHub profile URL', optional: true },
tags: {
type: 'array',
description: 'Tags applied to the candidate',
items: {
type: 'object',
properties: {
id: { type: 'string', description: 'Tag UUID' },
title: { type: 'string', description: 'Tag title' },
},
},
},
applicationIds: {
type: 'array',
description: 'IDs of associated applications',
items: { type: 'string', description: 'Application UUID' },
},
createdAt: { type: 'string', description: 'ISO 8601 creation timestamp' },
updatedAt: { type: 'string', description: 'ISO 8601 last update timestamp' },
},
outputs: CANDIDATE_OUTPUTS,
}
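The change above swaps each tool's inline response mapping for shared `mapCandidate`/`CANDIDATE_OUTPUTS` helpers imported from `@/tools/ashby/utils`. That utils module is not included in this part of the diff, so the following is only a minimal sketch of the shape it implies, with field names inferred from the inline mapping being removed — not an authoritative copy of the real file:

```ts
// Hypothetical sketch of apps/sim/tools/ashby/utils.ts (not shown in this diff).
// Field names and defaults mirror the inline mapping removed above.
export interface AshbyContactInfo {
  value: string
  type: string
  isPrimary: boolean
}

export function mapContactInfo(raw: unknown): AshbyContactInfo | null {
  if (!raw || typeof raw !== 'object') return null
  const c = raw as Record<string, unknown>
  return {
    value: (c.value as string) ?? '',
    type: (c.type as string) ?? 'Other',
    isPrimary: (c.isPrimary as boolean) ?? true,
  }
}

export function mapCandidate(raw: unknown) {
  const r = (raw ?? {}) as Record<string, unknown> & {
    socialLinks?: Array<{ type: string; url: string }>
    tags?: Array<{ id: string; title: string }>
  }
  return {
    id: (r.id as string) ?? '',
    name: (r.name as string) ?? '',
    primaryEmailAddress: mapContactInfo(r.primaryEmailAddress),
    primaryPhoneNumber: mapContactInfo(r.primaryPhoneNumber),
    profileUrl: (r.profileUrl as string) ?? null,
    position: (r.position as string) ?? null,
    company: (r.company as string) ?? null,
    linkedInUrl: (r.socialLinks ?? []).find((l) => l.type === 'LinkedIn')?.url ?? null,
    githubUrl: (r.socialLinks ?? []).find((l) => l.type === 'GitHub')?.url ?? null,
    tags: (r.tags ?? []).map((t) => ({ id: t.id, title: t.title })),
    applicationIds: Array.isArray(r.applicationIds) ? (r.applicationIds as string[]) : [],
    createdAt: (r.createdAt as string) ?? null,
    updatedAt: (r.updatedAt as string) ?? null,
  }
}
```

Centralizing the mapper keeps `get_candidate` and `list_candidates` returning the same shape, which is the apparent motivation for the refactor.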
@@ -1,5 +1,6 @@
import type { AshbyGetJobParams, AshbyGetJobResponse } from '@/tools/ashby/types'
import { JOB_OUTPUTS, mapJob } from '@/tools/ashby/utils'
import type { ToolConfig } from '@/tools/types'
import type { AshbyGetJobParams, AshbyGetJobResponse } from './types'

export const getJobTool: ToolConfig<AshbyGetJobParams, AshbyGetJobResponse> = {
id: 'ashby_get_job',
@@ -30,7 +31,7 @@ export const getJobTool: ToolConfig<AshbyGetJobParams, AshbyGetJobResponse> = {
Authorization: `Basic ${btoa(`${params.apiKey}:`)}`,
}),
body: (params) => ({
jobId: params.jobId.trim(),
id: params.jobId.trim(),
}),
},

@@ -41,43 +42,11 @@ export const getJobTool: ToolConfig<AshbyGetJobParams, AshbyGetJobResponse> = {
throw new Error(data.errorInfo?.message || 'Failed to get job')
}

const r = data.results

return {
success: true,
output: {
id: r.id ?? null,
title: r.title ?? null,
status: r.status ?? null,
employmentType: r.employmentType ?? null,
departmentId: r.departmentId ?? null,
locationId: r.locationId ?? null,
descriptionPlain: r.descriptionPlain ?? null,
isArchived: r.isArchived ?? false,
createdAt: r.createdAt ?? null,
updatedAt: r.updatedAt ?? null,
},
output: mapJob(data.results),
}
},

outputs: {
id: { type: 'string', description: 'Job UUID' },
title: { type: 'string', description: 'Job title' },
status: { type: 'string', description: 'Job status (Open, Closed, Draft, Archived)' },
employmentType: {
type: 'string',
description: 'Employment type (FullTime, PartTime, Intern, Contract, Temporary)',
optional: true,
},
departmentId: { type: 'string', description: 'Department UUID', optional: true },
locationId: { type: 'string', description: 'Location UUID', optional: true },
descriptionPlain: {
type: 'string',
description: 'Job description in plain text',
optional: true,
},
isArchived: { type: 'boolean', description: 'Whether the job is archived' },
createdAt: { type: 'string', description: 'ISO 8601 creation timestamp' },
updatedAt: { type: 'string', description: 'ISO 8601 last update timestamp' },
},
outputs: JOB_OUTPUTS,
}

@@ -3,20 +3,80 @@ import type { ToolConfig, ToolResponse } from '@/tools/types'
interface AshbyGetJobPostingParams {
apiKey: string
jobPostingId: string
expandApplicationFormDefinition?: boolean
expandSurveyFormDefinitions?: boolean
}

interface AshbyDescriptionPart {
html: string | null
plain: string | null
}

interface AshbyJobPosting {
id: string
title: string
descriptionPlain: string | null
descriptionHtml: string | null
descriptionSocial: string | null
descriptionParts: {
descriptionOpening: AshbyDescriptionPart | null
descriptionBody: AshbyDescriptionPart | null
descriptionClosing: AshbyDescriptionPart | null
} | null
departmentName: string | null
teamName: string | null
teamNameHierarchy: string[]
jobId: string | null
locationName: string | null
locationIds: {
primaryLocationId: string | null
secondaryLocationIds: string[]
} | null
address: {
postalAddress: {
addressCountry: string | null
addressRegion: string | null
addressLocality: string | null
postalCode: string | null
streetAddress: string | null
} | null
} | null
isRemote: boolean
workplaceType: string | null
employmentType: string | null
isListed: boolean
suppressDescriptionOpening: boolean
suppressDescriptionClosing: boolean
publishedDate: string | null
applicationDeadline: string | null
externalLink: string | null
applyLink: string | null
compensation: {
compensationTierSummary: string | null
summaryComponents: Array<{
summary: string | null
compensationTypeLabel: string | null
interval: string | null
currencyCode: string | null
minValue: number | null
maxValue: number | null
}>
shouldDisplayCompensationOnJobBoard: boolean
} | null
applicationLimitCalloutHtml: string | null
updatedAt: string | null
}

interface AshbyGetJobPostingResponse extends ToolResponse {
output: {
id: string
title: string
jobId: string | null
locationName: string | null
departmentName: string | null
employmentType: string | null
descriptionPlain: string | null
isListed: boolean
publishedDate: string | null
externalLink: string | null
output: AshbyJobPosting
}

function mapDescriptionPart(raw: unknown): AshbyDescriptionPart | null {
if (!raw || typeof raw !== 'object') return null
const p = raw as Record<string, unknown>
return {
html: (p.html as string) ?? null,
plain: (p.plain as string) ?? null,
}
}

@@ -39,6 +99,18 @@ export const getJobPostingTool: ToolConfig<AshbyGetJobPostingParams, AshbyGetJob
visibility: 'user-or-llm',
description: 'The UUID of the job posting to fetch',
},
expandApplicationFormDefinition: {
type: 'boolean',
required: false,
visibility: 'user-or-llm',
description: 'Include application form definition in the response',
},
expandSurveyFormDefinitions: {
type: 'boolean',
required: false,
visibility: 'user-or-llm',
description: 'Include survey form definitions in the response',
},
},

request: {
@@ -48,9 +120,18 @@ export const getJobPostingTool: ToolConfig<AshbyGetJobPostingParams, AshbyGetJob
'Content-Type': 'application/json',
Authorization: `Basic ${btoa(`${params.apiKey}:`)}`,
}),
body: (params) => ({
jobPostingId: params.jobPostingId,
}),
body: (params) => {
const body: Record<string, unknown> = {
jobPostingId: params.jobPostingId.trim(),
}
if (params.expandApplicationFormDefinition !== undefined) {
body.expandApplicationFormDefinition = params.expandApplicationFormDefinition
}
if (params.expandSurveyFormDefinitions !== undefined) {
body.expandSurveyFormDefinitions = params.expandSurveyFormDefinitions
}
return body
},
},

transformResponse: async (response: Response) => {
@@ -60,21 +141,90 @@ export const getJobPostingTool: ToolConfig<AshbyGetJobPostingParams, AshbyGetJob
throw new Error(data.errorInfo?.message || 'Failed to get job posting')
}

const r = data.results
const r = (data.results ?? {}) as Record<string, unknown> & {
descriptionParts?: Record<string, unknown>
locationIds?: { primaryLocationId?: string; secondaryLocationIds?: string[] }
address?: { postalAddress?: Record<string, unknown> }
compensation?: Record<string, unknown> & {
summaryComponents?: Array<Record<string, unknown>>
}
}

const pa = r.address?.postalAddress
const comp = r.compensation
const summaryComponents = Array.isArray(comp?.summaryComponents) ? comp.summaryComponents : []
const descParts = r.descriptionParts

return {
success: true,
output: {
id: r.id ?? null,
title: r.jobTitle ?? r.title ?? null,
jobId: r.jobId ?? null,
locationName: r.locationName ?? null,
departmentName: r.departmentName ?? null,
employmentType: r.employmentType ?? null,
descriptionPlain: r.descriptionPlain ?? r.description ?? null,
isListed: r.isListed ?? false,
publishedDate: r.publishedDate ?? null,
externalLink: r.externalLink ?? null,
id: (r.id as string) ?? '',
title: (r.title as string) ?? '',
descriptionPlain: (r.descriptionPlain as string) ?? null,
descriptionHtml: (r.descriptionHtml as string) ?? null,
descriptionSocial: (r.descriptionSocial as string) ?? null,
descriptionParts: descParts
? {
descriptionOpening: mapDescriptionPart(descParts.descriptionOpening),
descriptionBody: mapDescriptionPart(descParts.descriptionBody),
descriptionClosing: mapDescriptionPart(descParts.descriptionClosing),
}
: null,
departmentName: (r.departmentName as string) ?? null,
teamName: (r.teamName as string) ?? null,
teamNameHierarchy: Array.isArray(r.teamNameHierarchy)
? (r.teamNameHierarchy as string[])
: [],
jobId: (r.jobId as string) ?? null,
locationName: (r.locationName as string) ?? null,
locationIds: r.locationIds
? {
primaryLocationId: r.locationIds.primaryLocationId ?? null,
secondaryLocationIds: Array.isArray(r.locationIds.secondaryLocationIds)
? r.locationIds.secondaryLocationIds
: [],
}
: null,
address: r.address
? {
postalAddress: pa
? {
addressCountry: (pa.addressCountry as string) ?? null,
addressRegion: (pa.addressRegion as string) ?? null,
addressLocality: (pa.addressLocality as string) ?? null,
postalCode: (pa.postalCode as string) ?? null,
streetAddress: (pa.streetAddress as string) ?? null,
}
: null,
}
: null,
isRemote: (r.isRemote as boolean) ?? false,
workplaceType: (r.workplaceType as string) ?? null,
employmentType: (r.employmentType as string) ?? null,
isListed: (r.isListed as boolean) ?? false,
suppressDescriptionOpening: (r.suppressDescriptionOpening as boolean) ?? false,
suppressDescriptionClosing: (r.suppressDescriptionClosing as boolean) ?? false,
publishedDate: (r.publishedDate as string) ?? null,
applicationDeadline: (r.applicationDeadline as string) ?? null,
externalLink: (r.externalLink as string) ?? null,
applyLink: (r.applyLink as string) ?? null,
compensation: comp
? {
compensationTierSummary: (comp.compensationTierSummary as string) ?? null,
summaryComponents: summaryComponents.map((c) => ({
summary: (c.summary as string) ?? null,
compensationTypeLabel: (c.compensationTypeLabel as string) ?? null,
interval: (c.interval as string) ?? null,
currencyCode: (c.currencyCode as string) ?? null,
minValue: (c.minValue as number) ?? null,
maxValue: (c.maxValue as number) ?? null,
})),
shouldDisplayCompensationOnJobBoard:
(comp.shouldDisplayCompensationOnJobBoard as boolean) ?? false,
}
: null,
applicationLimitCalloutHtml: (r.applicationLimitCalloutHtml as string) ?? null,
updatedAt: (r.updatedAt as string) ?? null,
},
}
},
@@ -82,25 +232,188 @@ export const getJobPostingTool: ToolConfig<AshbyGetJobPostingParams, AshbyGetJob
outputs: {
id: { type: 'string', description: 'Job posting UUID' },
title: { type: 'string', description: 'Job posting title' },
jobId: { type: 'string', description: 'Associated job UUID', optional: true },
locationName: { type: 'string', description: 'Location name', optional: true },
departmentName: { type: 'string', description: 'Department name', optional: true },
employmentType: {
type: 'string',
description: 'Employment type (e.g. FullTime, PartTime, Contract)',
optional: true,
},
descriptionPlain: {
type: 'string',
description: 'Job posting description in plain text',
description: 'Full description in plain text',
optional: true,
},
isListed: { type: 'boolean', description: 'Whether the posting is publicly listed' },
descriptionHtml: {
type: 'string',
description: 'Full description in HTML',
optional: true,
},
descriptionSocial: {
type: 'string',
description: 'Shortened description for social sharing (max 200 chars)',
optional: true,
},
descriptionParts: {
type: 'object',
description: 'Description broken into opening, body, and closing sections',
optional: true,
properties: {
descriptionOpening: {
type: 'object',
description: 'Opening (from Job Boards theme settings)',
optional: true,
properties: {
html: { type: 'string', description: 'HTML content', optional: true },
plain: { type: 'string', description: 'Plain text content', optional: true },
},
},
descriptionBody: {
type: 'object',
description: 'Main description body',
optional: true,
properties: {
html: { type: 'string', description: 'HTML content', optional: true },
plain: { type: 'string', description: 'Plain text content', optional: true },
},
},
descriptionClosing: {
type: 'object',
description: 'Closing (from Job Boards theme settings)',
optional: true,
properties: {
html: { type: 'string', description: 'HTML content', optional: true },
plain: { type: 'string', description: 'Plain text content', optional: true },
},
},
},
},
departmentName: { type: 'string', description: 'Department name', optional: true },
teamName: { type: 'string', description: 'Team name', optional: true },
teamNameHierarchy: {
type: 'array',
description: 'Hierarchy of team names from root to team',
items: { type: 'string', description: 'Team name' },
},
jobId: { type: 'string', description: 'Associated job UUID', optional: true },
locationName: { type: 'string', description: 'Primary location name', optional: true },
locationIds: {
type: 'object',
description: 'Primary and secondary location UUIDs',
optional: true,
properties: {
primaryLocationId: {
type: 'string',
description: 'Primary location UUID',
optional: true,
},
secondaryLocationIds: {
type: 'array',
description: 'Secondary location UUIDs',
items: { type: 'string', description: 'Location UUID' },
},
},
},
address: {
type: 'object',
description: 'Postal address of the posting location',
optional: true,
properties: {
postalAddress: {
type: 'object',
description: 'Structured postal address',
optional: true,
properties: {
addressCountry: { type: 'string', description: 'Country', optional: true },
addressRegion: { type: 'string', description: 'State or region', optional: true },
addressLocality: { type: 'string', description: 'City or locality', optional: true },
postalCode: { type: 'string', description: 'Postal code', optional: true },
streetAddress: { type: 'string', description: 'Street address', optional: true },
},
},
},
},
isRemote: { type: 'boolean', description: 'Whether the posting is remote' },
workplaceType: {
type: 'string',
description: 'Workplace type (OnSite, Remote, Hybrid)',
optional: true,
},
employmentType: {
type: 'string',
description: 'Employment type (FullTime, PartTime, Intern, Contract, Temporary)',
optional: true,
},
isListed: { type: 'boolean', description: 'Whether publicly listed on the job board' },
suppressDescriptionOpening: {
type: 'boolean',
description: 'Whether the theme opening is hidden on this posting',
},
suppressDescriptionClosing: {
type: 'boolean',
description: 'Whether the theme closing is hidden on this posting',
},
publishedDate: { type: 'string', description: 'ISO 8601 published date', optional: true },
applicationDeadline: {
type: 'string',
description: 'ISO 8601 application deadline',
optional: true,
},
externalLink: {
type: 'string',
description: 'External link to the job posting',
optional: true,
},
applyLink: {
type: 'string',
description: 'Direct apply link',
optional: true,
},
compensation: {
type: 'object',
description: 'Compensation details for the posting',
optional: true,
properties: {
compensationTierSummary: {
type: 'string',
description: 'Human-readable tier summary',
optional: true,
},
summaryComponents: {
type: 'array',
description: 'Structured compensation components',
items: {
type: 'object',
properties: {
summary: { type: 'string', description: 'Component summary', optional: true },
compensationTypeLabel: {
type: 'string',
description: 'Component type label (Salary, Commission, Bonus, Equity, etc.)',
optional: true,
},
interval: {
type: 'string',
description: 'Payment interval (e.g. annual, hourly)',
optional: true,
},
currencyCode: {
type: 'string',
description: 'ISO 4217 currency code',
optional: true,
},
minValue: { type: 'number', description: 'Minimum value', optional: true },
maxValue: { type: 'number', description: 'Maximum value', optional: true },
},
},
},
shouldDisplayCompensationOnJobBoard: {
type: 'boolean',
description: 'Whether compensation is shown on the job board',
},
},
},
applicationLimitCalloutHtml: {
type: 'string',
description: 'HTML callout shown when application limit is reached',
optional: true,
},
updatedAt: {
type: 'string',
description: 'ISO 8601 last update timestamp',
optional: true,
},
},
}
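The new `transformResponse` above relies on repeated `(r.field as string) ?? null` casts over a loosely typed `results` record. Note that an `as` cast performs no runtime check, so `?? null` only normalizes `null`/`undefined`; a wrong-typed value passes through. A pair of hypothetical narrowing helpers (not part of this diff) would express the same defaults with actual runtime checks:

```ts
// Hypothetical helpers, not in the diff: same null-defaulting as the casts
// above, but validated at runtime instead of asserted with `as`.
export function asString(v: unknown): string | null {
  return typeof v === 'string' ? v : null
}

export function asBoolean(v: unknown, fallback = false): boolean {
  return typeof v === 'boolean' ? v : fallback
}

// Example usage inside the mapper:
//   descriptionPlain: asString(r.descriptionPlain),
//   isListed: asBoolean(r.isListed),
```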
@@ -1,3 +1,5 @@
import type { AshbyOffer } from '@/tools/ashby/types'
import { mapOffer, OFFER_OUTPUTS } from '@/tools/ashby/utils'
import type { ToolConfig, ToolResponse } from '@/tools/types'

interface AshbyGetOfferParams {
@@ -6,19 +8,7 @@ interface AshbyGetOfferParams {
}

interface AshbyGetOfferResponse extends ToolResponse {
output: {
id: string
offerStatus: string
acceptanceStatus: string | null
applicationId: string | null
startDate: string | null
salary: {
currencyCode: string
value: number
} | null
openingId: string | null
createdAt: string | null
}
output: AshbyOffer
}

export const getOfferTool: ToolConfig<AshbyGetOfferParams, AshbyGetOfferResponse> = {
@@ -50,7 +40,7 @@ export const getOfferTool: ToolConfig<AshbyGetOfferParams, AshbyGetOfferResponse
Authorization: `Basic ${btoa(`${params.apiKey}:`)}`,
}),
body: (params) => ({
offerId: params.offerId,
offerId: params.offerId.trim(),
}),
},

@@ -61,56 +51,11 @@ export const getOfferTool: ToolConfig<AshbyGetOfferParams, AshbyGetOfferResponse
throw new Error(data.errorInfo?.message || 'Failed to get offer')
}

const r = data.results
const v = r.latestVersion

return {
success: true,
output: {
id: r.id ?? null,
offerStatus: r.offerStatus ?? null,
acceptanceStatus: r.acceptanceStatus ?? null,
applicationId: r.applicationId ?? null,
startDate: v?.startDate ?? null,
salary: v?.salary
? {
currencyCode: v.salary.currencyCode ?? null,
value: v.salary.value ?? null,
}
: null,
openingId: v?.openingId ?? null,
createdAt: v?.createdAt ?? null,
},
output: mapOffer(data.results),
}
},

outputs: {
id: { type: 'string', description: 'Offer UUID' },
offerStatus: {
type: 'string',
description: 'Offer status (e.g. WaitingOnCandidateResponse, CandidateAccepted)',
},
acceptanceStatus: {
type: 'string',
description: 'Acceptance status (e.g. Accepted, Declined, Pending)',
optional: true,
},
applicationId: { type: 'string', description: 'Associated application UUID', optional: true },
startDate: { type: 'string', description: 'Offer start date', optional: true },
salary: {
type: 'object',
description: 'Salary details',
optional: true,
properties: {
currencyCode: { type: 'string', description: 'ISO 4217 currency code' },
value: { type: 'number', description: 'Salary amount' },
},
},
openingId: { type: 'string', description: 'Associated opening UUID', optional: true },
createdAt: {
type: 'string',
description: 'ISO 8601 creation timestamp (from latest version)',
optional: true,
},
},
outputs: OFFER_OUTPUTS,
}

@@ -1,5 +1,9 @@
import type {
AshbyListApplicationsParams,
AshbyListApplicationsResponse,
} from '@/tools/ashby/types'
import { APPLICATION_OUTPUTS, mapApplication } from '@/tools/ashby/utils'
import type { ToolConfig } from '@/tools/types'
import type { AshbyListApplicationsParams, AshbyListApplicationsResponse } from './types'

export const listApplicationsTool: ToolConfig<
AshbyListApplicationsParams,
@@ -8,7 +12,7 @@ export const listApplicationsTool: ToolConfig<
id: 'ashby_list_applications',
name: 'Ashby List Applications',
description:
'Lists all applications in an Ashby organization with pagination and optional filters for status, job, candidate, and creation date.',
'Lists all applications in an Ashby organization with pagination and optional filters for status, job, and creation date.',
version: '1.0.0',

params: {
@@ -42,12 +46,6 @@ export const listApplicationsTool: ToolConfig<
visibility: 'user-or-llm',
description: 'Filter applications by a specific job UUID',
},
candidateId: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Filter applications by a specific candidate UUID',
},
createdAfter: {
type: 'string',
required: false,
@@ -68,10 +66,12 @@ export const listApplicationsTool: ToolConfig<
const body: Record<string, unknown> = {}
if (params.cursor) body.cursor = params.cursor
if (params.perPage) body.limit = params.perPage
if (params.status) body.status = [params.status]
if (params.jobId) body.jobId = params.jobId
if (params.candidateId) body.candidateId = params.candidateId
if (params.createdAfter) body.createdAfter = new Date(params.createdAfter).getTime()
if (params.status) body.status = params.status
if (params.jobId) body.jobId = params.jobId.trim()
if (params.createdAfter) {
const ms = new Date(params.createdAfter).getTime()
if (!Number.isNaN(ms)) body.createdAfter = ms
}
return body
},
},
@@ -86,42 +86,7 @@ export const listApplicationsTool: ToolConfig<
return {
success: true,
output: {
applications: (data.results ?? []).map(
(
a: Record<string, unknown> & {
candidate?: { id?: string; name?: string }
job?: { id?: string; title?: string }
currentInterviewStage?: { id?: string; title?: string; type?: string } | null
source?: { id?: string; title?: string } | null
}
) => ({
id: a.id ?? null,
status: a.status ?? null,
candidate: {
id: a.candidate?.id ?? null,
name: a.candidate?.name ?? null,
},
job: {
id: a.job?.id ?? null,
title: a.job?.title ?? null,
},
currentInterviewStage: a.currentInterviewStage
? {
id: a.currentInterviewStage.id ?? null,
title: a.currentInterviewStage.title ?? null,
type: a.currentInterviewStage.type ?? null,
}
: null,
source: a.source
? {
id: a.source.id ?? null,
title: a.source.title ?? null,
}
: null,
createdAt: a.createdAt ?? null,
updatedAt: a.updatedAt ?? null,
})
),
applications: (data.results ?? []).map(mapApplication),
moreDataAvailable: data.moreDataAvailable ?? false,
nextCursor: data.nextCursor ?? null,
},
@@ -134,50 +99,7 @@ export const listApplicationsTool: ToolConfig<
description: 'List of applications',
items: {
type: 'object',
properties: {
id: { type: 'string', description: 'Application UUID' },
status: {
type: 'string',
description: 'Application status (Active, Hired, Archived, Lead)',
},
candidate: {
type: 'object',
description: 'Associated candidate',
properties: {
id: { type: 'string', description: 'Candidate UUID' },
name: { type: 'string', description: 'Candidate name' },
},
},
job: {
type: 'object',
description: 'Associated job',
properties: {
id: { type: 'string', description: 'Job UUID' },
title: { type: 'string', description: 'Job title' },
},
},
currentInterviewStage: {
type: 'object',
description: 'Current interview stage',
optional: true,
properties: {
id: { type: 'string', description: 'Stage UUID' },
title: { type: 'string', description: 'Stage title' },
type: { type: 'string', description: 'Stage type' },
},
},
source: {
type: 'object',
description: 'Application source',
optional: true,
properties: {
id: { type: 'string', description: 'Source UUID' },
title: { type: 'string', description: 'Source title' },
},
},
createdAt: { type: 'string', description: 'ISO 8601 creation timestamp' },
updatedAt: { type: 'string', description: 'ISO 8601 last update timestamp' },
},
properties: APPLICATION_OUTPUTS,
},
},
moreDataAvailable: {
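The list tools here all follow the same cursor protocol: the response carries `moreDataAvailable` plus an opaque `nextCursor`, and the request body accepts `cursor` and `limit`. A sketch of draining every page of `application.list` using the same body shape the tool builds — the endpoint URL is assumed from Ashby's public API (it is defined elsewhere in the tool file, outside this hunk):

```ts
// Sketch: fetch all pages from Ashby's application.list.
// Assumptions: endpoint URL, and that the key is sent as basic auth
// "apiKey:" exactly as the tool's headers above do.
async function listAllApplications(apiKey: string): Promise<unknown[]> {
  const all: unknown[] = []
  let cursor: string | undefined

  do {
    const body: Record<string, unknown> = { limit: 100 }
    if (cursor) body.cursor = cursor

    const res = await fetch('https://api.ashbyhq.com/application.list', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Authorization: `Basic ${btoa(`${apiKey}:`)}`,
      },
      body: JSON.stringify(body),
    })
    const data = await res.json()
    if (!data.success) {
      throw new Error(data.errorInfo?.message || 'Failed to list applications')
    }

    all.push(...(data.results ?? []))
    // Continue only while the API reports more pages and returns a cursor.
    cursor = data.moreDataAvailable ? (data.nextCursor ?? undefined) : undefined
  } while (cursor)

  return all
}
```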
@@ -2,16 +2,19 @@ import type { ToolConfig, ToolResponse } from '@/tools/types'

interface AshbyListArchiveReasonsParams {
apiKey: string
includeArchived?: boolean
}

interface AshbyArchiveReason {
id: string
text: string
reasonType: string
isArchived: boolean
}

interface AshbyListArchiveReasonsResponse extends ToolResponse {
output: {
archiveReasons: Array<{
id: string
text: string
reasonType: string
isArchived: boolean
}>
archiveReasons: AshbyArchiveReason[]
}
}

@@ -31,6 +34,12 @@ export const listArchiveReasonsTool: ToolConfig<
visibility: 'user-only',
description: 'Ashby API Key',
},
includeArchived: {
type: 'boolean',
required: false,
visibility: 'user-or-llm',
description: 'Whether to include archived archive reasons in the response (default false)',
},
},

request: {
@@ -40,7 +49,11 @@ export const listArchiveReasonsTool: ToolConfig<
'Content-Type': 'application/json',
Authorization: `Basic ${btoa(`${params.apiKey}:`)}`,
}),
body: () => ({}),
body: (params) => {
const body: Record<string, unknown> = {}
if (params.includeArchived !== undefined) body.includeArchived = params.includeArchived
return body
},
},

transformResponse: async (response: Response) => {
@@ -54,10 +67,10 @@ export const listArchiveReasonsTool: ToolConfig<
success: true,
output: {
archiveReasons: (data.results ?? []).map((r: Record<string, unknown>) => ({
id: r.id ?? null,
text: r.text ?? null,
reasonType: r.reasonType ?? null,
isArchived: r.isArchived ?? false,
id: (r.id as string) ?? '',
text: (r.text as string) ?? '',
reasonType: (r.reasonType as string) ?? '',
isArchived: (r.isArchived as boolean) ?? false,
})),
},
}
@@ -72,7 +85,10 @@ export const listArchiveReasonsTool: ToolConfig<
properties: {
id: { type: 'string', description: 'Archive reason UUID' },
text: { type: 'string', description: 'Archive reason text' },
reasonType: { type: 'string', description: 'Reason type' },
reasonType: {
type: 'string',
description: 'Reason type (RejectedByCandidate, RejectedByOrg, Other)',
},
isArchived: { type: 'boolean', description: 'Whether the reason is archived' },
},
},

@@ -2,15 +2,24 @@ import type { ToolConfig, ToolResponse } from '@/tools/types'

interface AshbyListCandidateTagsParams {
apiKey: string
includeArchived?: boolean
cursor?: string
syncToken?: string
perPage?: number
}

interface AshbyCandidateTag {
id: string
title: string
isArchived: boolean
}

interface AshbyListCandidateTagsResponse extends ToolResponse {
output: {
tags: Array<{
id: string
title: string
isArchived: boolean
}>
tags: AshbyCandidateTag[]
moreDataAvailable: boolean
nextCursor: string | null
syncToken: string | null
}
}

@@ -30,6 +39,30 @@ export const listCandidateTagsTool: ToolConfig<
visibility: 'user-only',
description: 'Ashby API Key',
},
includeArchived: {
type: 'boolean',
required: false,
visibility: 'user-or-llm',
description: 'Whether to include archived candidate tags (default false)',
},
cursor: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Opaque pagination cursor from a previous response nextCursor value',
},
syncToken: {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Sync token from a previous response to fetch only changed results',
},
perPage: {
type: 'number',
required: false,
visibility: 'user-or-llm',
description: 'Number of results per page (default 100)',
},
},

request: {
@@ -39,7 +72,14 @@ export const listCandidateTagsTool: ToolConfig<
'Content-Type': 'application/json',
Authorization: `Basic ${btoa(`${params.apiKey}:`)}`,
}),
body: () => ({}),
body: (params) => {
const body: Record<string, unknown> = {}
if (params.includeArchived !== undefined) body.includeArchived = params.includeArchived
if (params.cursor) body.cursor = params.cursor
if (params.syncToken) body.syncToken = params.syncToken
if (params.perPage) body.limit = params.perPage
return body
},
},

transformResponse: async (response: Response) => {
@@ -53,10 +93,13 @@ export const listCandidateTagsTool: ToolConfig<
success: true,
output: {
tags: (data.results ?? []).map((t: Record<string, unknown>) => ({
id: t.id ?? null,
title: t.title ?? null,
isArchived: t.isArchived ?? false,
id: (t.id as string) ?? '',
title: (t.title as string) ?? '',
isArchived: (t.isArchived as boolean) ?? false,
})),
moreDataAvailable: data.moreDataAvailable ?? false,
nextCursor: data.nextCursor ?? null,
syncToken: data.syncToken ?? null,
},
}
},
@@ -74,5 +117,19 @@ export const listCandidateTagsTool: ToolConfig<
},
},
},
moreDataAvailable: {
type: 'boolean',
description: 'Whether more pages of results exist',
},
nextCursor: {
type: 'string',
description: 'Opaque cursor for fetching the next page',
optional: true,
},
syncToken: {
type: 'string',
description: 'Sync token to use for incremental updates in future requests',
optional: true,
},
},
}
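Beyond cursors, `candidateTag.list` also exposes a `syncToken` for incremental fetches: persist the token from one full listing and send it back later to receive only tags that changed in between, per the tool's own param and output descriptions. A sketch under the same assumptions as above (endpoint URL inferred, storage left to the caller):

```ts
// Sketch: incremental sync with candidateTag.list using a saved syncToken.
// Assumption: the endpoint URL; the body/response shapes mirror the tool above.
async function fetchChangedCandidateTags(apiKey: string, savedSyncToken?: string) {
  const res = await fetch('https://api.ashbyhq.com/candidateTag.list', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Basic ${btoa(`${apiKey}:`)}`,
    },
    // No syncToken -> full listing; with one -> only changed results.
    body: JSON.stringify(savedSyncToken ? { syncToken: savedSyncToken } : {}),
  })
  const data = await res.json()
  if (!data.success) {
    throw new Error(data.errorInfo?.message || 'Failed to list candidate tags')
  }
  // Persist the returned syncToken for the next incremental call.
  return { tags: data.results ?? [], syncToken: data.syncToken ?? null }
}
```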
@@ -1,5 +1,6 @@
import type { AshbyListCandidatesParams, AshbyListCandidatesResponse } from '@/tools/ashby/types'
import { CANDIDATE_OUTPUTS, mapCandidate } from '@/tools/ashby/utils'
import type { ToolConfig } from '@/tools/types'
import type { AshbyListCandidatesParams, AshbyListCandidatesResponse } from './types'

export const listCandidatesTool: ToolConfig<
AshbyListCandidatesParams,
@@ -56,33 +57,7 @@ export const listCandidatesTool: ToolConfig<
return {
success: true,
output: {
candidates: (data.results ?? []).map(
(
c: Record<string, unknown> & {
primaryEmailAddress?: { value?: string; type?: string; isPrimary?: boolean }
primaryPhoneNumber?: { value?: string; type?: string; isPrimary?: boolean }
}
) => ({
id: c.id ?? null,
name: c.name ?? null,
primaryEmailAddress: c.primaryEmailAddress
? {
value: c.primaryEmailAddress.value ?? '',
type: c.primaryEmailAddress.type ?? 'Other',
isPrimary: c.primaryEmailAddress.isPrimary ?? true,
}
: null,
primaryPhoneNumber: c.primaryPhoneNumber
? {
value: c.primaryPhoneNumber.value ?? '',
type: c.primaryPhoneNumber.type ?? 'Other',
isPrimary: c.primaryPhoneNumber.isPrimary ?? true,
}
: null,
createdAt: c.createdAt ?? null,
updatedAt: c.updatedAt ?? null,
})
),
candidates: (data.results ?? []).map(mapCandidate),
moreDataAvailable: data.moreDataAvailable ?? false,
nextCursor: data.nextCursor ?? null,
},
@@ -95,32 +70,7 @@ export const listCandidatesTool: ToolConfig<
description: 'List of candidates',
items: {
type: 'object',
properties: {
id: { type: 'string', description: 'Candidate UUID' },
name: { type: 'string', description: 'Full name' },
primaryEmailAddress: {
type: 'object',
description: 'Primary email contact info',
optional: true,
properties: {
value: { type: 'string', description: 'Email address' },
type: { type: 'string', description: 'Contact type (Personal, Work, Other)' },
isPrimary: { type: 'boolean', description: 'Whether this is the primary email' },
},
},
primaryPhoneNumber: {
type: 'object',
description: 'Primary phone contact info',
optional: true,
properties: {
value: { type: 'string', description: 'Phone number' },
type: { type: 'string', description: 'Contact type (Personal, Work, Other)' },
isPrimary: { type: 'boolean', description: 'Whether this is the primary phone' },
},
},
createdAt: { type: 'string', description: 'ISO 8601 creation timestamp' },
updatedAt: { type: 'string', description: 'ISO 8601 last update timestamp' },
},
properties: CANDIDATE_OUTPUTS,
},
},
moreDataAvailable: {

@@ -4,15 +4,24 @@ interface AshbyListCustomFieldsParams {
apiKey: string
}

interface AshbyCustomFieldDefinition {
id: string
title: string
isPrivate: boolean
fieldType: string
objectType: string
isArchived: boolean
isRequired: boolean
selectableValues: Array<{
label: string
value: string
isArchived: boolean
}>
}

interface AshbyListCustomFieldsResponse extends ToolResponse {
output: {
customFields: Array<{
id: string
title: string
fieldType: string
objectType: string
isArchived: boolean
}>
customFields: AshbyCustomFieldDefinition[]
}
}

@@ -54,13 +63,24 @@ export const listCustomFieldsTool: ToolConfig<
return {
success: true,
output: {
customFields: (data.results ?? []).map((f: Record<string, unknown>) => ({
id: f.id ?? null,
title: f.title ?? null,
fieldType: f.fieldType ?? null,
objectType: f.objectType ?? null,
isArchived: f.isArchived ?? false,
})),
customFields: (data.results ?? []).map(
(f: Record<string, unknown> & { selectableValues?: Array<Record<string, unknown>> }) => ({
id: (f.id as string) ?? '',
title: (f.title as string) ?? '',
isPrivate: (f.isPrivate as boolean) ?? false,
fieldType: (f.fieldType as string) ?? '',
objectType: (f.objectType as string) ?? '',
isArchived: (f.isArchived as boolean) ?? false,
isRequired: (f.isRequired as boolean) ?? false,
selectableValues: Array.isArray(f.selectableValues)
? f.selectableValues.map((v) => ({
label: (v.label as string) ?? '',
value: (v.value as string) ?? '',
isArchived: (v.isArchived as boolean) ?? false,
}))
: [],
})
),
},
}
},
@@ -74,12 +94,35 @@ export const listCustomFieldsTool: ToolConfig<
properties: {
id: { type: 'string', description: 'Custom field UUID' },
title: { type: 'string', description: 'Custom field title' },
fieldType: { type: 'string', description: 'Field type (e.g. String, Number, Boolean)' },
isPrivate: {
type: 'boolean',
description: 'Whether the custom field is private',
},
fieldType: {
type: 'string',
description:
'Field data type (MultiValueSelect, NumberRange, String, Date, ValueSelect, Number, Currency, Boolean, LongText, CompensationRange)',
},
objectType: {
type: 'string',
description: 'Object type the field applies to (e.g. Candidate, Application, Job)',
description:
'Object type the field applies to (Application, Candidate, Employee, Job, Offer, Opening, Talent_Project)',
},
isArchived: { type: 'boolean', description: 'Whether the custom field is archived' },
isRequired: { type: 'boolean', description: 'Whether a value is required' },
selectableValues: {
type: 'array',
description:
'Selectable values for MultiValueSelect fields (empty for other field types)',
items: {
type: 'object',
properties: {
label: { type: 'string', description: 'Display label' },
value: { type: 'string', description: 'Stored value' },
isArchived: { type: 'boolean', description: 'Whether archived' },
},
},
},
},
},
},

@@ -4,14 +4,19 @@ interface AshbyListDepartmentsParams {
apiKey: string
}

interface AshbyDepartment {
id: string
name: string
externalName: string | null
isArchived: boolean
parentId: string | null
createdAt: string | null
updatedAt: string | null
}

interface AshbyListDepartmentsResponse extends ToolResponse {
output: {
departments: Array<{
id: string
name: string
isArchived: boolean
parentId: string | null
}>
departments: AshbyDepartment[]
}
}

@@ -54,10 +59,13 @@ export const listDepartmentsTool: ToolConfig<
success: true,
output: {
departments: (data.results ?? []).map((d: Record<string, unknown>) => ({
id: d.id ?? null,
name: d.name ?? null,
isArchived: d.isArchived ?? false,
id: (d.id as string) ?? '',
name: (d.name as string) ?? '',
externalName: (d.externalName as string) ?? null,
isArchived: (d.isArchived as boolean) ?? false,
parentId: (d.parentId as string) ?? null,
createdAt: (d.createdAt as string) ?? null,
updatedAt: (d.updatedAt as string) ?? null,
})),
},
}
@@ -72,12 +80,27 @@ export const listDepartmentsTool: ToolConfig<
properties: {
id: { type: 'string', description: 'Department UUID' },
name: { type: 'string', description: 'Department name' },
externalName: {
type: 'string',
description: 'Candidate-facing name used on job boards',
optional: true,
},
isArchived: { type: 'boolean', description: 'Whether the department is archived' },
parentId: {
type: 'string',
description: 'Parent department UUID',
optional: true,
},
createdAt: {
type: 'string',
description: 'ISO 8601 creation timestamp',
optional: true,
},
updatedAt: {
type: 'string',
description: 'ISO 8601 last update timestamp',
optional: true,
},
},
},
},

@@ -1,3 +1,5 @@
import type { AshbyUserSummary } from '@/tools/ashby/types'
import { mapUserSummary, USER_SUMMARY_OUTPUT } from '@/tools/ashby/utils'
import type { ToolConfig, ToolResponse } from '@/tools/types'

interface AshbyListInterviewSchedulesParams {
@@ -8,20 +10,81 @@ interface AshbyListInterviewSchedulesParams {
perPage?: number
}

interface AshbyInterviewEvent {
id: string
interviewId: string | null
interviewScheduleId: string | null
interviewerUserIds: string[]
createdAt: string | null
updatedAt: string | null
startTime: string | null
endTime: string | null
feedbackLink: string | null
location: string | null
meetingLink: string | null
hasSubmittedFeedback: boolean
}

interface AshbyInterviewSchedule {
id: string
status: string | null
applicationId: string
interviewStageId: string | null
scheduledBy: AshbyUserSummary | null
createdAt: string | null
updatedAt: string | null
interviewEvents: AshbyInterviewEvent[]
}

interface AshbyListInterviewSchedulesResponse extends ToolResponse {
output: {
interviewSchedules: Array<{
id: string
applicationId: string
interviewStageId: string | null
status: string | null
createdAt: string
}>
interviewSchedules: AshbyInterviewSchedule[]
moreDataAvailable: boolean
nextCursor: string | null
}
}

type UnknownRecord = Record<string, unknown>

function mapInterviewEvent(raw: unknown): AshbyInterviewEvent | null {
if (!raw || typeof raw !== 'object') return null
const e = raw as UnknownRecord
return {
id: (e.id as string) ?? '',
interviewId: (e.interviewId as string) ?? null,
interviewScheduleId: (e.interviewScheduleId as string) ?? null,
interviewerUserIds: Array.isArray(e.interviewerUserIds)
? (e.interviewerUserIds as string[])
: [],
createdAt: (e.createdAt as string) ?? null,
updatedAt: (e.updatedAt as string) ?? null,
startTime: (e.startTime as string) ?? null,
endTime: (e.endTime as string) ?? null,
feedbackLink: (e.feedbackLink as string) ?? null,
location: (e.location as string) ?? null,
meetingLink: (e.meetingLink as string) ?? null,
hasSubmittedFeedback: (e.hasSubmittedFeedback as boolean) ?? false,
}
}

function mapInterviewSchedule(raw: unknown): AshbyInterviewSchedule {
const s = (raw ?? {}) as UnknownRecord
return {
id: (s.id as string) ?? '',
status: (s.status as string) ?? null,
applicationId: (s.applicationId as string) ?? '',
interviewStageId: (s.interviewStageId as string) ?? null,
scheduledBy: mapUserSummary(s.scheduledBy),
createdAt: (s.createdAt as string) ?? null,
updatedAt: (s.updatedAt as string) ?? null,
interviewEvents: Array.isArray(s.interviewEvents)
? (s.interviewEvents as unknown[])
.map(mapInterviewEvent)
.filter((e): e is AshbyInterviewEvent => e !== null)
: [],
}
}

export const listInterviewsTool: ToolConfig<
AshbyListInterviewSchedulesParams,
AshbyListInterviewSchedulesResponse
@@ -74,8 +137,8 @@ export const listInterviewsTool: ToolConfig<
}),
body: (params) => {
const body: Record<string, unknown> = {}
if (params.applicationId) body.applicationId = params.applicationId
if (params.interviewStageId) body.interviewStageId = params.interviewStageId
if (params.applicationId) body.applicationId = params.applicationId.trim()
if (params.interviewStageId) body.interviewStageId = params.interviewStageId.trim()
if (params.cursor) body.cursor = params.cursor
if (params.perPage) body.limit = params.perPage
return body
@@ -92,13 +155,7 @@ export const listInterviewsTool: ToolConfig<
return {
success: true,
output: {
interviewSchedules: (data.results ?? []).map((s: Record<string, unknown>) => ({
id: s.id ?? null,
applicationId: s.applicationId ?? null,
interviewStageId: s.interviewStageId ?? null,
status: s.status ?? null,
createdAt: s.createdAt ?? null,
})),
interviewSchedules: (data.results ?? []).map(mapInterviewSchedule),
moreDataAvailable: data.moreDataAvailable ?? false,
nextCursor: data.nextCursor ?? null,
},
@@ -113,14 +170,92 @@ export const listInterviewsTool: ToolConfig<
type: 'object',
properties: {
id: { type: 'string', description: 'Interview schedule UUID' },
status: {
type: 'string',
description:
'Schedule status (NeedsScheduling, WaitingOnCandidateBooking, Scheduled, Complete, Cancelled, OnHold, etc.)',
optional: true,
},
applicationId: { type: 'string', description: 'Associated application UUID' },
interviewStageId: {
type: 'string',
description: 'Interview stage UUID',
optional: true,
},
status: { type: 'string', description: 'Schedule status', optional: true },
createdAt: { type: 'string', description: 'ISO 8601 creation timestamp' },
scheduledBy: {
...USER_SUMMARY_OUTPUT,
description: 'User who scheduled the interview (null if not yet scheduled)',
},
createdAt: {
type: 'string',
description: 'ISO 8601 creation timestamp',
optional: true,
},
updatedAt: {
type: 'string',
description: 'ISO 8601 last update timestamp',
optional: true,
},
interviewEvents: {
type: 'array',
description: 'Scheduled interview events on this schedule',
items: {
type: 'object',
properties: {
id: { type: 'string', description: 'Event UUID' },
interviewId: {
type: 'string',
description: 'Interview template UUID',
optional: true,
},
interviewScheduleId: {
type: 'string',
description: 'Parent schedule UUID',
optional: true,
},
interviewerUserIds: {
type: 'array',
description: 'User UUIDs of interviewers assigned to the event',
items: { type: 'string', description: 'User UUID' },
},
createdAt: {
type: 'string',
description: 'Event creation timestamp',
optional: true,
},
updatedAt: {
type: 'string',
description: 'Event last updated timestamp',
optional: true,
},
startTime: {
type: 'string',
description: 'Event start time',
optional: true,
},
endTime: { type: 'string', description: 'Event end time', optional: true },
feedbackLink: {
type: 'string',
description: 'URL to submit feedback for the event',
optional: true,
},
location: {
type: 'string',
description: 'Physical location',
optional: true,
},
meetingLink: {
type: 'string',
description: 'Virtual meeting URL',
optional: true,
},
hasSubmittedFeedback: {
type: 'boolean',
description: 'Whether any feedback has been submitted',
},
},
},
},
},
},
},
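One detail worth noting in `mapInterviewSchedule` above: the chained `.map(mapInterviewEvent).filter((e): e is AshbyInterviewEvent => e !== null)` uses a user-defined type guard, which is what lets TypeScript narrow `(AshbyInterviewEvent | null)[]` down to `AshbyInterviewEvent[]` after malformed events are dropped. The same pattern in isolation:

```ts
// Standalone illustration of the type-predicate filter used above.
const maybeNumbers: Array<number | null> = [1, null, 2, null, 3]

// Without the "n is number" predicate, the result type would remain
// Array<number | null> even though the nulls are gone at runtime.
const numbers: number[] = maybeNumbers.filter((n): n is number => n !== null)

console.log(numbers) // [1, 2, 3]
```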
@@ -4,18 +4,32 @@ interface AshbyListJobPostingsParams {
|
||||
apiKey: string
|
||||
}
|
||||
|
||||
interface AshbyJobPostingSummary {
|
||||
id: string
|
||||
title: string
|
||||
jobId: string | null
|
||||
departmentName: string | null
|
||||
teamName: string | null
|
||||
locationName: string | null
|
||||
locationIds: {
|
||||
primaryLocationId: string | null
|
||||
secondaryLocationIds: string[]
|
||||
} | null
|
||||
workplaceType: string | null
|
||||
employmentType: string | null
|
||||
isListed: boolean
|
||||
publishedDate: string | null
|
||||
applicationDeadline: string | null
|
||||
externalLink: string | null
|
||||
applyLink: string | null
|
||||
compensationTierSummary: string | null
|
||||
shouldDisplayCompensationOnJobBoard: boolean
|
||||
updatedAt: string | null
|
||||
}
|
||||
|
||||
interface AshbyListJobPostingsResponse extends ToolResponse {
|
||||
output: {
|
||||
jobPostings: Array<{
|
||||
id: string
|
||||
title: string
|
||||
jobId: string | null
|
||||
locationName: string | null
|
||||
departmentName: string | null
|
||||
employmentType: string | null
|
||||
isListed: boolean
|
||||
publishedDate: string | null
|
||||
}>
|
||||
jobPostings: AshbyJobPostingSummary[]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -57,16 +71,39 @@ export const listJobPostingsTool: ToolConfig<
|
||||
return {
|
||||
success: true,
|
||||
output: {
|
||||
jobPostings: (data.results ?? []).map((jp: Record<string, unknown>) => ({
|
||||
id: jp.id ?? null,
|
||||
title: (jp.jobTitle as string) ?? (jp.title as string) ?? null,
|
||||
jobId: jp.jobId ?? null,
|
||||
locationName: jp.locationName ?? null,
|
||||
departmentName: jp.departmentName ?? null,
|
||||
employmentType: jp.employmentType ?? null,
|
||||
isListed: jp.isListed ?? false,
|
||||
publishedDate: jp.publishedDate ?? null,
|
||||
})),
|
||||
jobPostings: (data.results ?? []).map(
|
||||
(
|
||||
jp: Record<string, unknown> & {
|
||||
locationIds?: { primaryLocationId?: string; secondaryLocationIds?: string[] }
|
||||
}
|
||||
) => ({
|
||||
id: (jp.id as string) ?? '',
|
||||
title: (jp.title as string) ?? '',
|
||||
jobId: (jp.jobId as string) ?? null,
|
||||
departmentName: (jp.departmentName as string) ?? null,
|
||||
            teamName: (jp.teamName as string) ?? null,
            locationName: (jp.locationName as string) ?? null,
            locationIds: jp.locationIds
              ? {
                  primaryLocationId: jp.locationIds.primaryLocationId ?? null,
                  secondaryLocationIds: Array.isArray(jp.locationIds.secondaryLocationIds)
                    ? jp.locationIds.secondaryLocationIds
                    : [],
                }
              : null,
            workplaceType: (jp.workplaceType as string) ?? null,
            employmentType: (jp.employmentType as string) ?? null,
            isListed: (jp.isListed as boolean) ?? false,
            publishedDate: (jp.publishedDate as string) ?? null,
            applicationDeadline: (jp.applicationDeadline as string) ?? null,
            externalLink: (jp.externalLink as string) ?? null,
            applyLink: (jp.applyLink as string) ?? null,
            compensationTierSummary: (jp.compensationTierSummary as string) ?? null,
            shouldDisplayCompensationOnJobBoard:
              (jp.shouldDisplayCompensationOnJobBoard as boolean) ?? false,
            updatedAt: (jp.updatedAt as string) ?? null,
          })
        ),
      },
    }
  },
@@ -81,15 +118,75 @@ export const listJobPostingsTool: ToolConfig<
        id: { type: 'string', description: 'Job posting UUID' },
        title: { type: 'string', description: 'Job posting title' },
        jobId: { type: 'string', description: 'Associated job UUID', optional: true },
        locationName: { type: 'string', description: 'Location name', optional: true },
        departmentName: { type: 'string', description: 'Department name', optional: true },
        teamName: { type: 'string', description: 'Team name', optional: true },
        locationName: {
          type: 'string',
          description: 'Primary location display name',
          optional: true,
        },
        locationIds: {
          type: 'object',
          description: 'Primary and secondary location UUIDs',
          optional: true,
          properties: {
            primaryLocationId: {
              type: 'string',
              description: 'Primary location UUID',
              optional: true,
            },
            secondaryLocationIds: {
              type: 'array',
              description: 'Secondary location UUIDs',
              items: { type: 'string', description: 'Location UUID' },
            },
          },
        },
        workplaceType: {
          type: 'string',
          description: 'Workplace type (OnSite, Remote, Hybrid)',
          optional: true,
        },
        employmentType: {
          type: 'string',
          description: 'Employment type (e.g. FullTime, PartTime, Contract)',
          description: 'Employment type (FullTime, PartTime, Intern, Contract, Temporary)',
          optional: true,
        },
        isListed: { type: 'boolean', description: 'Whether the posting is publicly listed' },
        publishedDate: { type: 'string', description: 'ISO 8601 published date', optional: true },
        publishedDate: {
          type: 'string',
          description: 'ISO 8601 published date',
          optional: true,
        },
        applicationDeadline: {
          type: 'string',
          description: 'ISO 8601 application deadline',
          optional: true,
        },
        externalLink: {
          type: 'string',
          description: 'External link to the job posting',
          optional: true,
        },
        applyLink: {
          type: 'string',
          description: 'Direct apply link for the job posting',
          optional: true,
        },
        compensationTierSummary: {
          type: 'string',
          description: 'Compensation tier summary for job boards',
          optional: true,
        },
        shouldDisplayCompensationOnJobBoard: {
          type: 'boolean',
          description: 'Whether compensation is shown on the job board',
        },
        updatedAt: {
          type: 'string',
          description: 'ISO 8601 last update timestamp',
          optional: true,
        },
      },
    },
  },

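The transforms above (and in the files that follow) all use the same normalization idiom: the raw Ashby payload is typed `Record<string, unknown>`, and each field is narrowed with an `as` cast plus a `??` fallback, so tool outputs expose a stable shape (`null` for missing optionals, `''`/`false` for required primitives) rather than leaking `undefined`. A minimal sketch of the idiom, with invented values:

```ts
// Illustrative only — variable names and values are invented.
const raw: Record<string, unknown> = { title: 'Staff Engineer' }

const title = (raw.title as string) ?? ''           // required string -> default ''
const teamName = (raw.teamName as string) ?? null   // optional string -> default null
const isListed = (raw.isListed as boolean) ?? false // boolean flag -> default false
```

The `as` cast does nothing at runtime, so an absent field still reaches the `??` as `undefined` and picks up the default.
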
@@ -1,5 +1,6 @@
import type { AshbyListJobsParams, AshbyListJobsResponse } from '@/tools/ashby/types'
import { JOB_OUTPUTS, mapJob } from '@/tools/ashby/utils'
import type { ToolConfig } from '@/tools/types'
import type { AshbyListJobsParams, AshbyListJobsResponse } from './types'

export const listJobsTool: ToolConfig<AshbyListJobsParams, AshbyListJobsResponse> = {
  id: 'ashby_list_jobs',
@@ -61,16 +62,7 @@ export const listJobsTool: ToolConfig<AshbyListJobsParams, AshbyListJobsResponse
    return {
      success: true,
      output: {
        jobs: (data.results ?? []).map((j: Record<string, unknown>) => ({
          id: j.id ?? null,
          title: j.title ?? null,
          status: j.status ?? null,
          employmentType: j.employmentType ?? null,
          departmentId: j.departmentId ?? null,
          locationId: j.locationId ?? null,
          createdAt: j.createdAt ?? null,
          updatedAt: j.updatedAt ?? null,
        })),
        jobs: (data.results ?? []).map(mapJob),
        moreDataAvailable: data.moreDataAvailable ?? false,
        nextCursor: data.nextCursor ?? null,
      },
@@ -83,20 +75,7 @@ export const listJobsTool: ToolConfig<AshbyListJobsParams, AshbyListJobsResponse
      description: 'List of jobs',
      items: {
        type: 'object',
        properties: {
          id: { type: 'string', description: 'Job UUID' },
          title: { type: 'string', description: 'Job title' },
          status: { type: 'string', description: 'Job status (Open, Closed, Archived, Draft)' },
          employmentType: {
            type: 'string',
            description: 'Employment type (FullTime, PartTime, Intern, Contract, Temporary)',
            optional: true,
          },
          departmentId: { type: 'string', description: 'Department UUID', optional: true },
          locationId: { type: 'string', description: 'Location UUID', optional: true },
          createdAt: { type: 'string', description: 'ISO 8601 creation timestamp' },
          updatedAt: { type: 'string', description: 'ISO 8601 last update timestamp' },
        },
        properties: JOB_OUTPUTS,
      },
    },
    moreDataAvailable: {

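`mapJob` and `JOB_OUTPUTS` come from `@/tools/ashby/utils`, one of the files omitted from this view, so their exact definitions are not shown. A plausible sketch, inferred purely from the inline mapping and schema they replace above:

```ts
// Hypothetical reconstruction of the shared helpers in @/tools/ashby/utils —
// the real file is not in this diff; fields mirror the removed inline code.
export interface AshbyJob {
  id: string
  title: string
  status: string | null
  employmentType: string | null
  departmentId: string | null
  locationId: string | null
  createdAt: string | null
  updatedAt: string | null
}

export function mapJob(j: Record<string, unknown>): AshbyJob {
  return {
    id: (j.id as string) ?? '',
    title: (j.title as string) ?? '',
    status: (j.status as string) ?? null,
    employmentType: (j.employmentType as string) ?? null,
    departmentId: (j.departmentId as string) ?? null,
    locationId: (j.locationId as string) ?? null,
    createdAt: (j.createdAt as string) ?? null,
    updatedAt: (j.updatedAt as string) ?? null,
  }
}

export const JOB_OUTPUTS = {
  id: { type: 'string', description: 'Job UUID' },
  title: { type: 'string', description: 'Job title' },
  status: { type: 'string', description: 'Job status (Open, Closed, Archived, Draft)' },
  employmentType: {
    type: 'string',
    description: 'Employment type (FullTime, PartTime, Intern, Contract, Temporary)',
    optional: true,
  },
  departmentId: { type: 'string', description: 'Department UUID', optional: true },
  locationId: { type: 'string', description: 'Location UUID', optional: true },
  createdAt: { type: 'string', description: 'ISO 8601 creation timestamp' },
  updatedAt: { type: 'string', description: 'ISO 8601 last update timestamp' },
}
```

Whatever its actual shape, centralizing the mapper means every tool that returns jobs emits an identical record.
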
@@ -4,19 +4,27 @@ interface AshbyListLocationsParams {
  apiKey: string
}

interface AshbyLocation {
  id: string
  name: string
  externalName: string | null
  isArchived: boolean
  isRemote: boolean
  workplaceType: string | null
  parentLocationId: string | null
  type: string | null
  address: {
    addressCountry: string | null
    addressRegion: string | null
    addressLocality: string | null
    postalCode: string | null
    streetAddress: string | null
  } | null
}

interface AshbyListLocationsResponse extends ToolResponse {
  output: {
    locations: Array<{
      id: string
      name: string
      isArchived: boolean
      isRemote: boolean
      address: {
        city: string | null
        region: string | null
        country: string | null
      } | null
    }>
    locations: AshbyLocation[]
  }
}

@@ -58,27 +66,30 @@ export const listLocationsTool: ToolConfig<AshbyListLocationsParams, AshbyListLo
        locations: (data.results ?? []).map(
          (
            l: Record<string, unknown> & {
              address?: {
                postalAddress?: {
                  addressLocality?: string
                  addressRegion?: string
                  addressCountry?: string
                }
              }
              address?: { postalAddress?: Record<string, unknown> }
            }
          ) => ({
            id: l.id ?? null,
            name: l.name ?? null,
            isArchived: l.isArchived ?? false,
            isRemote: l.isRemote ?? false,
            address: l.address?.postalAddress
              ? {
                  city: l.address.postalAddress.addressLocality ?? null,
                  region: l.address.postalAddress.addressRegion ?? null,
                  country: l.address.postalAddress.addressCountry ?? null,
                }
              : null,
          })
          ) => {
            const pa = l.address?.postalAddress
            return {
              id: (l.id as string) ?? '',
              name: (l.name as string) ?? '',
              externalName: (l.externalName as string) ?? null,
              isArchived: (l.isArchived as boolean) ?? false,
              isRemote: (l.isRemote as boolean) ?? false,
              workplaceType: (l.workplaceType as string) ?? null,
              parentLocationId: (l.parentLocationId as string) ?? null,
              type: (l.type as string) ?? null,
              address: pa
                ? {
                    addressCountry: (pa.addressCountry as string) ?? null,
                    addressRegion: (pa.addressRegion as string) ?? null,
                    addressLocality: (pa.addressLocality as string) ?? null,
                    postalCode: (pa.postalCode as string) ?? null,
                    streetAddress: (pa.streetAddress as string) ?? null,
                  }
                : null,
            }
          }
        ),
      },
    }
@@ -93,16 +104,49 @@ export const listLocationsTool: ToolConfig<AshbyListLocationsParams, AshbyListLo
        properties: {
          id: { type: 'string', description: 'Location UUID' },
          name: { type: 'string', description: 'Location name' },
          externalName: {
            type: 'string',
            description: 'Candidate-facing name used on job boards',
            optional: true,
          },
          isArchived: { type: 'boolean', description: 'Whether the location is archived' },
          isRemote: { type: 'boolean', description: 'Whether this is a remote location' },
          isRemote: {
            type: 'boolean',
            description: 'Whether the location is remote (use workplaceType instead)',
          },
          workplaceType: {
            type: 'string',
            description: 'Workplace type (OnSite, Hybrid, Remote)',
            optional: true,
          },
          parentLocationId: {
            type: 'string',
            description: 'Parent location UUID',
            optional: true,
          },
          type: {
            type: 'string',
            description: 'Location component type (Location, LocationHierarchy)',
            optional: true,
          },
          address: {
            type: 'object',
            description: 'Location address',
            description: 'Location postal address',
            optional: true,
            properties: {
              city: { type: 'string', description: 'City', optional: true },
              region: { type: 'string', description: 'State or region', optional: true },
              country: { type: 'string', description: 'Country', optional: true },
              addressCountry: { type: 'string', description: 'Country', optional: true },
              addressRegion: {
                type: 'string',
                description: 'State or region',
                optional: true,
              },
              addressLocality: {
                type: 'string',
                description: 'City or locality',
                optional: true,
              },
              postalCode: { type: 'string', description: 'Postal code', optional: true },
              streetAddress: { type: 'string', description: 'Street address', optional: true },
            },
          },
        },

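For orientation, an invented example of what the mapper above does with a raw `location.list` result (not real API output):

```ts
// Invented sample — illustrates only the address.postalAddress flattening.
const raw = {
  id: 'b59d6ba9-0b0e-4325-9923-48a4c7b6c98e',
  name: 'NYC HQ',
  isArchived: false,
  isRemote: false,
  workplaceType: 'OnSite',
  address: {
    postalAddress: { addressLocality: 'New York City', addressRegion: 'New York' },
  },
}
// The mapper returns `address` with every postal field present:
// { addressCountry: null, addressRegion: 'New York',
//   addressLocality: 'New York City', postalCode: null, streetAddress: null }
// and fields absent from the payload (externalName, parentLocationId, type)
// come back as null rather than undefined.
```
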
@@ -11,14 +11,15 @@ interface AshbyListNotesResponse extends ToolResponse {
  output: {
    notes: Array<{
      id: string
      content: string
      content: string | null
      isPrivate: boolean
      author: {
        id: string
        firstName: string
        lastName: string
        email: string
        firstName: string | null
        lastName: string | null
        email: string | null
      } | null
      createdAt: string
      createdAt: string | null
    }>
    moreDataAvailable: boolean
    nextCursor: string | null
@@ -67,7 +68,7 @@ export const listNotesTool: ToolConfig<AshbyListNotesParams, AshbyListNotesRespo
  }),
  body: (params) => {
    const body: Record<string, unknown> = {
      candidateId: params.candidateId,
      candidateId: params.candidateId.trim(),
    }
    if (params.cursor) body.cursor = params.cursor
    if (params.perPage) body.limit = params.perPage
@@ -91,17 +92,18 @@ export const listNotesTool: ToolConfig<AshbyListNotesParams, AshbyListNotesRespo
            author?: { id?: string; firstName?: string; lastName?: string; email?: string }
          }
        ) => ({
          id: n.id ?? null,
          content: n.content ?? null,
          id: (n.id as string) ?? '',
          content: (n.content as string) ?? null,
          isPrivate: (n.isPrivate as boolean) ?? false,
          author: n.author
            ? {
                id: n.author.id ?? null,
                id: n.author.id ?? '',
                firstName: n.author.firstName ?? null,
                lastName: n.author.lastName ?? null,
                email: n.author.email ?? null,
              }
            : null,
          createdAt: n.createdAt ?? null,
          createdAt: (n.createdAt as string) ?? null,
        })
      ),
      moreDataAvailable: data.moreDataAvailable ?? false,
@@ -118,19 +120,20 @@ export const listNotesTool: ToolConfig<AshbyListNotesParams, AshbyListNotesRespo
        type: 'object',
        properties: {
          id: { type: 'string', description: 'Note UUID' },
          content: { type: 'string', description: 'Note content' },
          content: { type: 'string', description: 'Note content', optional: true },
          isPrivate: { type: 'boolean', description: 'Whether the note is private' },
          author: {
            type: 'object',
            description: 'Note author',
            optional: true,
            properties: {
              id: { type: 'string', description: 'Author user UUID' },
              firstName: { type: 'string', description: 'First name' },
              lastName: { type: 'string', description: 'Last name' },
              email: { type: 'string', description: 'Email address' },
              firstName: { type: 'string', description: 'First name', optional: true },
              lastName: { type: 'string', description: 'Last name', optional: true },
              email: { type: 'string', description: 'Email address', optional: true },
            },
          },
          createdAt: { type: 'string', description: 'ISO 8601 creation timestamp' },
          createdAt: { type: 'string', description: 'ISO 8601 creation timestamp', optional: true },
        },
      },
    },

@@ -1,3 +1,5 @@
import type { AshbyOffer } from '@/tools/ashby/types'
import { mapOffer, OFFER_OUTPUTS } from '@/tools/ashby/utils'
import type { ToolConfig, ToolResponse } from '@/tools/types'

interface AshbyListOffersParams {
@@ -8,19 +10,7 @@ interface AshbyListOffersParams {

interface AshbyListOffersResponse extends ToolResponse {
  output: {
    offers: Array<{
      id: string
      offerStatus: string
      acceptanceStatus: string | null
      applicationId: string | null
      startDate: string | null
      salary: {
        currencyCode: string
        value: number
      } | null
      openingId: string | null
      createdAt: string | null
    }>
    offers: AshbyOffer[]
    moreDataAvailable: boolean
    nextCursor: string | null
  }
@@ -78,35 +68,7 @@ export const listOffersTool: ToolConfig<AshbyListOffersParams, AshbyListOffersRe
    return {
      success: true,
      output: {
        offers: (data.results ?? []).map(
          (
            o: Record<string, unknown> & {
              latestVersion?: {
                startDate?: string
                salary?: { currencyCode?: string; value?: number }
                openingId?: string
                createdAt?: string
              }
            }
          ) => {
            const v = o.latestVersion
            return {
              id: o.id ?? null,
              offerStatus: o.offerStatus ?? null,
              acceptanceStatus: o.acceptanceStatus ?? null,
              applicationId: o.applicationId ?? null,
              startDate: v?.startDate ?? null,
              salary: v?.salary
                ? {
                    currencyCode: v.salary.currencyCode ?? null,
                    value: v.salary.value ?? null,
                  }
                : null,
              openingId: v?.openingId ?? null,
              createdAt: v?.createdAt ?? null,
            }
          }
        ),
        offers: (data.results ?? []).map(mapOffer),
        moreDataAvailable: data.moreDataAvailable ?? false,
        nextCursor: data.nextCursor ?? null,
      },
@@ -119,28 +81,7 @@ export const listOffersTool: ToolConfig<AshbyListOffersParams, AshbyListOffersRe
      description: 'List of offers',
      items: {
        type: 'object',
        properties: {
          id: { type: 'string', description: 'Offer UUID' },
          offerStatus: { type: 'string', description: 'Offer status' },
          acceptanceStatus: { type: 'string', description: 'Acceptance status', optional: true },
          applicationId: {
            type: 'string',
            description: 'Associated application UUID',
            optional: true,
          },
          startDate: { type: 'string', description: 'Offer start date', optional: true },
          salary: {
            type: 'object',
            description: 'Salary details',
            optional: true,
            properties: {
              currencyCode: { type: 'string', description: 'ISO 4217 currency code' },
              value: { type: 'number', description: 'Salary amount' },
            },
          },
          openingId: { type: 'string', description: 'Associated opening UUID', optional: true },
          createdAt: { type: 'string', description: 'ISO 8601 creation timestamp', optional: true },
        },
        properties: OFFER_OUTPUTS,
      },
    },
    moreDataAvailable: {

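`mapOffer` and `OFFER_OUTPUTS` also live in the omitted `@/tools/ashby/utils`. Judging from the inline code they replace, the helper presumably keeps the same `latestVersion` handling; a sketch under that assumption:

```ts
// Hypothetical reconstruction — the real mapOffer is not shown in this diff.
// Version-scoped fields (startDate, salary, openingId, createdAt) are read off
// the offer's latestVersion, exactly as the removed inline mapping did.
export function mapOffer(
  o: Record<string, unknown> & {
    latestVersion?: {
      startDate?: string
      salary?: { currencyCode?: string; value?: number }
      openingId?: string
      createdAt?: string
    }
  }
) {
  const v = o.latestVersion
  return {
    id: (o.id as string) ?? '',
    offerStatus: (o.offerStatus as string) ?? '',
    acceptanceStatus: (o.acceptanceStatus as string) ?? null,
    applicationId: (o.applicationId as string) ?? null,
    startDate: v?.startDate ?? null,
    salary: v?.salary
      ? { currencyCode: v.salary.currencyCode ?? '', value: v.salary.value ?? 0 }
      : null,
    openingId: v?.openingId ?? null,
    createdAt: v?.createdAt ?? null,
  }
}
```

The defaults for the required salary fields (`''`/`0`) are a guess; the real helper may differ so long as it satisfies the shared `AshbyOffer` type.
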
@@ -1,3 +1,5 @@
import type { AshbyOpening } from '@/tools/ashby/types'
import { mapOpenings, OPENINGS_OUTPUT } from '@/tools/ashby/utils'
import type { ToolConfig, ToolResponse } from '@/tools/types'

interface AshbyListOpeningsParams {
@@ -8,13 +10,7 @@ interface AshbyListOpeningsParams {

interface AshbyListOpeningsResponse extends ToolResponse {
  output: {
    openings: Array<{
      id: string
      openingState: string | null
      isArchived: boolean
      openedAt: string | null
      closedAt: string | null
    }>
    openings: AshbyOpening[]
    moreDataAvailable: boolean
    nextCursor: string | null
  }
@@ -72,13 +68,7 @@ export const listOpeningsTool: ToolConfig<AshbyListOpeningsParams, AshbyListOpen
    return {
      success: true,
      output: {
        openings: (data.results ?? []).map((o: Record<string, unknown>) => ({
          id: o.id ?? null,
          openingState: o.openingState ?? null,
          isArchived: o.isArchived ?? false,
          openedAt: o.openedAt ?? null,
          closedAt: o.closedAt ?? null,
        })),
        openings: mapOpenings(data.results),
        moreDataAvailable: data.moreDataAvailable ?? false,
        nextCursor: data.nextCursor ?? null,
      },
@@ -86,24 +76,7 @@ export const listOpeningsTool: ToolConfig<AshbyListOpeningsParams, AshbyListOpen
  },

  outputs: {
    openings: {
      type: 'array',
      description: 'List of openings',
      items: {
        type: 'object',
        properties: {
          id: { type: 'string', description: 'Opening UUID' },
          openingState: {
            type: 'string',
            description: 'Opening state (Approved, Closed, Draft, Filled, Open)',
            optional: true,
          },
          isArchived: { type: 'boolean', description: 'Whether the opening is archived' },
          openedAt: { type: 'string', description: 'ISO 8601 opened timestamp', optional: true },
          closedAt: { type: 'string', description: 'ISO 8601 closed timestamp', optional: true },
        },
      },
    },
    openings: OPENINGS_OUTPUT,
    moreDataAvailable: {
      type: 'boolean',
      description: 'Whether more pages of results exist',

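Likewise, `mapOpenings` and `OPENINGS_OUTPUT` are defined in the omitted utils module. Since the call site passes `data.results` without a `?? []` guard, the helper presumably tolerates an undefined array; a sketch under that assumption:

```ts
// Hypothetical reconstruction — the real mapOpenings is not shown in this diff.
// Accepts the possibly-undefined results array and normalizes each opening,
// matching the removed inline mapping above.
export function mapOpenings(results: Record<string, unknown>[] | undefined) {
  return (results ?? []).map((o) => ({
    id: (o.id as string) ?? '',
    openingState: (o.openingState as string) ?? null,
    isArchived: (o.isArchived as boolean) ?? false,
    openedAt: (o.openedAt as string) ?? null,
    closedAt: (o.closedAt as string) ?? null,
  }))
}
```
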
@@ -1,3 +1,4 @@
import type { AshbySourceSummary } from '@/tools/ashby/types'
import type { ToolConfig, ToolResponse } from '@/tools/types'

interface AshbyListSourcesParams {
@@ -6,11 +7,7 @@ interface AshbyListSourcesParams {

interface AshbyListSourcesResponse extends ToolResponse {
  output: {
    sources: Array<{
      id: string
      title: string
      isArchived: boolean
    }>
    sources: AshbySourceSummary[]
  }
}

@@ -49,11 +46,23 @@ export const listSourcesTool: ToolConfig<AshbyListSourcesParams, AshbyListSource
    return {
      success: true,
      output: {
        sources: (data.results ?? []).map((s: Record<string, unknown>) => ({
          id: s.id ?? null,
          title: s.title ?? null,
          isArchived: s.isArchived ?? false,
        })),
        sources: (data.results ?? []).map(
          (s: Record<string, unknown> & { sourceType?: Record<string, unknown> }) => {
            const sourceType = s.sourceType
            return {
              id: (s.id as string) ?? '',
              title: (s.title as string) ?? '',
              isArchived: (s.isArchived as boolean) ?? false,
              sourceType: sourceType
                ? {
                    id: (sourceType.id as string) ?? '',
                    title: (sourceType.title as string) ?? '',
                    isArchived: (sourceType.isArchived as boolean) ?? false,
                  }
                : null,
            }
          }
        ),
      },
    }
  },
@@ -68,6 +77,16 @@ export const listSourcesTool: ToolConfig<AshbyListSourcesParams, AshbyListSource
          id: { type: 'string', description: 'Source UUID' },
          title: { type: 'string', description: 'Source title' },
          isArchived: { type: 'boolean', description: 'Whether the source is archived' },
          sourceType: {
            type: 'object',
            description: 'Source type grouping',
            optional: true,
            properties: {
              id: { type: 'string', description: 'Source type UUID' },
              title: { type: 'string', description: 'Source type title' },
              isArchived: { type: 'boolean', description: 'Whether archived' },
            },
          },
        },

Some files were not shown because too many files have changed in this diff.