mirror of
https://github.com/simstudioai/sim.git
synced 2026-04-28 03:00:29 -04:00
Compare commits
48 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
489f2d3bd0 | ||
|
|
65e17de065 | ||
|
|
79ff5d80b3 | ||
|
|
2a52141d2f | ||
|
|
76ad59fd7d | ||
|
|
c32c1cb917 | ||
|
|
58a3ae2aa4 | ||
|
|
50e74f75ef | ||
|
|
60652e621c | ||
|
|
8863f1132a | ||
|
|
d6c1bc2fef | ||
|
|
d93a6f57bc | ||
|
|
df581c3efb | ||
|
|
595c4c3613 | ||
|
|
f16d17ba49 | ||
|
|
e6fefc863c | ||
|
|
5fba724818 | ||
|
|
60b80ec172 | ||
|
|
af4be770a1 | ||
|
|
f330fe22a2 | ||
|
|
efc868263a | ||
|
|
56044776d5 | ||
|
|
04f1d015f3 | ||
|
|
3422f64c5f | ||
|
|
ccb5f1e690 | ||
|
|
6066fc1960 | ||
|
|
91ccbb9921 | ||
|
|
dcbe7c69b0 | ||
|
|
c22ac38ab0 | ||
|
|
cdde8cbd66 | ||
|
|
0ae19dab85 | ||
|
|
3b11c814f8 | ||
|
|
b86ebb35fd | ||
|
|
65972f2fa3 | ||
|
|
7ca736a7a1 | ||
|
|
5f0f0edd63 | ||
|
|
bed5e95742 | ||
|
|
f7ab39984c | ||
|
|
8ce56fe1f2 | ||
|
|
64cfda523b | ||
|
|
8c9ddefc53 | ||
|
|
d927d8bdff | ||
|
|
0aeab026a8 | ||
|
|
7c619e78d8 | ||
|
|
41a1b50ace | ||
|
|
bbf400ff13 | ||
|
|
7941dcde98 | ||
|
|
51ace655e4 |
@@ -144,7 +144,7 @@ vi.useFakeTimers()
|
||||
| `@/app/api/auth/oauth/utils` | `authOAuthUtilsMock`, `authOAuthUtilsMockFns` | `vi.mock('@/app/api/auth/oauth/utils', () => authOAuthUtilsMock)` |
|
||||
| `@/app/api/knowledge/utils` | `knowledgeApiUtilsMock`, `knowledgeApiUtilsMockFns` | `vi.mock('@/app/api/knowledge/utils', () => knowledgeApiUtilsMock)` |
|
||||
| `@/app/api/workflows/utils` | `workflowsApiUtilsMock`, `workflowsApiUtilsMockFns` | `vi.mock('@/app/api/workflows/utils', () => workflowsApiUtilsMock)` |
|
||||
| `@/lib/audit/log` | `auditMock`, `auditMockFns` | `vi.mock('@/lib/audit/log', () => auditMock)` |
|
||||
| `@sim/audit` | `auditMock`, `auditMockFns` | `vi.mock('@sim/audit', () => auditMock)` |
|
||||
| `@/lib/auth` | `authMock`, `authMockFns` | `vi.mock('@/lib/auth', () => authMock)` |
|
||||
| `@/lib/auth/hybrid` | `hybridAuthMock`, `hybridAuthMockFns` | `vi.mock('@/lib/auth/hybrid', () => hybridAuthMock)` |
|
||||
| `@/lib/copilot/request/http` | `copilotHttpMock`, `copilotHttpMockFns` | `vi.mock('@/lib/copilot/request/http', () => copilotHttpMock)` |
|
||||
|
||||
@@ -144,7 +144,7 @@ vi.useFakeTimers()
|
||||
| `@/app/api/auth/oauth/utils` | `authOAuthUtilsMock`, `authOAuthUtilsMockFns` | `vi.mock('@/app/api/auth/oauth/utils', () => authOAuthUtilsMock)` |
|
||||
| `@/app/api/knowledge/utils` | `knowledgeApiUtilsMock`, `knowledgeApiUtilsMockFns` | `vi.mock('@/app/api/knowledge/utils', () => knowledgeApiUtilsMock)` |
|
||||
| `@/app/api/workflows/utils` | `workflowsApiUtilsMock`, `workflowsApiUtilsMockFns` | `vi.mock('@/app/api/workflows/utils', () => workflowsApiUtilsMock)` |
|
||||
| `@/lib/audit/log` | `auditMock`, `auditMockFns` | `vi.mock('@/lib/audit/log', () => auditMock)` |
|
||||
| `@sim/audit` | `auditMock`, `auditMockFns` | `vi.mock('@sim/audit', () => auditMock)` |
|
||||
| `@/lib/auth` | `authMock`, `authMockFns` | `vi.mock('@/lib/auth', () => authMock)` |
|
||||
| `@/lib/auth/hybrid` | `hybridAuthMock`, `hybridAuthMockFns` | `vi.mock('@/lib/auth/hybrid', () => hybridAuthMock)` |
|
||||
| `@/lib/copilot/request/http` | `copilotHttpMock`, `copilotHttpMockFns` | `vi.mock('@/lib/copilot/request/http', () => copilotHttpMock)` |
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
FROM oven/bun:1.3.11-alpine
|
||||
FROM oven/bun:1.3.13-alpine
|
||||
|
||||
# Install necessary packages for development
|
||||
RUN apk add --no-cache \
|
||||
|
||||
@@ -71,7 +71,7 @@ fi
|
||||
|
||||
# Set up environment variables if .env doesn't exist for the sim app
|
||||
if [ ! -f "apps/sim/.env" ]; then
|
||||
echo "📄 Creating .env file from template..."
|
||||
echo "📄 Creating apps/sim/.env from template..."
|
||||
if [ -f "apps/sim/.env.example" ]; then
|
||||
cp apps/sim/.env.example apps/sim/.env
|
||||
else
|
||||
@@ -79,6 +79,18 @@ if [ ! -f "apps/sim/.env" ]; then
|
||||
fi
|
||||
fi
|
||||
|
||||
# Set up env for the realtime server (must match the shared values in apps/sim/.env)
|
||||
if [ ! -f "apps/realtime/.env" ] && [ -f "apps/realtime/.env.example" ]; then
|
||||
echo "📄 Creating apps/realtime/.env from template..."
|
||||
cp apps/realtime/.env.example apps/realtime/.env
|
||||
fi
|
||||
|
||||
# Set up packages/db/.env for drizzle-kit and migration scripts
|
||||
if [ ! -f "packages/db/.env" ] && [ -f "packages/db/.env.example" ]; then
|
||||
echo "📄 Creating packages/db/.env from template..."
|
||||
cp packages/db/.env.example packages/db/.env
|
||||
fi
|
||||
|
||||
# Generate schema and run database migrations
|
||||
echo "🗃️ Running database schema generation and migrations..."
|
||||
echo "Generating schema..."
|
||||
|
||||
259
.github/CONTRIBUTING.md
vendored
259
.github/CONTRIBUTING.md
vendored
@@ -2,8 +2,15 @@
|
||||
|
||||
Thank you for your interest in contributing to Sim! Our goal is to provide developers with a powerful, user-friendly platform for building, testing, and optimizing agentic workflows. We welcome contributions in all forms—from bug fixes and design improvements to brand-new features.
|
||||
|
||||
> **Project Overview:**
|
||||
> Sim is a monorepo using Turborepo, containing the main application (`apps/sim/`), documentation (`apps/docs/`), and shared packages (`packages/`). The main application is built with Next.js (app router), ReactFlow, Zustand, Shadcn, and Tailwind CSS. Please ensure your contributions follow our best practices for clarity, maintainability, and consistency.
|
||||
> **Project Overview:**
|
||||
> Sim is a Turborepo monorepo with two deployable apps and a set of shared packages:
|
||||
>
|
||||
> - `apps/sim/` — the main Next.js application (App Router, ReactFlow, Zustand, Shadcn, Tailwind CSS).
|
||||
> - `apps/realtime/` — a small Bun + Socket.IO server that powers the collaborative canvas. Shares DB and Better Auth secrets with `apps/sim` via `@sim/*` packages.
|
||||
> - `apps/docs/` — Fumadocs-based documentation site.
|
||||
> - `packages/` — shared workspace packages (`@sim/db`, `@sim/auth`, `@sim/audit`, `@sim/workflow-types`, `@sim/workflow-persistence`, `@sim/workflow-authz`, `@sim/realtime-protocol`, `@sim/security`, `@sim/logger`, `@sim/utils`, `@sim/testing`, `@sim/tsconfig`).
|
||||
>
|
||||
> Strict one-way dependency flow: `apps/* → packages/*`. Packages never import from apps. Please ensure your contributions follow this and our best practices for clarity, maintainability, and consistency.
|
||||
|
||||
---
|
||||
|
||||
@@ -24,14 +31,17 @@ Thank you for your interest in contributing to Sim! Our goal is to provide devel
|
||||
|
||||
We strive to keep our workflow as simple as possible. To contribute:
|
||||
|
||||
1. **Fork the Repository**
|
||||
1. **Fork the Repository**
|
||||
Click the **Fork** button on GitHub to create your own copy of the project.
|
||||
|
||||
2. **Clone Your Fork**
|
||||
|
||||
```bash
|
||||
git clone https://github.com/<your-username>/sim.git
|
||||
cd sim
|
||||
```
|
||||
3. **Create a Feature Branch**
|
||||
|
||||
3. **Create a Feature Branch**
|
||||
Create a new branch with a descriptive name:
|
||||
|
||||
```bash
|
||||
@@ -40,21 +50,23 @@ We strive to keep our workflow as simple as possible. To contribute:
|
||||
|
||||
Use a clear naming convention to indicate the type of work (e.g., `feat/`, `fix/`, `docs/`).
|
||||
|
||||
4. **Make Your Changes**
|
||||
4. **Make Your Changes**
|
||||
Ensure your changes are small, focused, and adhere to our coding guidelines.
|
||||
|
||||
5. **Commit Your Changes**
|
||||
5. **Commit Your Changes**
|
||||
Write clear, descriptive commit messages that follow the [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/#specification) specification. This allows us to maintain a coherent project history and generate changelogs automatically. For example:
|
||||
|
||||
- `feat(api): add new endpoint for user authentication`
|
||||
- `fix(ui): resolve button alignment issue`
|
||||
- `docs: update contribution guidelines`
|
||||
|
||||
6. **Push Your Branch**
|
||||
|
||||
```bash
|
||||
git push origin feat/your-feature-name
|
||||
```
|
||||
|
||||
7. **Create a Pull Request**
|
||||
7. **Create a Pull Request**
|
||||
Open a pull request against the `staging` branch on GitHub. Please provide a clear description of the changes and reference any relevant issues (e.g., `fixes #123`).
|
||||
|
||||
---
|
||||
@@ -65,7 +77,7 @@ If you discover a bug or have a feature request, please open an issue in our Git
|
||||
|
||||
- Provide a clear, descriptive title.
|
||||
- Include as many details as possible (steps to reproduce, screenshots, etc.).
|
||||
- **Tag Your Issue Appropriately:**
|
||||
- **Tag Your Issue Appropriately:**
|
||||
Use the following labels to help us categorize your issue:
|
||||
- **active:** Actively working on it right now.
|
||||
- **bug:** Something isn't working.
|
||||
@@ -82,12 +94,11 @@ If you discover a bug or have a feature request, please open an issue in our Git
|
||||
|
||||
Before creating a pull request:
|
||||
|
||||
- **Ensure Your Branch Is Up-to-Date:**
|
||||
- **Ensure Your Branch Is Up-to-Date:**
|
||||
Rebase your branch onto the latest `staging` branch to prevent merge conflicts.
|
||||
- **Follow the Guidelines:**
|
||||
- **Follow the Guidelines:**
|
||||
Make sure your changes are well-tested, follow our coding standards, and include relevant documentation if necessary.
|
||||
|
||||
- **Reference Issues:**
|
||||
- **Reference Issues:**
|
||||
If your PR addresses an existing issue, include `refs #<issue-number>` or `fixes #<issue-number>` in your PR description.
|
||||
|
||||
Our maintainers will review your pull request and provide feedback. We aim to make the review process as smooth and timely as possible.
|
||||
@@ -166,27 +177,27 @@ To use local models with Sim:
|
||||
|
||||
1. Install Ollama and pull models:
|
||||
|
||||
```bash
|
||||
# Install Ollama (if not already installed)
|
||||
curl -fsSL https://ollama.ai/install.sh | sh
|
||||
```bash
|
||||
# Install Ollama (if not already installed)
|
||||
curl -fsSL https://ollama.ai/install.sh | sh
|
||||
|
||||
# Pull a model (e.g., gemma3:4b)
|
||||
ollama pull gemma3:4b
|
||||
```
|
||||
# Pull a model (e.g., gemma3:4b)
|
||||
ollama pull gemma3:4b
|
||||
```
|
||||
|
||||
2. Start Sim with local model support:
|
||||
|
||||
```bash
|
||||
# With NVIDIA GPU support
|
||||
docker compose --profile local-gpu -f docker-compose.ollama.yml up -d
|
||||
```bash
|
||||
# With NVIDIA GPU support
|
||||
docker compose --profile local-gpu -f docker-compose.ollama.yml up -d
|
||||
|
||||
# Without GPU (CPU only)
|
||||
docker compose --profile local-cpu -f docker-compose.ollama.yml up -d
|
||||
# Without GPU (CPU only)
|
||||
docker compose --profile local-cpu -f docker-compose.ollama.yml up -d
|
||||
|
||||
# If hosting on a server, update the environment variables in the docker-compose.prod.yml file
|
||||
# to include the server's public IP then start again (OLLAMA_URL to i.e. http://1.1.1.1:11434)
|
||||
docker compose -f docker-compose.prod.yml up -d
|
||||
```
|
||||
# If hosting on a server, update the environment variables in the docker-compose.prod.yml file
|
||||
# to include the server's public IP then start again (OLLAMA_URL to i.e. http://1.1.1.1:11434)
|
||||
docker compose -f docker-compose.prod.yml up -d
|
||||
```
|
||||
|
||||
### Option 3: Using VS Code / Cursor Dev Containers
|
||||
|
||||
@@ -201,61 +212,104 @@ Dev Containers provide a consistent and easy-to-use development environment:
|
||||
2. **Setup Steps:**
|
||||
|
||||
- Clone the repository:
|
||||
|
||||
```bash
|
||||
git clone https://github.com/<your-username>/sim.git
|
||||
cd sim
|
||||
```
|
||||
- Open the project in VS Code/Cursor
|
||||
- When prompted, click "Reopen in Container" (or press F1 and select "Remote-Containers: Reopen in Container")
|
||||
- Wait for the container to build and initialize
|
||||
|
||||
- Open the project in VS Code/Cursor.
|
||||
- When prompted, click "Reopen in Container" (or press F1 and select "Remote-Containers: Reopen in Container").
|
||||
- Wait for the container to build and initialize.
|
||||
|
||||
3. **Start Developing:**
|
||||
|
||||
- Run `bun run dev:full` in the terminal or use the `sim-start` alias
|
||||
- This starts both the main application and the realtime socket server
|
||||
- All dependencies and configurations are automatically set up
|
||||
- Your changes will be automatically hot-reloaded
|
||||
- Run `bun run dev:full` in the terminal or use the `sim-start` alias.
|
||||
- This starts both the main application and the realtime socket server.
|
||||
- All dependencies and configurations are automatically set up.
|
||||
- Your changes will be automatically hot-reloaded.
|
||||
|
||||
4. **GitHub Codespaces:**
|
||||
- This setup also works with GitHub Codespaces if you prefer development in the browser
|
||||
- Just click "Code" → "Codespaces" → "Create codespace on staging"
|
||||
|
||||
- This setup also works with GitHub Codespaces if you prefer development in the browser.
|
||||
- Just click "Code" → "Codespaces" → "Create codespace on staging".
|
||||
|
||||
### Option 4: Manual Setup
|
||||
|
||||
If you prefer not to use Docker or Dev Containers:
|
||||
If you prefer not to use Docker or Dev Containers. **All commands run from the repository root unless explicitly noted.**
|
||||
|
||||
1. **Clone and Install:**
|
||||
|
||||
1. **Clone the Repository:**
|
||||
```bash
|
||||
git clone https://github.com/<your-username>/sim.git
|
||||
cd sim
|
||||
bun install
|
||||
```
|
||||
|
||||
2. **Set Up Environment:**
|
||||
Bun workspaces handle dependency resolution for all apps and packages from the root `bun install`.
|
||||
|
||||
- Navigate to the app directory:
|
||||
```bash
|
||||
cd apps/sim
|
||||
```
|
||||
- Copy `.env.example` to `.env`
|
||||
- Configure required variables (DATABASE_URL, BETTER_AUTH_SECRET, BETTER_AUTH_URL)
|
||||
2. **Set Up Environment Files:**
|
||||
|
||||
3. **Set Up Database:**
|
||||
We use **per-app `.env` files** (the Turborepo-canonical pattern), not a single root `.env`. Three files are needed for local dev:
|
||||
|
||||
```bash
|
||||
bunx drizzle-kit push
|
||||
# Main app — large, app-specific (OAuth secrets, LLM keys, Stripe, etc.)
|
||||
cp apps/sim/.env.example apps/sim/.env
|
||||
|
||||
# Realtime server — small, only the values shared with the main app
|
||||
cp apps/realtime/.env.example apps/realtime/.env
|
||||
|
||||
# DB tooling (drizzle-kit, db:migrate)
|
||||
cp packages/db/.env.example packages/db/.env
|
||||
```
|
||||
|
||||
4. **Run the Development Server:**
|
||||
At minimum, each `.env` needs `DATABASE_URL`. `apps/sim/.env` and `apps/realtime/.env` additionally need matching values for `BETTER_AUTH_URL`, `BETTER_AUTH_SECRET`, `INTERNAL_API_SECRET`, and `NEXT_PUBLIC_APP_URL`. `apps/sim/.env` also needs `ENCRYPTION_KEY` and `API_ENCRYPTION_KEY`. Generate any 32-char secrets with `openssl rand -hex 32`.
|
||||
|
||||
The same `BETTER_AUTH_SECRET`, `INTERNAL_API_SECRET`, and `DATABASE_URL` must appear in both `apps/sim/.env` and `apps/realtime/.env` so the two services share auth and DB. After editing `apps/sim/.env`, you can mirror the shared subset into the realtime env in one shot:
|
||||
|
||||
```bash
|
||||
grep -E '^(DATABASE_URL|BETTER_AUTH_URL|BETTER_AUTH_SECRET|INTERNAL_API_SECRET|NEXT_PUBLIC_APP_URL|REDIS_URL)=' apps/sim/.env > apps/realtime/.env
|
||||
grep -E '^DATABASE_URL=' apps/sim/.env > packages/db/.env
|
||||
```
|
||||
|
||||
3. **Run Database Migrations:**
|
||||
|
||||
Migrations live in `packages/db/migrations/`. Run them via the dedicated workspace script:
|
||||
|
||||
```bash
|
||||
cd packages/db && bun run db:migrate && cd ../..
|
||||
```
|
||||
|
||||
For ad-hoc schema iteration during development you can also use `bun run db:push` from `packages/db`, but `db:migrate` is the canonical command for both local and CI/CD setups.
|
||||
|
||||
4. **Run the Development Servers:**
|
||||
|
||||
```bash
|
||||
bun run dev:full
|
||||
```
|
||||
|
||||
This command starts both the main application and the realtime socket server required for full functionality.
|
||||
This launches both apps with coloured prefixes:
|
||||
|
||||
- `[App]` — Next.js on `http://localhost:3000`
|
||||
- `[Realtime]` — Socket.IO on `http://localhost:3002`
|
||||
|
||||
Or run them separately:
|
||||
|
||||
```bash
|
||||
bun run dev # Next.js app only
|
||||
bun run dev:sockets # realtime server only
|
||||
```
|
||||
|
||||
5. **Make Your Changes and Test Locally.**
|
||||
|
||||
Before opening a PR, run the same checks CI runs:
|
||||
|
||||
```bash
|
||||
bun run type-check # TypeScript across every workspace
|
||||
bun run lint:check # Biome lint across every workspace
|
||||
bun run test # Vitest across every workspace
|
||||
```
|
||||
|
||||
### Email Template Development
|
||||
|
||||
When working on email templates, you can preview them using a local email preview server:
|
||||
@@ -263,18 +317,19 @@ When working on email templates, you can preview them using a local email previe
|
||||
1. **Run the Email Preview Server:**
|
||||
|
||||
```bash
|
||||
bun run email:dev
|
||||
cd apps/sim && bun run email:dev
|
||||
```
|
||||
|
||||
2. **Access the Preview:**
|
||||
|
||||
- Open `http://localhost:3000` in your browser
|
||||
- You'll see a list of all email templates
|
||||
- Click on any template to view and test it with various parameters
|
||||
- Open `http://localhost:3000` in your browser.
|
||||
- You'll see a list of all email templates.
|
||||
- Click on any template to view and test it with various parameters.
|
||||
|
||||
3. **Templates Location:**
|
||||
- Email templates are located in `sim/app/emails/`
|
||||
- After making changes to templates, they will automatically update in the preview
|
||||
|
||||
- Email templates live in `apps/sim/components/emails/`.
|
||||
- Changes hot-reload automatically in the preview.
|
||||
|
||||
---
|
||||
|
||||
@@ -282,28 +337,41 @@ When working on email templates, you can preview them using a local email previe
|
||||
|
||||
Sim is built in a modular fashion where blocks and tools extend the platform's functionality. To maintain consistency and quality, please follow the guidelines below when adding a new block or tool.
|
||||
|
||||
> **Use the skill guides for step-by-step recipes.** The repository ships opinionated, end-to-end guides under `.agents/skills/` that cover the exact file layout, conventions, registry wiring, and gotchas for each kind of contribution. Read the relevant SKILL.md before you start writing code:
|
||||
>
|
||||
> | Adding… | Read |
|
||||
> | ------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------- |
|
||||
> | A new integration end-to-end (tools + block + icon + optional triggers + all registrations) | [`.agents/skills/add-integration/SKILL.md`](../.agents/skills/add-integration/SKILL.md) |
|
||||
> | Just a block (or aligning an existing block with its tools) | [`.agents/skills/add-block/SKILL.md`](../.agents/skills/add-block/SKILL.md) |
|
||||
> | Just tool configs for a service | [`.agents/skills/add-tools/SKILL.md`](../.agents/skills/add-tools/SKILL.md) |
|
||||
> | A webhook trigger for a service | [`.agents/skills/add-trigger/SKILL.md`](../.agents/skills/add-trigger/SKILL.md) |
|
||||
> | A knowledge-base connector (sync docs from an external source) | [`.agents/skills/add-connector/SKILL.md`](../.agents/skills/add-connector/SKILL.md) |
|
||||
>
|
||||
> The shorter overview below is a high-level reference; the SKILL.md files are the authoritative source of truth and stay in sync with the codebase.
|
||||
|
||||
### Where to Add Your Code
|
||||
|
||||
- **Blocks:** Create your new block file under the `/apps/sim/blocks/blocks` directory. The name of the file should match the provider name (e.g., `pinecone.ts`).
|
||||
- **Tools:** Create a new directory under `/apps/sim/tools` with the same name as the provider (e.g., `/apps/sim/tools/pinecone`).
|
||||
- **Blocks:** Create your new block file under the `apps/sim/blocks/blocks/` directory. The name of the file should match the provider name (e.g., `pinecone.ts`).
|
||||
- **Tools:** Create a new directory under `apps/sim/tools/` with the same name as the provider (e.g., `apps/sim/tools/pinecone`).
|
||||
|
||||
In addition, you will need to update the registries:
|
||||
|
||||
- **Block Registry:** Update the blocks index (`/apps/sim/blocks/index.ts`) to include your new block.
|
||||
- **Tool Registry:** Update the tools registry (`/apps/sim/tools/index.ts`) to add your new tool.
|
||||
- **Block Registry:** Add your block to `apps/sim/blocks/registry.ts`. (`apps/sim/blocks/index.ts` re-exports lookups from the registry; you do not need to edit it.)
|
||||
- **Tool Registry:** Add your tool to `apps/sim/tools/index.ts`.
|
||||
|
||||
### How to Create a New Block
|
||||
|
||||
1. **Create a New File:**
|
||||
Create a file for your block named after the provider (e.g., `pinecone.ts`) in the `/apps/sim/blocks/blocks` directory.
|
||||
1. **Create a New File:**
|
||||
Create a file for your block named after the provider (e.g., `pinecone.ts`) in the `apps/sim/blocks/blocks/` directory.
|
||||
|
||||
2. **Create a New Icon:**
|
||||
Create a new icon for your block in the `/apps/sim/components/icons.tsx` file. The icon should follow the same naming convention as the block (e.g., `PineconeIcon`).
|
||||
Create a new icon for your block in `apps/sim/components/icons.tsx`. The icon should follow the same naming convention as the block (e.g., `PineconeIcon`).
|
||||
|
||||
3. **Define the Block Configuration:**
|
||||
3. **Define the Block Configuration:**
|
||||
Your block should export a constant of type `BlockConfig`. For example:
|
||||
|
||||
```typescript:/apps/sim/blocks/blocks/pinecone.ts
|
||||
```typescript
|
||||
// apps/sim/blocks/blocks/pinecone.ts
|
||||
import { PineconeIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import type { PineconeResponse } from '@/tools/pinecone/types'
|
||||
@@ -321,7 +389,7 @@ In addition, you will need to update the registries:
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown'
|
||||
type: 'dropdown',
|
||||
required: true,
|
||||
options: [
|
||||
{ label: 'Generate Embeddings', id: 'generate' },
|
||||
@@ -332,7 +400,7 @@ In addition, you will need to update the registries:
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'API Key',
|
||||
type: 'short-input'
|
||||
type: 'short-input',
|
||||
placeholder: 'Your Pinecone API key',
|
||||
password: true,
|
||||
required: true,
|
||||
@@ -370,10 +438,11 @@ In addition, you will need to update the registries:
|
||||
}
|
||||
```
|
||||
|
||||
4. **Register Your Block:**
|
||||
Add your block to the blocks registry (`/apps/sim/blocks/registry.ts`):
|
||||
4. **Register Your Block:**
|
||||
Add your block to the blocks registry (`apps/sim/blocks/registry.ts`):
|
||||
|
||||
```typescript:/apps/sim/blocks/registry.ts
|
||||
```typescript
|
||||
// apps/sim/blocks/registry.ts
|
||||
import { PineconeBlock } from '@/blocks/blocks/pinecone'
|
||||
|
||||
// Registry of all available blocks
|
||||
@@ -385,24 +454,25 @@ In addition, you will need to update the registries:
|
||||
|
||||
The block will be automatically available to the application through the registry.
|
||||
|
||||
5. **Test Your Block:**
|
||||
5. **Test Your Block:**
|
||||
Ensure that the block displays correctly in the UI and that its functionality works as expected.
|
||||
|
||||
### How to Create a New Tool
|
||||
|
||||
1. **Create a New Directory:**
|
||||
Create a directory under `/apps/sim/tools` with the same name as the provider (e.g., `/apps/sim/tools/pinecone`).
|
||||
1. **Create a New Directory:**
|
||||
Create a directory under `apps/sim/tools/` with the same name as the provider (e.g., `apps/sim/tools/pinecone`).
|
||||
|
||||
2. **Create Tool Files:**
|
||||
2. **Create Tool Files:**
|
||||
Create separate files for each tool functionality with descriptive names (e.g., `fetch.ts`, `generate_embeddings.ts`, `search_text.ts`) in your tool directory.
|
||||
|
||||
3. **Create a Types File:**
|
||||
3. **Create a Types File:**
|
||||
Create a `types.ts` file in your tool directory to define and export all types related to your tools.
|
||||
|
||||
4. **Create an Index File:**
|
||||
4. **Create an Index File:**
|
||||
Create an `index.ts` file in your tool directory that imports and exports all tools:
|
||||
|
||||
```typescript:/apps/sim/tools/pinecone/index.ts
|
||||
```typescript
|
||||
// apps/sim/tools/pinecone/index.ts
|
||||
import { fetchTool } from './fetch'
|
||||
import { generateEmbeddingsTool } from './generate_embeddings'
|
||||
import { searchTextTool } from './search_text'
|
||||
@@ -410,10 +480,11 @@ In addition, you will need to update the registries:
|
||||
export { fetchTool, generateEmbeddingsTool, searchTextTool }
|
||||
```
|
||||
|
||||
5. **Define the Tool Configuration:**
|
||||
5. **Define the Tool Configuration:**
|
||||
Your tool should export a constant with a naming convention of `{toolName}Tool`. The tool ID should follow the format `{provider}_{tool_name}`. For example:
|
||||
|
||||
```typescript:/apps/sim/tools/pinecone/fetch.ts
|
||||
```typescript
|
||||
// apps/sim/tools/pinecone/fetch.ts
|
||||
import { ToolConfig, ToolResponse } from '@/tools/types'
|
||||
import { PineconeParams, PineconeResponse } from '@/tools/pinecone/types'
|
||||
|
||||
@@ -449,11 +520,12 @@ In addition, you will need to update the registries:
|
||||
}
|
||||
```
|
||||
|
||||
6. **Register Your Tool:**
|
||||
Update the tools registry in `/apps/sim/tools/index.ts` to include your new tool:
|
||||
6. **Register Your Tool:**
|
||||
Update the tools registry in `apps/sim/tools/index.ts` to include your new tool:
|
||||
|
||||
```typescript:/apps/sim/tools/index.ts
|
||||
import { fetchTool, generateEmbeddingsTool, searchTextTool } from '/@tools/pinecone'
|
||||
```typescript
|
||||
// apps/sim/tools/index.ts
|
||||
import { fetchTool, generateEmbeddingsTool, searchTextTool } from '@/tools/pinecone'
|
||||
// ... other imports
|
||||
|
||||
export const tools: Record<string, ToolConfig> = {
|
||||
@@ -464,13 +536,14 @@ In addition, you will need to update the registries:
|
||||
}
|
||||
```
|
||||
|
||||
7. **Test Your Tool:**
|
||||
7. **Test Your Tool:**
|
||||
Ensure that your tool functions correctly by making test requests and verifying the responses.
|
||||
|
||||
8. **Generate Documentation:**
|
||||
Run the documentation generator to create docs for your new tool:
|
||||
8. **Generate Documentation:**
|
||||
Run the documentation generator (from `apps/sim`) to create docs for your new tool:
|
||||
|
||||
```bash
|
||||
./scripts/generate-docs.sh
|
||||
cd apps/sim && bun run generate-docs
|
||||
```
|
||||
|
||||
### Naming Conventions
|
||||
@@ -480,7 +553,7 @@ Maintaining consistent naming across the codebase is critical for auto-generatio
|
||||
- **Block Files:** Name should match the provider (e.g., `pinecone.ts`)
|
||||
- **Block Export:** Should be named `{Provider}Block` (e.g., `PineconeBlock`)
|
||||
- **Icons:** Should be named `{Provider}Icon` (e.g., `PineconeIcon`)
|
||||
- **Tool Directories:** Should match the provider name (e.g., `/tools/pinecone/`)
|
||||
- **Tool Directories:** Should match the provider name (e.g., `tools/pinecone/`)
|
||||
- **Tool Files:** Should be named after their function (e.g., `fetch.ts`, `search_text.ts`)
|
||||
- **Tool Exports:** Should be named `{toolName}Tool` (e.g., `fetchTool`)
|
||||
- **Tool IDs:** Should follow the format `{provider}_{tool_name}` (e.g., `pinecone_fetch`)
|
||||
@@ -489,12 +562,12 @@ Maintaining consistent naming across the codebase is critical for auto-generatio
|
||||
|
||||
Sim implements a sophisticated parameter visibility system that controls how parameters are exposed to users and LLMs in agent workflows. Each parameter can have one of four visibility levels:
|
||||
|
||||
| Visibility | User Sees | LLM Sees | How It Gets Set |
|
||||
|-------------|-----------|----------|--------------------------------|
|
||||
| `user-only` | ✅ Yes | ❌ No | User provides in UI |
|
||||
| `user-or-llm` | ✅ Yes | ✅ Yes | User provides OR LLM generates |
|
||||
| `llm-only` | ❌ No | ✅ Yes | LLM generates only |
|
||||
| `hidden` | ❌ No | ❌ No | Application injects at runtime |
|
||||
| Visibility | User Sees | LLM Sees | How It Gets Set |
|
||||
| ------------- | --------- | -------- | ------------------------------ |
|
||||
| `user-only` | ✅ Yes | ❌ No | User provides in UI |
|
||||
| `user-or-llm` | ✅ Yes | ✅ Yes | User provides OR LLM generates |
|
||||
| `llm-only` | ❌ No | ✅ Yes | LLM generates only |
|
||||
| `hidden` | ❌ No | ❌ No | Application injects at runtime |
|
||||
|
||||
#### Visibility Guidelines
|
||||
|
||||
|
||||
2
.github/workflows/docs-embeddings.yml
vendored
2
.github/workflows/docs-embeddings.yml
vendored
@@ -20,7 +20,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
with:
|
||||
bun-version: 1.3.11
|
||||
bun-version: 1.3.13
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
|
||||
4
.github/workflows/i18n.yml
vendored
4
.github/workflows/i18n.yml
vendored
@@ -23,7 +23,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
with:
|
||||
bun-version: 1.3.11
|
||||
bun-version: 1.3.13
|
||||
|
||||
- name: Cache Bun dependencies
|
||||
uses: actions/cache@v4
|
||||
@@ -122,7 +122,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
with:
|
||||
bun-version: 1.3.11
|
||||
bun-version: 1.3.13
|
||||
|
||||
- name: Cache Bun dependencies
|
||||
uses: actions/cache@v4
|
||||
|
||||
2
.github/workflows/migrations.yml
vendored
2
.github/workflows/migrations.yml
vendored
@@ -19,7 +19,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
with:
|
||||
bun-version: 1.3.11
|
||||
bun-version: 1.3.13
|
||||
|
||||
- name: Cache Bun dependencies
|
||||
uses: actions/cache@v4
|
||||
|
||||
2
.github/workflows/publish-cli.yml
vendored
2
.github/workflows/publish-cli.yml
vendored
@@ -19,7 +19,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
with:
|
||||
bun-version: 1.3.11
|
||||
bun-version: 1.3.13
|
||||
|
||||
- name: Setup Node.js for npm publishing
|
||||
uses: actions/setup-node@v4
|
||||
|
||||
2
.github/workflows/publish-ts-sdk.yml
vendored
2
.github/workflows/publish-ts-sdk.yml
vendored
@@ -19,7 +19,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
with:
|
||||
bun-version: 1.3.11
|
||||
bun-version: 1.3.13
|
||||
|
||||
- name: Setup Node.js for npm publishing
|
||||
uses: actions/setup-node@v4
|
||||
|
||||
11
.github/workflows/test-build.yml
vendored
11
.github/workflows/test-build.yml
vendored
@@ -19,7 +19,7 @@ jobs:
|
||||
- name: Setup Bun
|
||||
uses: oven-sh/setup-bun@v2
|
||||
with:
|
||||
bun-version: 1.3.11
|
||||
bun-version: 1.3.13
|
||||
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v4
|
||||
@@ -103,6 +103,15 @@ jobs:
|
||||
- name: Lint code
|
||||
run: bun run lint:check
|
||||
|
||||
- name: Enforce monorepo boundaries
|
||||
run: bun run check:boundaries
|
||||
|
||||
- name: Verify realtime prune graph
|
||||
run: bun run check:realtime-prune
|
||||
|
||||
- name: Type-check realtime server
|
||||
run: bunx turbo run type-check --filter=@sim/realtime
|
||||
|
||||
- name: Run tests with coverage
|
||||
env:
|
||||
NODE_OPTIONS: '--no-warnings --max-old-space-size=8192'
|
||||
|
||||
45
AGENTS.md
45
AGENTS.md
@@ -20,19 +20,42 @@ You are a professional software engineer. All code must follow best practices: a
|
||||
|
||||
### Root Structure
|
||||
```
|
||||
apps/sim/
|
||||
├── app/ # Next.js app router (pages, API routes)
|
||||
├── blocks/ # Block definitions and registry
|
||||
├── components/ # Shared UI (emcn/, ui/)
|
||||
├── executor/ # Workflow execution engine
|
||||
├── hooks/ # Shared hooks (queries/, selectors/)
|
||||
├── lib/ # App-wide utilities
|
||||
├── providers/ # LLM provider integrations
|
||||
├── stores/ # Zustand stores
|
||||
├── tools/ # Tool definitions
|
||||
└── triggers/ # Trigger definitions
|
||||
apps/
|
||||
├── sim/ # Next.js app (UI + API routes + workflow editor)
|
||||
│ ├── app/ # Next.js app router (pages, API routes)
|
||||
│ ├── blocks/ # Block definitions and registry
|
||||
│ ├── components/ # Shared UI (emcn/, ui/)
|
||||
│ ├── executor/ # Workflow execution engine
|
||||
│ ├── hooks/ # Shared hooks (queries/, selectors/)
|
||||
│ ├── lib/ # App-wide utilities
|
||||
│ ├── providers/ # LLM provider integrations
|
||||
│ ├── stores/ # Zustand stores
|
||||
│ ├── tools/ # Tool definitions
|
||||
│ └── triggers/ # Trigger definitions
|
||||
└── realtime/ # Bun Socket.IO server (collaborative canvas)
|
||||
└── src/ # auth, config, database, handlers, middleware,
|
||||
# rooms, routes, internal/webhook-cleanup.ts
|
||||
|
||||
packages/
|
||||
├── audit/ # @sim/audit — recordAudit + AuditAction + AuditResourceType
|
||||
├── auth/ # @sim/auth — @sim/auth/verify (shared Better Auth verifier)
|
||||
├── db/ # @sim/db — drizzle schema + client
|
||||
├── logger/ # @sim/logger
|
||||
├── realtime-protocol/ # @sim/realtime-protocol — socket operation constants + zod schemas
|
||||
├── security/ # @sim/security — safeCompare
|
||||
├── tsconfig/ # shared tsconfig presets
|
||||
├── utils/ # @sim/utils
|
||||
├── workflow-authz/ # @sim/workflow-authz — authorizeWorkflowByWorkspacePermission
|
||||
├── workflow-persistence/ # @sim/workflow-persistence — raw load/save + subflow helpers
|
||||
└── workflow-types/ # @sim/workflow-types — pure BlockState/Loop/Parallel/... types
|
||||
```
|
||||
|
||||
### Package boundaries
|
||||
- `apps/* → packages/*` only. Packages never import from `apps/*`.
|
||||
- Each package has explicit subpath `exports` maps; no barrels that accidentally pull in heavy halves.
|
||||
- `apps/realtime` intentionally avoids Next.js, React, the block/tool registry, provider SDKs, and the executor. CI enforces this via `scripts/check-monorepo-boundaries.ts` and `scripts/check-realtime-prune-graph.ts`.
|
||||
- Auth is shared across services via the Better Auth "Shared Database Session" pattern: both apps read the same `BETTER_AUTH_SECRET` and point at the same DB via `@sim/db`.
|
||||
|
||||
### Naming Conventions
|
||||
- Components: PascalCase (`WorkflowList`)
|
||||
- Hooks: `use` prefix (`useWorkflowOperations`)
|
||||
|
||||
@@ -28,6 +28,36 @@ export function AgentMailIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function AgentPhoneIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 150 150' xmlns='http://www.w3.org/2000/svg'>
|
||||
<path
|
||||
fill='#23AF58'
|
||||
stroke='#007F3F'
|
||||
strokeWidth='0.15'
|
||||
strokeMiterlimit='10'
|
||||
d='m139.6 53.3c-1.4-2.3-4.9-3.3-7.6-4.8-2.7-1.3-4.2-2.4-5.7-3.6-1.9-1-2.5-2.7-3.3-3.2s-2.7-1.4-4.5 1.3c-2 2.7-4.5 6.6-6.6 11.1-2.3 5.4-6.3 14.9-6.3 18.9 0.5 4.9 3.1 4.6 6.1 7.2 2.5 2.1 2.8 5.8 1.5 12.5-1.3 6.6-4 12.8-7.8 19.2-3.3 5.1-5.8 8.7-10 9.1-5.3 0.5-12.5-3.1-16.8-5.6-1-0.6-2.5-0.9-3.8-0.2-1.3 0.5-2.2 1.6-3.2 3.3-1.5 2.5-4.6 7.7-5.8 12.2-0.5 3 0 6.4 2.9 9 1.4 1.2 2.8 2.5 4.4 3.4 5 2.8 9.6 4.5 16.5 4.9 5.3 0.2 9.3-1 13.4-3.1 2.4-1.3 6.6-4.2 9.6-7.3l1.1-1.2c2.8-3.1 8.8-10 11.6-14.5 2.3-3.5 4.8-7.4 6.9-12.3 2.9-6.7 4.4-14 5-17.9 1.2-7 2.4-17.5 3.4-31.1 0.1-4.3-0.3-6.1-1-7.3zm-4.5 6.7c-0.5 9.5-1.9 23.3-3.1 30.1-0.9 4.5-2.4 9.6-3.8 13.4-1.1 2.6-3.1 7-5.6 10.8-3.4 5.3-8.4 11.6-12 15.8-6.4 6.6-10.2 9.6-14.2 10.8-2.2 0.9-3.8 1.2-7 1.2-3.4-0.1-8-0.7-11.3-2.2-3-1.2-7-4-6.9-6.8 0.4-3.2 3.3-9.6 5.2-11.9 0.2-0.3 0.5-0.3 0.7-0.2 2.5 1.1 6 3.2 9.6 4.5 2.4 0.9 4.8 1.4 7.3 1.4 3.9 0 6.7-1.2 9.5-3.2 5.6-4.6 9-10.8 12.1-17.5 2-4.3 4.1-11.6 4.4-18.3 0.1-4.9-1.1-8.9-4.5-12.2-1.1-0.7-3-2.1-3-2.8 0-4.2 3.9-13 8.9-22.9 0.2-0.7 0.5-1 1.1-0.7 1.1 0.6 3 1.4 4.6 2.4 2.1 1 5.4 2.4 7.1 3.9 0.9 0.4 1 3 0.9 4.4z'
|
||||
/>
|
||||
<path
|
||||
fill='#23AF58'
|
||||
d='m104.7 27.8c-1.3-1.5-3.3-1.3-6.2-1.5l-1.9 0.2-7-0.2-31.5 0.2 1.5-9.3c2-1.1 5.1-3.5 5.8-6.3 1-2.8 0.2-5.9-2-7.4-2.3-1.9-5.8-2.4-9.3-0.8-1.6 1-4.7 3.4-5.4 6.9-0.8 4.1 2.4 6.7 4.7 7.9l-1.5 9.1-17.2 0.9c-12.3 1.1-16.3 1.2-20.6 4.3-2 1.3-3 4.5-3.4 9.8-0.6 11.3-0.7 18.7-0.6 28.3 0.4 11.2 0 36.6 3 39.8l-1.2 0.3c-3.8 0.6-4 6.2-0.5 6.6l15.5-1 69.7-7.6c2.5-0.4 4.3-0.9 4.6-4.3l3.7-71.5c0-1.9 0.2-3.6-0.2-4.4zm-49.6-17.3c0.3-2.2 2.4-3 3.3-2.8 0.7 0.4 1 1.8 0 2.8-1.5 2-3.3 1.7-3.3 0zm40 90.2c-4 1-5.5 1.5-11.5 2.4-7.7 1-19.7 2.1-31.2 3.4l-33.8 2.9c-0.7 0.2-1-0.4-1-1-0.6-6.5-1.2-20.5-1.5-39.5l0.3-23.3c0.6-7.5 0.7-8.7 4.6-9.7 5.1-0.9 7.4-1.4 14.9-1.8l19.5-0.5 41.1-0.5c1.4 0 1.9 0.4 1.9 1.5l-3.3 66.1z'
|
||||
/>
|
||||
<path
|
||||
fill='#23AF58'
|
||||
d='m38.9 52.4c-1.8 0-4 1.1-4.5 3.3-1 3.9 1 7.6 4.5 7.7 3.8 0 5-3.8 4.7-6.3-0.2-2-2-4.7-4.7-4.7z'
|
||||
/>
|
||||
<path
|
||||
fill='#23AF58'
|
||||
d='m73.5 53.9c-1.8 0-4.3 1.5-4.4 4.5-0.1 3.2 2 5.3 4.3 5.3 2.5 0 4.2-1.7 4.2-4.8 0-3.2-1.7-4.8-4.1-5z'
|
||||
/>
|
||||
<path
|
||||
fill='#23AF58'
|
||||
d='m72.1 77.1c-2.7 3.4-7.2 7.4-14.7 8.3-7.3 0.3-13.9-2.9-20-8.5-3.5-3.4-8 0-6.2 2.7 1.7 2.5 6.4 6.6 10.4 8.8 3.5 2 7.3 3.3 13.8 3.5 4.7 0 9.2-0.8 12.7-2.4 2.9-1.1 5-2.8 6-3.8 2.3-2.1 3.8-4.1 3.5-7.3-0.9-2.5-3.6-2.8-5.5-1.3z'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function CrowdStrikeIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 768 500' fill='none' xmlns='http://www.w3.org/2000/svg'>
|
||||
@@ -4683,9 +4713,16 @@ export function IAMIcon(props: SVGProps<SVGSVGElement>) {
|
||||
|
||||
export function IdentityCenterIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} viewBox='0 0 24 24' xmlns='http://www.w3.org/2000/svg'>
|
||||
<svg {...props} viewBox='0 0 80 80' xmlns='http://www.w3.org/2000/svg'>
|
||||
<defs>
|
||||
<linearGradient x1='0%' y1='100%' x2='100%' y2='0%' id='identityCenterGradient'>
|
||||
<stop stopColor='#BD0816' offset='0%' />
|
||||
<stop stopColor='#FF5252' offset='100%' />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect fill='url(#identityCenterGradient)' width='80' height='80' />
|
||||
<path
|
||||
d='M13.694,14.8194562 C14.376,14.1374562 14.376,13.0294562 13.694,12.3474562 C13.353,12.0074562 12.906,11.8374562 12.459,11.8374562 C12.01,11.8374562 11.563,12.0074562 11.222,12.3474562 C10.542,13.0284562 10.542,14.1384562 11.222,14.8194562 C11.905,15.5014562 13.013,15.4994562 13.694,14.8194562 M14.718,15.1374562 L18.703,19.1204562 L17.996,19.8274562 L16.868,18.6994562 L15.793,19.7754562 L15.086,19.0684562 L16.161,17.9924562 L14.011,15.8444562 C13.545,16.1654562 13.003,16.3294562 12.458,16.3294562 C11.755,16.3294562 11.051,16.0624562 10.515,15.5264562 C9.445,14.4554562 9.445,12.7124562 10.515,11.6404562 C11.586,10.5714562 13.329,10.5694562 14.401,11.6404562 C15.351,12.5904562 15.455,14.0674562 14.718,15.1374562 M20,12.1014562 C20,14.1684562 18.505,15.0934562 17.023,15.0934562 L17.023,14.0934562 C17.487,14.0934562 19,13.9494562 19,12.1014562 C19,11.0044562 18.353,10.3894562 16.905,10.1084562 C16.68,10.0654562 16.514,9.87545615 16.501,9.64845615 C16.446,8.74445615 15.987,8.11245615 15.384,8.11245615 C15.084,8.11245615 14.854,8.24245615 14.616,8.54645615 C14.506,8.68845615 14.324,8.75945615 14.147,8.73245615 C13.968,8.70545615 13.818,8.58445615 13.755,8.41445615 C13.577,7.94345615 13.211,7.43345615 12.723,6.97745615 C12.231,6.50945615 10.883,5.50745615 8.972,6.27345615 C7.885,6.70545615 7.034,7.94945615 7.034,9.10745615 C7.034,9.23545615 7.043,9.36245615 7.058,9.48845615 C7.061,9.50945615 7.062,9.53045615 7.062,9.55145615 C7.062,9.79945615 6.882,10.0064562 6.645,10.0464562 C5.886,10.2394562 5,10.7454562 5,12.0554562 L5.005,12.2104562 C5.069,13.3254562 6.252,13.9954562 7.358,13.9984562 L8,13.9984562 L8,14.9984562 L7.357,14.9984562 C5.536,14.9944562 4.095,13.8194562 4.006,12.2644562 C4.003,12.1944562 4,12.1244562 4,12.0554562 C4,10.6944562 4.752,9.64845615 6.035,9.18845615 C6.034,9.16145615 6.034,9.13445615 6.034,9.10745615 C6.034,7.54345615 7.138,5.92545615 8.602,5.34345615 C10.298,4.66545615 12.095,5.00345615 13.409,6.24945615 C13.706,6.52745615 14.076,6.92645615 
14.372,7.41345615 C14.673,7.21245615 15.008,7.11245615 15.384,7.11245615 C16.257,7.11245615 17.231,7.77145615 17.458,9.20745615 C19.145,9.63245615 20,10.6054562 20,12.1014562'
|
||||
d='M46.694,46.8194562 C47.376,46.1374562 47.376,45.0294562 46.694,44.3474562 C46.353,44.0074562 45.906,43.8374562 45.459,43.8374562 C45.01,43.8374562 44.563,44.0074562 44.222,44.3474562 C43.542,45.0284562 43.542,46.1384562 44.222,46.8194562 C44.905,47.5014562 46.013,47.4994562 46.694,46.8194562 M47.718,47.1374562 L51.703,51.1204562 L50.996,51.8274562 L49.868,50.6994562 L48.793,51.7754562 L48.086,51.0684562 L49.161,49.9924562 L47.011,47.8444562 C46.545,48.1654562 46.003,48.3294562 45.458,48.3294562 C44.755,48.3294562 44.051,48.0624562 43.515,47.5264562 C42.445,46.4554562 42.445,44.7124562 43.515,43.6404562 C44.586,42.5714562 46.329,42.5694562 47.401,43.6404562 C48.351,44.5904562 48.455,46.0674562 47.718,47.1374562 M53,44.1014562 C53,46.1684562 51.505,47.0934562 50.023,47.0934562 L50.023,46.0934562 C50.487,46.0934562 52,45.9494562 52,44.1014562 C52,43.0044562 51.353,42.3894562 49.905,42.1084562 C49.68,42.0654562 49.514,41.8754562 49.501,41.6484562 C49.446,40.7444562 48.987,40.1124562 48.384,40.1124562 C48.084,40.1124562 47.854,40.2424562 47.616,40.5464562 C47.506,40.6884562 47.324,40.7594562 47.147,40.7324562 C46.968,40.7054562 46.818,40.5844562 46.755,40.4144562 C46.577,39.9434562 46.211,39.4334562 45.723,38.9774562 C45.231,38.5094562 43.883,37.5074562 41.972,38.2734562 C40.885,38.7054562 40.034,39.9494562 40.034,41.1074562 C40.034,41.2354562 40.043,41.3624562 40.058,41.4884562 C40.061,41.5094562 40.062,41.5304562 40.062,41.5514562 C40.062,41.7994562 39.882,42.0064562 39.645,42.0464562 C38.886,42.2394562 38,42.7454562 38,44.0554562 L38.005,44.2104562 C38.069,45.3254562 39.252,45.9954562 40.358,45.9984562 L41,45.9984562 L41,46.9984562 L40.357,46.9984562 C38.536,46.9944562 37.095,45.8194562 37.006,44.2644562 C37.003,44.1944562 37,44.1244562 37,44.0554562 C37,42.6944562 37.752,41.6484562 39.035,41.1884562 C39.034,41.1614562 39.034,41.1344562 39.034,41.1074562 C39.034,39.5434562 40.138,37.9254562 41.602,37.3434562 C43.298,36.6654562 45.095,37.0034562 46.409,38.2494562 
C46.706,38.5274562 47.076,38.9264562 47.372,39.4134562 C47.673,39.2124562 48.008,39.1124562 48.384,39.1124562 C49.257,39.1124562 50.231,39.7714562 50.458,41.2074562 C52.145,41.6324562 53,42.6054562 53,44.1014562 M27,53 L27,27 L53,27 L53,34 L51,34 L51,29 L29,29 L29,51 L51,51 L51,46 L53,46 L53,53 Z'
|
||||
fill='#FFFFFF'
|
||||
/>
|
||||
</svg>
|
||||
|
||||
@@ -6,6 +6,7 @@ import type { ComponentType, SVGProps } from 'react'
|
||||
import {
|
||||
A2AIcon,
|
||||
AgentMailIcon,
|
||||
AgentPhoneIcon,
|
||||
AgiloftIcon,
|
||||
AhrefsIcon,
|
||||
AirtableIcon,
|
||||
@@ -204,6 +205,7 @@ type IconComponent = ComponentType<SVGProps<SVGSVGElement>>
|
||||
export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
a2a: A2AIcon,
|
||||
agentmail: AgentMailIcon,
|
||||
agentphone: AgentPhoneIcon,
|
||||
agiloft: AgiloftIcon,
|
||||
ahrefs: AhrefsIcon,
|
||||
airtable: AirtableIcon,
|
||||
|
||||
@@ -165,7 +165,7 @@ When a user opens Mothership, their permission group is read before any block or
|
||||
<FAQ items={[
|
||||
{
|
||||
question: "Who can create and manage permission groups?",
|
||||
answer: "Any workspace admin on an Enterprise-entitled workspace can create, edit, and delete permission groups for that workspace. On Sim Cloud, the workspace's billed account must be on the Enterprise plan; on self-hosted deployments you can enable it via ACCESS_CONTROL_ENABLED."
|
||||
answer: "Any workspace admin on an Enterprise-entitled workspace can create, edit, and delete permission groups for that workspace. The workspace's billed account must be on the Enterprise plan."
|
||||
},
|
||||
{
|
||||
question: "What happens to a workflow that was built before a block was restricted?",
|
||||
|
||||
@@ -3,7 +3,6 @@ title: Audit Logs
|
||||
description: Track every action taken across your organization's workspaces
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
import { FAQ } from '@/components/ui/faq'
|
||||
import { Image } from '@/components/ui/image'
|
||||
|
||||
@@ -78,9 +77,7 @@ Authorization: Bearer <api-key>
|
||||
|
||||
Paginate by passing the `nextCursor` value as the `cursor` parameter in the next request. When `nextCursor` is absent, you have reached the last page.
|
||||
|
||||
<Callout type="info">
|
||||
The API accepts both personal and workspace-scoped API keys. Rate limits apply — the response includes `X-RateLimit-*` headers with your current limit and remaining quota.
|
||||
</Callout>
|
||||
The API accepts both personal and workspace-scoped API keys. Rate limits apply — the response includes `X-RateLimit-*` headers with your current limit and remaining quota.
|
||||
|
||||
---
|
||||
|
||||
|
||||
@@ -3,11 +3,10 @@ title: Data Retention
|
||||
description: Control how long execution logs, deleted resources, and copilot data are kept before permanent deletion
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
import { FAQ } from '@/components/ui/faq'
|
||||
import { Image } from '@/components/ui/image'
|
||||
|
||||
Data Retention lets workspace admins on Enterprise plans configure how long three categories of data are kept before they are permanently deleted. Each workspace in your organization can have its own independent configuration.
|
||||
Data Retention lets organization owners and admins on Enterprise plans configure how long three categories of data are kept before they are permanently deleted. The configuration applies to every workspace in the organization.
|
||||
|
||||
---
|
||||
|
||||
@@ -55,42 +54,26 @@ Controls how long **Mothership data** is kept, including:
|
||||
- Run checkpoints and async tool calls
|
||||
- Inbox tasks (Sim Mailer)
|
||||
|
||||
<Callout type="info">
|
||||
Each setting is independent. You can configure a short log retention period alongside a long soft deletion cleanup period, or set any combination that fits your compliance requirements.
|
||||
</Callout>
|
||||
Each setting is independent. You can configure a short log retention period alongside a long soft deletion cleanup period, or any combination that fits your compliance requirements.
|
||||
|
||||
---
|
||||
|
||||
## Per-workspace configuration
|
||||
## Organization-wide configuration
|
||||
|
||||
Retention is configured at the **workspace level**, not organization-wide. Each workspace in your organization can have a different configuration. Changes to one workspace's settings do not affect other workspaces.
|
||||
Retention is configured at the **organization level**. A single configuration applies to every workspace in the organization — there are no per-workspace overrides.
|
||||
|
||||
---
|
||||
|
||||
## Plan defaults
|
||||
## Defaults
|
||||
|
||||
Non-enterprise workspaces use the following automatic defaults. These cannot be changed.
|
||||
|
||||
| Setting | Free | Pro | Team |
|
||||
|---------|------|-----|------|
|
||||
| Log retention | 30 days | Not configured | Not configured |
|
||||
| Soft deletion cleanup | 30 days | 90 days | 90 days |
|
||||
| Task cleanup | Not configured | Not configured | Not configured |
|
||||
|
||||
"Not configured" means that category of data is not automatically deleted on that plan.
|
||||
|
||||
Enterprise workspaces have no defaults — retention only runs for a setting once you configure it. Until configured, that category of data is not automatically deleted.
|
||||
|
||||
<Callout type="info">
|
||||
On Enterprise, setting a period to **Forever** explicitly keeps data indefinitely. Leaving a setting unconfigured has the same effect, but setting it to Forever makes the intent explicit and allows you to change it later without needing to save from scratch.
|
||||
</Callout>
|
||||
By default, all three settings are unconfigured — no data is automatically deleted in any category until you configure it. Setting a period to **Forever** has the same effect as leaving it unconfigured, but makes the intent explicit and allows you to change it later without saving from scratch.
|
||||
|
||||
---
|
||||
|
||||
<FAQ items={[
|
||||
{
|
||||
question: "Who can configure data retention settings?",
|
||||
answer: "Only workspace admins can configure data retention settings. On Sim Cloud, the workspace must be on an Enterprise plan."
|
||||
answer: "Only organization owners and admins can configure data retention settings. On Sim Cloud, the organization must be on an Enterprise plan."
|
||||
},
|
||||
{
|
||||
question: "Is deletion immediate once the retention period expires?",
|
||||
@@ -102,7 +85,7 @@ Enterprise workspaces have no defaults — retention only runs for a setting onc
|
||||
},
|
||||
{
|
||||
question: "Does the retention period apply to all workspaces in my organization?",
|
||||
answer: "No. Retention is configured per workspace. Each workspace in your organization can have a different configuration."
|
||||
answer: "Yes. Retention is configured once per organization and applies to every workspace in the organization."
|
||||
},
|
||||
{
|
||||
question: "What happens if I shorten the retention period?",
|
||||
|
||||
@@ -3,7 +3,6 @@ title: Enterprise
|
||||
description: Enterprise features for business organizations
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
import { FAQ } from '@/components/ui/faq'
|
||||
|
||||
Sim Enterprise provides advanced features for organizations with enhanced security, compliance, and management requirements.
|
||||
@@ -26,9 +25,9 @@ Define permission groups on a workspace to control what features and integration
|
||||
2. Create a permission group with your desired restrictions
|
||||
3. Add workspace members to the permission group
|
||||
|
||||
<Callout type="info">
|
||||
Any workspace admin on an Enterprise-entitled workspace can manage permission groups. Users not assigned to any group have full access. Permission restrictions are enforced at both UI and execution time, and apply to workflows based on the workflow's workspace.
|
||||
</Callout>
|
||||
Any workspace admin on an Enterprise-entitled workspace can manage permission groups. Users not assigned to any group have full access. Restrictions are enforced at both UI and execution time, based on the workflow's workspace.
|
||||
|
||||
See the [Access Control guide](/docs/enterprise/access-control) for full details.
|
||||
|
||||
---
|
||||
|
||||
@@ -40,69 +39,46 @@ See the [SSO setup guide](/docs/enterprise/sso) for step-by-step instructions an
|
||||
|
||||
---
|
||||
|
||||
## Self-Hosted Configuration
|
||||
## Whitelabeling
|
||||
|
||||
For self-hosted deployments, enterprise features can be enabled via environment variables without requiring billing.
|
||||
Replace Sim's default branding — logos, product name, and favicons — with your own. See the [whitelabeling guide](/docs/enterprise/whitelabeling).
|
||||
|
||||
### Environment Variables
|
||||
---
|
||||
|
||||
## Audit Logs
|
||||
|
||||
Track configuration and security-relevant actions across your organization for compliance and monitoring. See the [audit logs guide](/docs/enterprise/audit-logs).
|
||||
|
||||
---
|
||||
|
||||
## Data Retention
|
||||
|
||||
Configure how long execution logs, soft-deleted resources, and Mothership data are kept before permanent deletion. See the [data retention guide](/docs/enterprise/data-retention).
|
||||
|
||||
---
|
||||
|
||||
<FAQ items={[
|
||||
{ question: "Who can manage Enterprise features?", answer: "Workspace admins on an Enterprise-entitled workspace. Access Control, SSO, whitelabeling, audit logs, and data retention are all configured per workspace under Settings → Enterprise." },
|
||||
{ question: "Which SSO providers are supported?", answer: "Sim supports SAML 2.0 and OIDC, which works with virtually any enterprise identity provider including Okta, Azure AD (Entra ID), Google Workspace, ADFS, and OneLogin." },
|
||||
{ question: "How do access control permission groups work?", answer: "Permission groups are created per workspace and let you restrict which AI providers, workflow blocks, and platform features are available to specific members of that workspace. Each user can belong to at most one group per workspace. Users not assigned to any group have full access. Restrictions are enforced at both the UI level and at execution time based on the workflow's workspace." },
|
||||
]} />
|
||||
|
||||
---
|
||||
|
||||
## Self-hosted setup
|
||||
|
||||
Self-hosted deployments enable enterprise features via environment variables instead of billing.
|
||||
|
||||
| Variable | Description |
|
||||
|----------|-------------|
|
||||
| `ORGANIZATIONS_ENABLED`, `NEXT_PUBLIC_ORGANIZATIONS_ENABLED` | Enable team/organization management |
|
||||
| `ACCESS_CONTROL_ENABLED`, `NEXT_PUBLIC_ACCESS_CONTROL_ENABLED` | Permission groups for access restrictions |
|
||||
| `SSO_ENABLED`, `NEXT_PUBLIC_SSO_ENABLED` | Single Sign-On with SAML/OIDC |
|
||||
| `CREDENTIAL_SETS_ENABLED`, `NEXT_PUBLIC_CREDENTIAL_SETS_ENABLED` | Polling Groups for email triggers |
|
||||
| `INBOX_ENABLED`, `NEXT_PUBLIC_INBOX_ENABLED` | Sim Mailer inbox for outbound email |
|
||||
| `WHITELABELING_ENABLED`, `NEXT_PUBLIC_WHITELABELING_ENABLED` | Custom branding and white-labeling |
|
||||
| `AUDIT_LOGS_ENABLED`, `NEXT_PUBLIC_AUDIT_LOGS_ENABLED` | Audit logging for compliance and monitoring |
|
||||
| `DISABLE_INVITATIONS`, `NEXT_PUBLIC_DISABLE_INVITATIONS` | Globally disable workspace/organization invitations |
|
||||
| `ORGANIZATIONS_ENABLED`, `NEXT_PUBLIC_ORGANIZATIONS_ENABLED` | Team and organization management |
|
||||
| `ACCESS_CONTROL_ENABLED`, `NEXT_PUBLIC_ACCESS_CONTROL_ENABLED` | Permission groups |
|
||||
| `SSO_ENABLED`, `NEXT_PUBLIC_SSO_ENABLED` | SAML and OIDC sign-in |
|
||||
| `WHITELABELING_ENABLED`, `NEXT_PUBLIC_WHITELABELING_ENABLED` | Custom branding |
|
||||
| `AUDIT_LOGS_ENABLED`, `NEXT_PUBLIC_AUDIT_LOGS_ENABLED` | Audit logging |
|
||||
| `NEXT_PUBLIC_DATA_RETENTION_ENABLED` | Data retention configuration |
|
||||
| `CREDENTIAL_SETS_ENABLED`, `NEXT_PUBLIC_CREDENTIAL_SETS_ENABLED` | Polling groups for email triggers |
|
||||
| `INBOX_ENABLED`, `NEXT_PUBLIC_INBOX_ENABLED` | Sim Mailer inbox |
|
||||
| `DISABLE_INVITATIONS`, `NEXT_PUBLIC_DISABLE_INVITATIONS` | Disable invitations; manage membership via Admin API |
|
||||
|
||||
### Organization Management
|
||||
|
||||
When billing is disabled, use the Admin API to manage organizations:
|
||||
|
||||
```bash
|
||||
# Create an organization
|
||||
curl -X POST https://your-instance/api/v1/admin/organizations \
|
||||
-H "x-admin-key: YOUR_ADMIN_API_KEY" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"name": "My Organization", "ownerId": "user-id-here"}'
|
||||
|
||||
# Add a member
|
||||
curl -X POST https://your-instance/api/v1/admin/organizations/{orgId}/members \
|
||||
-H "x-admin-key: YOUR_ADMIN_API_KEY" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"userId": "user-id-here", "role": "admin"}'
|
||||
```
|
||||
|
||||
### Workspace Members
|
||||
|
||||
When invitations are disabled, use the Admin API to manage workspace memberships directly:
|
||||
|
||||
```bash
|
||||
# Add a user to a workspace
|
||||
curl -X POST https://your-instance/api/v1/admin/workspaces/{workspaceId}/members \
|
||||
-H "x-admin-key: YOUR_ADMIN_API_KEY" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"userId": "user-id-here", "permissions": "write"}'
|
||||
|
||||
# Remove a user from a workspace
|
||||
curl -X DELETE "https://your-instance/api/v1/admin/workspaces/{workspaceId}/members?userId=user-id-here" \
|
||||
-H "x-admin-key: YOUR_ADMIN_API_KEY"
|
||||
```
|
||||
|
||||
### Notes
|
||||
|
||||
- Access Control is scoped per workspace. Set `ACCESS_CONTROL_ENABLED` and `NEXT_PUBLIC_ACCESS_CONTROL_ENABLED` to enable it on every workspace in a self-hosted deployment, bypassing the Enterprise plan check.
|
||||
- When `DISABLE_INVITATIONS` is set, users cannot send invitations. Use the Admin API to manage workspace and organization memberships instead.
|
||||
|
||||
<FAQ items={[
|
||||
{ question: "What are the minimum requirements to self-host Sim?", answer: "The Docker Compose production setup includes the Sim application (8 GB memory limit), a realtime collaboration server (1 GB memory limit), and a PostgreSQL database with pgvector. A machine with at least 16 GB of RAM and 4 CPU cores is recommended. You will also need Docker and Docker Compose installed." },
|
||||
{ question: "Can I run Sim completely offline with local AI models?", answer: "Yes. Sim supports Ollama and VLLM for running local AI models. A separate Docker Compose configuration (docker-compose.ollama.yml) is available for deploying with Ollama. This lets you run workflows without any external API calls, keeping all data on your infrastructure." },
|
||||
{ question: "How does data privacy work with self-hosted deployments?", answer: "When self-hosted, all data stays on your infrastructure. Workflow definitions, execution logs, credentials, and user data are stored in your PostgreSQL database. If you use local AI models through Ollama or VLLM, no data leaves your network. When using external AI providers, only the data sent in prompts goes to those providers." },
|
||||
{ question: "Do I need a paid license to self-host Sim?", answer: "The core Sim platform is open source under Apache 2.0 and can be self-hosted for free. Enterprise features like SSO (SAML/OIDC), access control with permission groups, and organization management require an Enterprise subscription for production use. These features can be enabled via environment variables for development and evaluation without a license." },
|
||||
{ question: "Which SSO providers are supported?", answer: "Sim supports SAML 2.0 and OIDC protocols, which means it works with virtually any enterprise identity provider including Okta, Azure AD (Entra ID), Google Workspace, and OneLogin. Configuration is done through Settings in the workspace UI." },
|
||||
{ question: "How do I manage users when invitations are disabled?", answer: "Use the Admin API with your admin API key. You can create organizations, add members to organizations with specific roles, add users to workspaces with defined permissions, and remove users. All management is done through REST API calls authenticated with the x-admin-key header." },
|
||||
{ question: "Can I scale Sim horizontally for high availability?", answer: "The Docker Compose setup is designed for single-node deployments. For production scaling, you can deploy on Kubernetes with multiple application replicas behind a load balancer. The database can be scaled independently using managed PostgreSQL services. Redis can be configured for session and cache management across multiple instances." },
|
||||
{ question: "How do access control permission groups work?", answer: "Permission groups are created per workspace and let you restrict which AI providers, workflow blocks, and platform features are available to specific members of that workspace. Each user can belong to at most one group per workspace (and different groups in different workspaces). Users not assigned to any group have full access. Restrictions are enforced at both the UI level (hiding restricted options) and at execution time (blocking unauthorized operations) — execution enforcement is based on the workflow's workspace. Any workspace admin on an Enterprise-entitled workspace can manage permission groups." },
|
||||
]} />
|
||||
Once enabled, each feature is configured through the same Settings UI as Sim Cloud. When invitations are disabled, use the Admin API (`x-admin-key` header) to manage organization and workspace membership.
|
||||
|
||||
@@ -62,16 +62,14 @@ The **Callback URL** shown in the form is the endpoint your identity provider mu
|
||||
|
||||
**OIDC providers** (Okta, Microsoft Entra ID, Google Workspace, Auth0):
|
||||
```
|
||||
https://simstudio.ai/api/auth/sso/callback/{provider-id}
|
||||
https://sim.ai/api/auth/sso/callback/{provider-id}
|
||||
```
|
||||
|
||||
**SAML providers** (ADFS, Shibboleth):
|
||||
```
|
||||
https://simstudio.ai/api/auth/sso/saml2/callback/{provider-id}
|
||||
https://sim.ai/api/auth/sso/saml2/callback/{provider-id}
|
||||
```
|
||||
|
||||
For self-hosted, replace `simstudio.ai` with your instance hostname.
|
||||
|
||||
### 5. Save and test
|
||||
|
||||
Click **Save**. To test, sign out and use the **Sign in with SSO** button on the login page. Enter an email address at your configured domain — Sim will redirect you to your identity provider.
|
||||
@@ -92,7 +90,7 @@ Click **Save**. To test, sign out and use the **Sign in with SSO** button on the
|
||||
2. Select **OIDC - OpenID Connect**, then **Web Application**
|
||||
3. Set the **Sign-in redirect URI** to your Sim callback URL:
|
||||
```
|
||||
https://simstudio.ai/api/auth/sso/callback/okta
|
||||
https://sim.ai/api/auth/sso/callback/okta
|
||||
```
|
||||
4. Under **Assignments**, grant access to the relevant users or groups
|
||||
5. Copy the **Client ID** and **Client Secret** from the app's **General** tab
|
||||
@@ -109,9 +107,7 @@ Click **Save**. To test, sign out and use the **Sign in with SSO** button on the
|
||||
| Client ID | From Okta app |
|
||||
| Client Secret | From Okta app |
|
||||
|
||||
<Callout type="info">
|
||||
The issuer URL uses Okta's default authorization server (`/oauth2/default`), which is pre-configured on every Okta org. If you created a custom authorization server, replace `default` with your server name.
|
||||
</Callout>
|
||||
The issuer URL uses Okta's default authorization server, which is pre-configured on every Okta org. If you created a custom authorization server, replace `default` with your server name.
|
||||
|
||||
</Tab>
|
||||
|
||||
@@ -124,7 +120,7 @@ Click **Save**. To test, sign out and use the **Sign in with SSO** button on the
|
||||
1. Go to **Microsoft Entra ID → App registrations → New registration**
|
||||
2. Under **Redirect URI**, select **Web** and enter your Sim callback URL:
|
||||
```
|
||||
https://simstudio.ai/api/auth/sso/callback/azure-ad
|
||||
https://sim.ai/api/auth/sso/callback/azure-ad
|
||||
```
|
||||
3. After registration, go to **Certificates & secrets → New client secret** and copy the value immediately — it won't be shown again
|
||||
4. Go to **Overview** and copy the **Application (client) ID** and **Directory (tenant) ID**
|
||||
@@ -140,10 +136,6 @@ Click **Save**. To test, sign out and use the **Sign in with SSO** button on the
|
||||
| Client ID | Application (client) ID |
|
||||
| Client Secret | Secret value |
|
||||
|
||||
<Callout type="info">
|
||||
Replace `{tenant-id}` with your Directory (tenant) ID from the app's Overview page. Sim auto-discovers token and JWKS endpoints from the issuer.
|
||||
</Callout>
|
||||
|
||||
</Tab>
|
||||
|
||||
<Tab value="Google Workspace">
|
||||
@@ -156,7 +148,7 @@ Click **Save**. To test, sign out and use the **Sign in with SSO** button on the
|
||||
2. Set the application type to **Web application**
|
||||
3. Add your Sim callback URL to **Authorized redirect URIs**:
|
||||
```
|
||||
https://simstudio.ai/api/auth/sso/callback/google-workspace
|
||||
https://sim.ai/api/auth/sso/callback/google-workspace
|
||||
```
|
||||
4. Copy the **Client ID** and **Client Secret**
|
||||
|
||||
@@ -187,14 +179,12 @@ Click **Save**. To test, sign out and use the **Sign in with SSO** button on the
|
||||
2. Choose **Claims aware**, then **Enter data about the relying party manually**
|
||||
3. Set the **Relying party identifier** (Entity ID) to your Sim base URL:
|
||||
```
|
||||
https://simstudio.ai
|
||||
https://sim.ai
|
||||
```
|
||||
For self-hosted, use your instance's base URL (e.g. `https://sim.company.com`)
|
||||
4. Add an endpoint: **SAML Assertion Consumer Service** (HTTP POST) with the URL:
|
||||
```
|
||||
https://simstudio.ai/api/auth/sso/saml2/callback/adfs
|
||||
https://sim.ai/api/auth/sso/saml2/callback/adfs
|
||||
```
|
||||
For self-hosted: `https://sim.company.com/api/auth/sso/saml2/callback/adfs`
|
||||
5. Export the **Token-signing certificate** from **Certificates**: right-click → **View Certificate → Details → Copy to File**, choose **Base-64 encoded X.509 (.CER)**. The `.cer` file is PEM-encoded — rename it to `.pem` before pasting its contents into Sim.
|
||||
6. Note the **ADFS Federation Service endpoint URL** (e.g. `https://adfs.company.com/adfs/ls`)
|
||||
|
||||
@@ -204,7 +194,7 @@ Click **Save**. To test, sign out and use the **Sign in with SSO** button on the
|
||||
|-------|-------|
|
||||
| Provider Type | SAML |
|
||||
| Provider ID | `adfs` |
|
||||
| Issuer URL | `https://simstudio.ai` |
|
||||
| Issuer URL | `https://sim.ai` |
|
||||
| Domain | `company.com` |
|
||||
| Entry Point URL | `https://adfs.company.com/adfs/ls` |
|
||||
| Certificate | Contents of the `.pem` file |
|
||||
@@ -223,7 +213,7 @@ Click **Save**. To test, sign out and use the **Sign in with SSO** button on the
|
||||
|
||||
Once SSO is configured, users with your domain (`company.com`) can sign in through your identity provider:
|
||||
|
||||
1. User goes to `simstudio.ai` and clicks **Sign in with SSO**
|
||||
1. User goes to `sim.ai` and clicks **Sign in with SSO**
|
||||
2. They enter their work email (e.g. `alice@company.com`)
|
||||
3. Sim redirects them to your identity provider
|
||||
4. After authenticating, they are returned to Sim and added to your organization automatically
|
||||
@@ -235,10 +225,6 @@ Users who sign in via SSO for the first time are automatically provisioned and a
|
||||
Password-based login remains available. Forcing all organization members to use SSO exclusively is not yet supported.
|
||||
</Callout>
|
||||
|
||||
<Callout type="info">
|
||||
**Self-hosted:** Automatic organization provisioning requires `ORGANIZATIONS_ENABLED=true` in addition to `SSO_ENABLED=true`. Without it, SSO authentication still works — users get a valid session — but they are not automatically added to an organization.
|
||||
</Callout>
|
||||
|
||||
---
|
||||
|
||||
<FAQ items={[
|
||||
@@ -268,7 +254,7 @@ Users who sign in via SSO for the first time are automatically provisioned and a
|
||||
},
|
||||
{
|
||||
question: "What is the Callback URL?",
|
||||
answer: "The Callback URL (also called Redirect URI or ACS URL) is the endpoint in Sim that receives the authentication response from your identity provider. For OIDC providers it follows the format: https://simstudio.ai/api/auth/sso/callback/{provider-id}. For SAML providers it is: https://simstudio.ai/api/auth/sso/saml2/callback/{provider-id}. You must register this URL in your identity provider before SSO will work."
|
||||
answer: "The Callback URL (also called Redirect URI or ACS URL) is the endpoint in Sim that receives the authentication response from your identity provider. For OIDC providers it follows the format: https://sim.ai/api/auth/sso/callback/{provider-id}. For SAML providers it is: https://sim.ai/api/auth/sso/saml2/callback/{provider-id}. You must register this URL in your identity provider before SSO will work."
|
||||
},
|
||||
{
|
||||
question: "How do I update or replace an existing SSO configuration?",
|
||||
|
||||
@@ -3,7 +3,6 @@ title: Whitelabeling
|
||||
description: Replace Sim branding with your own logo, colors, and links
|
||||
---
|
||||
|
||||
import { Callout } from 'fumadocs-ui/components/callout'
|
||||
import { FAQ } from '@/components/ui/faq'
|
||||
import { Image } from '@/components/ui/image'
|
||||
|
||||
@@ -65,9 +64,7 @@ Whitelabeling replaces the following visual elements:
|
||||
- **Primary and accent colors** — applied to buttons, active states, and highlights
|
||||
- **Support and legal links** — help prompts and footer links point to your URLs
|
||||
|
||||
<Callout type="info">
|
||||
Whitelabeling applies only to members of your organization. Public-facing pages (login, marketing) are not affected.
|
||||
</Callout>
|
||||
Whitelabeling applies only to members of your organization. Public-facing pages (login, marketing) are not affected.
|
||||
|
||||
---
|
||||
|
||||
@@ -103,4 +100,4 @@ WHITELABELING_ENABLED=true
|
||||
NEXT_PUBLIC_WHITELABELING_ENABLED=true
|
||||
```
|
||||
|
||||
Once enabled, configure branding through **Settings → Enterprise → Whitelabeling** the same way as Sim Cloud.
|
||||
Once enabled, configure branding through **Settings → Enterprise → Whitelabeling** the same way.
|
||||
|
||||
629
apps/docs/content/docs/en/tools/agentphone.mdx
Normal file
629
apps/docs/content/docs/en/tools/agentphone.mdx
Normal file
@@ -0,0 +1,629 @@
|
||||
---
|
||||
title: AgentPhone
|
||||
description: Provision numbers, send SMS and iMessage, and place voice calls with AgentPhone
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="agentphone"
|
||||
color="linear-gradient(135deg, #1a1a1a 0%, #0a2a14 100%)"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[AgentPhone](https://agentphone.to/) is an API-first voice and messaging platform built for AI agents. AgentPhone lets you provision real phone numbers, place outbound AI voice calls, send SMS and iMessage, manage conversations and contacts, and monitor usage — all through a simple REST API designed for programmatic access.
|
||||
|
||||
**Why AgentPhone?**
|
||||
- **Agent-Native Telephony:** Purpose-built for AI agents — provision numbers, place calls, and send messages without carrier contracts or telephony plumbing.
|
||||
- **Voice + Messaging in One API:** Drive outbound AI voice calls alongside SMS, MMS, and iMessage from the same account and phone numbers.
|
||||
- **Conversation & Transcript Management:** Every call returns an ordered transcript; every message thread is tracked as a conversation with full history and metadata.
|
||||
- **Contacts Built In:** Create, search, update, and delete contacts on the account so your agents can reference people by name instead of raw phone numbers.
|
||||
- **Usage Visibility:** Inspect plan limits, current counts, and daily/monthly aggregation so workflows can stay inside guardrails.
|
||||
|
||||
**Using AgentPhone in Sim**
|
||||
|
||||
Sim's AgentPhone integration connects your agentic workflows directly to AgentPhone using an API key. With 22 operations spanning numbers, calls, conversations, contacts, and usage, you can build powerful voice and messaging automations without writing backend code.
|
||||
|
||||
**Key benefits of using AgentPhone in Sim:**
|
||||
- **Dynamic number provisioning:** Reserve US or Canadian numbers on the fly — per agent, per customer, or per workflow — and release them when no longer needed.
|
||||
- **Outbound AI voice calls:** Place calls from an agent with an optional greeting, voice override, or system prompt, and read the full transcript back as structured data once the call completes.
|
||||
- **Two-way messaging:** Send SMS, MMS, or iMessage, fetch conversation history, and react to incoming iMessages — all from inside your workflow.
|
||||
- **Contact and metadata management:** Keep an account-level contact list and attach custom JSON metadata to conversations so downstream blocks can branch on state.
|
||||
- **Operational insight:** Pull current usage stats and daily/monthly breakdowns to monitor consumption and enforce plan limits before making the next call.
|
||||
|
||||
Whether you're building an outbound AI voice agent, running automated SMS follow-ups, managing two-way customer conversations, or monitoring phone usage across your organization, AgentPhone in Sim gives you direct, secure access to the full AgentPhone API — no middleware required. Simply configure your API key, select the operation you need, and let Sim handle the rest.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Give your workflow a phone. Provision SMS- and voice-enabled numbers, send messages and tapback reactions, place outbound voice calls, manage conversations and contacts, and track usage — all through a single AgentPhone API key.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `agentphone_create_call`
|
||||
|
||||
Initiate an outbound voice call from an AgentPhone agent
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `agentId` | string | Yes | Agent that will handle the call |
|
||||
| `toNumber` | string | Yes | Phone number to call in E.164 format \(e.g. +14155551234\) |
|
||||
| `fromNumberId` | string | No | Phone number ID to use as caller ID. Must belong to the agent. If omitted, the agent's first assigned number is used. |
|
||||
| `initialGreeting` | string | No | Optional greeting spoken when the recipient answers |
|
||||
| `voice` | string | No | Voice ID override for this call \(defaults to the agent's configured voice\) |
|
||||
| `systemPrompt` | string | No | When provided, uses a built-in LLM for the conversation instead of forwarding to your webhook |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Unique call identifier |
|
||||
| `agentId` | string | Agent handling the call |
|
||||
| `status` | string | Initial call status |
|
||||
| `toNumber` | string | Destination phone number |
|
||||
| `fromNumber` | string | Caller ID used for the call |
|
||||
| `phoneNumberId` | string | ID of the phone number used as caller ID |
|
||||
| `direction` | string | Call direction \(outbound\) |
|
||||
| `startedAt` | string | ISO 8601 timestamp |
|
||||
|
||||
### `agentphone_create_contact`
|
||||
|
||||
Create a new contact in AgentPhone
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `phoneNumber` | string | Yes | Phone number in E.164 format \(e.g. +14155551234\) |
|
||||
| `name` | string | Yes | Contact's full name |
|
||||
| `email` | string | No | Contact's email address |
|
||||
| `notes` | string | No | Freeform notes stored on the contact |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Contact ID |
|
||||
| `phoneNumber` | string | Phone number in E.164 format |
|
||||
| `name` | string | Contact name |
|
||||
| `email` | string | Contact email address |
|
||||
| `notes` | string | Freeform notes |
|
||||
| `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| `updatedAt` | string | ISO 8601 update timestamp |
|
||||
|
||||
### `agentphone_create_number`
|
||||
|
||||
Provision a new SMS- and voice-enabled phone number
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `country` | string | No | Two-letter country code \(e.g. US, CA\). Defaults to US. |
|
||||
| `areaCode` | string | No | Preferred area code \(US/CA only, e.g. "415"\). Best-effort — may be ignored if unavailable. |
|
||||
| `agentId` | string | No | Optionally attach the number to an agent immediately |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Unique phone number ID |
|
||||
| `phoneNumber` | string | Provisioned phone number in E.164 format |
|
||||
| `country` | string | Two-letter country code |
|
||||
| `status` | string | Number status \(e.g. active\) |
|
||||
| `type` | string | Number type \(e.g. sms\) |
|
||||
| `agentId` | string | Agent the number is attached to |
|
||||
| `createdAt` | string | ISO 8601 timestamp when the number was created |
|
||||
|
||||
### `agentphone_delete_contact`
|
||||
|
||||
Delete a contact by ID
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `contactId` | string | Yes | Contact ID |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | ID of the deleted contact |
|
||||
| `deleted` | boolean | Whether the contact was deleted successfully |
|
||||
|
||||
### `agentphone_get_call`
|
||||
|
||||
Fetch a call and its full transcript
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `callId` | string | Yes | ID of the call to retrieve |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Call ID |
|
||||
| `agentId` | string | Agent that handled the call |
|
||||
| `phoneNumberId` | string | Phone number ID |
|
||||
| `phoneNumber` | string | Phone number used for the call |
|
||||
| `fromNumber` | string | Caller phone number |
|
||||
| `toNumber` | string | Recipient phone number |
|
||||
| `direction` | string | inbound or outbound |
|
||||
| `status` | string | Call status |
|
||||
| `startedAt` | string | ISO 8601 timestamp |
|
||||
| `endedAt` | string | ISO 8601 timestamp |
|
||||
| `durationSeconds` | number | Call duration in seconds |
|
||||
| `lastTranscriptSnippet` | string | Last transcript snippet |
|
||||
| `recordingUrl` | string | Recording audio URL |
|
||||
| `recordingAvailable` | boolean | Whether a recording is available |
|
||||
| `transcripts` | array | Ordered transcript turns for the call |
|
||||
| ↳ `id` | string | Transcript turn ID |
|
||||
| ↳ `transcript` | string | User utterance |
|
||||
| ↳ `confidence` | number | Speech recognition confidence |
|
||||
| ↳ `response` | string | Agent response \(when available\) |
|
||||
| ↳ `createdAt` | string | ISO 8601 timestamp |
|
||||
|
||||
### `agentphone_get_call_transcript`
|
||||
|
||||
Get the full ordered transcript for a call
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `callId` | string | Yes | ID of the call to retrieve the transcript for |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `callId` | string | Call ID |
|
||||
| `transcript` | array | Ordered transcript turns for the call |
|
||||
| ↳ `role` | string | Speaker role \(user or agent\) |
|
||||
| ↳ `content` | string | Turn content |
|
||||
| ↳ `createdAt` | string | ISO 8601 timestamp |
|
||||
|
||||
### `agentphone_get_contact`
|
||||
|
||||
Fetch a single contact by ID
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `contactId` | string | Yes | Contact ID |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Contact ID |
|
||||
| `phoneNumber` | string | Phone number in E.164 format |
|
||||
| `name` | string | Contact name |
|
||||
| `email` | string | Contact email address |
|
||||
| `notes` | string | Freeform notes |
|
||||
| `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| `updatedAt` | string | ISO 8601 update timestamp |
|
||||
|
||||
### `agentphone_get_conversation`
|
||||
|
||||
Get a conversation along with its recent messages
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `conversationId` | string | Yes | Conversation ID |
|
||||
| `messageLimit` | number | No | Number of recent messages to include \(default 50, max 100\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Conversation ID |
|
||||
| `agentId` | string | Agent ID |
|
||||
| `phoneNumberId` | string | Phone number ID |
|
||||
| `phoneNumber` | string | Phone number |
|
||||
| `participant` | string | External participant phone number |
|
||||
| `lastMessageAt` | string | ISO 8601 timestamp |
|
||||
| `messageCount` | number | Number of messages in the conversation |
|
||||
| `metadata` | json | Custom metadata stored on the conversation |
|
||||
| `createdAt` | string | ISO 8601 timestamp |
|
||||
| `messages` | array | Recent messages in the conversation |
|
||||
| ↳ `id` | string | Message ID |
|
||||
| ↳ `body` | string | Message text |
|
||||
| ↳ `fromNumber` | string | Sender phone number |
|
||||
| ↳ `toNumber` | string | Recipient phone number |
|
||||
| ↳ `direction` | string | inbound or outbound |
|
||||
| ↳ `channel` | string | sms, mms, or imessage |
|
||||
| ↳ `mediaUrl` | string | Attached media URL |
|
||||
| ↳ `receivedAt` | string | ISO 8601 timestamp |
|
||||
|
||||
### `agentphone_get_conversation_messages`
|
||||
|
||||
Get paginated messages for a conversation
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `conversationId` | string | Yes | Conversation ID |
|
||||
| `limit` | number | No | Number of messages to return \(default 50, max 200\) |
|
||||
| `before` | string | No | Return messages received before this ISO 8601 timestamp |
|
||||
| `after` | string | No | Return messages received after this ISO 8601 timestamp |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `data` | array | Messages in the conversation |
|
||||
| ↳ `id` | string | Message ID |
|
||||
| ↳ `body` | string | Message text |
|
||||
| ↳ `fromNumber` | string | Sender phone number |
|
||||
| ↳ `toNumber` | string | Recipient phone number |
|
||||
| ↳ `direction` | string | inbound or outbound |
|
||||
| ↳ `channel` | string | sms, mms, or imessage |
|
||||
| ↳ `mediaUrl` | string | Attached media URL |
|
||||
| ↳ `receivedAt` | string | ISO 8601 timestamp |
|
||||
| `hasMore` | boolean | Whether more messages are available |
|
||||
|
||||
### `agentphone_get_number_messages`
|
||||
|
||||
Fetch messages received on a specific phone number
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `numberId` | string | Yes | ID of the phone number |
|
||||
| `limit` | number | No | Number of messages to return \(default 50, max 200\) |
|
||||
| `before` | string | No | Return messages received before this ISO 8601 timestamp |
|
||||
| `after` | string | No | Return messages received after this ISO 8601 timestamp |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `data` | array | Messages received on the number |
|
||||
| ↳ `id` | string | Message ID |
|
||||
| ↳ `from_` | string | Sender phone number \(E.164\) |
|
||||
| ↳ `to` | string | Recipient phone number \(E.164\) |
|
||||
| ↳ `body` | string | Message text |
|
||||
| ↳ `direction` | string | inbound or outbound |
|
||||
| ↳ `channel` | string | Channel \(sms, mms, etc.\) |
|
||||
| ↳ `receivedAt` | string | ISO 8601 timestamp |
|
||||
| `hasMore` | boolean | Whether more messages are available |
|
||||
|
||||
### `agentphone_get_usage`
|
||||
|
||||
Retrieve current usage statistics for the AgentPhone account
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `plan` | json | Plan name and limits \(name, limits: numbers/messagesPerMonth/voiceMinutesPerMonth/maxCallDurationMinutes/concurrentCalls\) |
|
||||
| `numbers` | json | Phone number usage \(used, limit, remaining\) |
|
||||
| `stats` | json | Usage stats: totalMessages, messagesLast24h/7d/30d, totalCalls, callsLast24h/7d/30d, totalWebhookDeliveries, successfulWebhookDeliveries, failedWebhookDeliveries |
|
||||
| `periodStart` | string | Billing period start |
|
||||
| `periodEnd` | string | Billing period end |
|
||||
|
||||
### `agentphone_get_usage_daily`
|
||||
|
||||
Get a daily breakdown of usage (messages, calls, webhooks) for the last N days
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `days` | number | No | Number of days to return \(1-365, default 30\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `data` | array | Daily usage entries |
|
||||
| ↳ `date` | string | Day \(YYYY-MM-DD\) |
|
||||
| ↳ `messages` | number | Messages that day |
|
||||
| ↳ `calls` | number | Calls that day |
|
||||
| ↳ `webhooks` | number | Webhook deliveries that day |
|
||||
| `days` | number | Number of days returned |
|
||||
|
||||
### `agentphone_get_usage_monthly`
|
||||
|
||||
Get monthly usage aggregation (messages, calls, webhooks) for the last N months
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `months` | number | No | Number of months to return \(1-24, default 6\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `data` | array | Monthly usage entries |
|
||||
| ↳ `month` | string | Month \(YYYY-MM\) |
|
||||
| ↳ `messages` | number | Messages that month |
|
||||
| ↳ `calls` | number | Calls that month |
|
||||
| ↳ `webhooks` | number | Webhook deliveries that month |
|
||||
| `months` | number | Number of months returned |
|
||||
|
||||
### `agentphone_list_calls`
|
||||
|
||||
List voice calls for this AgentPhone account
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `limit` | number | No | Number of results to return \(default 20, max 100\) |
|
||||
| `offset` | number | No | Number of results to skip \(min 0\) |
|
||||
| `status` | string | No | Filter by status \(completed, in-progress, failed\) |
|
||||
| `direction` | string | No | Filter by direction \(inbound, outbound\) |
|
||||
| `type` | string | No | Filter by call type \(pstn, web\) |
|
||||
| `search` | string | No | Search by phone number \(matches fromNumber or toNumber\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `data` | array | Calls |
|
||||
| ↳ `id` | string | Call ID |
|
||||
| ↳ `agentId` | string | Agent that handled the call |
|
||||
| ↳ `phoneNumberId` | string | Phone number ID used for the call |
|
||||
| ↳ `phoneNumber` | string | Phone number used for the call |
|
||||
| ↳ `fromNumber` | string | Caller phone number |
|
||||
| ↳ `toNumber` | string | Recipient phone number |
|
||||
| ↳ `direction` | string | inbound or outbound |
|
||||
| ↳ `status` | string | Call status |
|
||||
| ↳ `startedAt` | string | ISO 8601 timestamp |
|
||||
| ↳ `endedAt` | string | ISO 8601 timestamp |
|
||||
| ↳ `durationSeconds` | number | Call duration in seconds |
|
||||
| ↳ `lastTranscriptSnippet` | string | Last transcript snippet |
|
||||
| ↳ `recordingUrl` | string | Recording audio URL |
|
||||
| ↳ `recordingAvailable` | boolean | Whether a recording is available |
|
||||
| `hasMore` | boolean | Whether more results are available |
|
||||
| `total` | number | Total number of matching calls |
|
||||
|
||||
### `agentphone_list_contacts`
|
||||
|
||||
List contacts for this AgentPhone account
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `search` | string | No | Filter by name or phone number \(case-insensitive contains\) |
|
||||
| `limit` | number | No | Number of results to return \(default 50\) |
|
||||
| `offset` | number | No | Number of results to skip \(min 0\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `data` | array | Contacts |
|
||||
| ↳ `id` | string | Contact ID |
|
||||
| ↳ `phoneNumber` | string | Phone number in E.164 format |
|
||||
| ↳ `name` | string | Contact name |
|
||||
| ↳ `email` | string | Contact email address |
|
||||
| ↳ `notes` | string | Freeform notes |
|
||||
| ↳ `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| ↳ `updatedAt` | string | ISO 8601 update timestamp |
|
||||
| `hasMore` | boolean | Whether more results are available |
|
||||
| `total` | number | Total number of contacts |
|
||||
|
||||
### `agentphone_list_conversations`
|
||||
|
||||
List conversations (message threads) for this AgentPhone account
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `limit` | number | No | Number of results to return \(default 20, max 100\) |
|
||||
| `offset` | number | No | Number of results to skip \(min 0\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `data` | array | Conversations |
|
||||
| ↳ `id` | string | Conversation ID |
|
||||
| ↳ `agentId` | string | Agent ID |
|
||||
| ↳ `phoneNumberId` | string | Phone number ID |
|
||||
| ↳ `phoneNumber` | string | Phone number |
|
||||
| ↳ `participant` | string | External participant phone number |
|
||||
| ↳ `lastMessageAt` | string | ISO 8601 timestamp |
|
||||
| ↳ `lastMessagePreview` | string | Last message preview |
|
||||
| ↳ `messageCount` | number | Number of messages in the conversation |
|
||||
| ↳ `metadata` | json | Custom metadata stored on the conversation |
|
||||
| ↳ `createdAt` | string | ISO 8601 timestamp |
|
||||
| `hasMore` | boolean | Whether more results are available |
|
||||
| `total` | number | Total number of conversations |
|
||||
|
||||
### `agentphone_list_numbers`
|
||||
|
||||
List all phone numbers provisioned for this AgentPhone account
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `limit` | number | No | Number of results to return \(default 20, max 100\) |
|
||||
| `offset` | number | No | Number of results to skip \(min 0\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `data` | array | Phone numbers |
|
||||
| ↳ `id` | string | Phone number ID |
|
||||
| ↳ `phoneNumber` | string | Phone number in E.164 format |
|
||||
| ↳ `country` | string | Two-letter country code |
|
||||
| ↳ `status` | string | Number status |
|
||||
| ↳ `type` | string | Number type \(e.g. sms\) |
|
||||
| ↳ `agentId` | string | Attached agent ID |
|
||||
| ↳ `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| `hasMore` | boolean | Whether more results are available |
|
||||
| `total` | number | Total number of phone numbers |
|
||||
|
||||
### `agentphone_react_to_message`
|
||||
|
||||
Send an iMessage tapback reaction to a message (iMessage only)
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `messageId` | string | Yes | ID of the message to react to |
|
||||
| `reaction` | string | Yes | Reaction type: love, like, dislike, laugh, emphasize, or question |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Reaction ID |
|
||||
| `reactionType` | string | Reaction type applied |
|
||||
| `messageId` | string | ID of the message that was reacted to |
|
||||
| `channel` | string | Channel \(imessage\) |
|
||||
|
||||
### `agentphone_release_number`
|
||||
|
||||
Release (delete) a phone number. This action is irreversible.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `numberId` | string | Yes | ID of the phone number to release |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | ID of the released phone number |
|
||||
| `released` | boolean | Whether the number was released successfully |
|
||||
|
||||
### `agentphone_send_message`
|
||||
|
||||
Send an outbound SMS or iMessage from an AgentPhone agent
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `agentId` | string | Yes | Agent sending the message |
|
||||
| `toNumber` | string | Yes | Recipient phone number in E.164 format \(e.g. +14155551234\) |
|
||||
| `body` | string | Yes | Message text to send |
|
||||
| `mediaUrl` | string | No | Optional URL of an image, video, or file to attach |
|
||||
| `numberId` | string | No | Phone number ID to send from. If omitted, the agent's first assigned number is used. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Message ID |
|
||||
| `status` | string | Delivery status |
|
||||
| `channel` | string | sms, mms, or imessage |
|
||||
| `fromNumber` | string | Sender phone number |
|
||||
| `toNumber` | string | Recipient phone number |
|
||||
|
||||
### `agentphone_update_contact`
|
||||
|
||||
Update a contact
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `contactId` | string | Yes | Contact ID |
|
||||
| `phoneNumber` | string | No | New phone number in E.164 format |
|
||||
| `name` | string | No | New contact name |
|
||||
| `email` | string | No | New email address |
|
||||
| `notes` | string | No | New freeform notes |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Contact ID |
|
||||
| `phoneNumber` | string | Phone number in E.164 format |
|
||||
| `name` | string | Contact name |
|
||||
| `email` | string | Contact email address |
|
||||
| `notes` | string | Freeform notes |
|
||||
| `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| `updatedAt` | string | ISO 8601 update timestamp |
|
||||
|
||||
### `agentphone_update_conversation`
|
||||
|
||||
Update conversation metadata (stored state). Pass null to clear existing metadata.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | AgentPhone API key |
|
||||
| `conversationId` | string | Yes | Conversation ID |
|
||||
| `metadata` | json | No | Custom key-value metadata to store on the conversation. Pass null to clear existing metadata. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Conversation ID |
|
||||
| `agentId` | string | Agent ID |
|
||||
| `phoneNumberId` | string | Phone number ID |
|
||||
| `phoneNumber` | string | Phone number |
|
||||
| `participant` | string | External participant phone number |
|
||||
| `lastMessageAt` | string | ISO 8601 timestamp |
|
||||
| `messageCount` | number | Number of messages |
|
||||
| `metadata` | json | Custom metadata stored on the conversation |
|
||||
| `createdAt` | string | ISO 8601 timestamp |
|
||||
| `messages` | array | Messages in the conversation |
|
||||
| ↳ `id` | string | Message ID |
|
||||
| ↳ `body` | string | Message body |
|
||||
| ↳ `fromNumber` | string | Sender phone number |
|
||||
| ↳ `toNumber` | string | Recipient phone number |
|
||||
| ↳ `direction` | string | inbound or outbound |
|
||||
| ↳ `channel` | string | Channel \(sms, mms, etc.\) |
|
||||
| ↳ `mediaUrl` | string | Media URL if any |
|
||||
| ↳ `receivedAt` | string | ISO 8601 timestamp |
|
||||
|
||||
|
||||
@@ -38,7 +38,7 @@ Integrate Ashby into the workflow. Manage candidates (list, get, create, update,
|
||||
|
||||
### `ashby_add_candidate_tag`
|
||||
|
||||
Adds a tag to a candidate in Ashby.
|
||||
Adds a tag to a candidate in Ashby and returns the updated candidate.
|
||||
|
||||
#### Input
|
||||
|
||||
@@ -52,7 +52,37 @@ Adds a tag to a candidate in Ashby.
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Whether the tag was successfully added |
|
||||
| `candidates` | json | List of candidates with rich fields \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], linkedInUrl, githubUrl, profileUrl, position, company, school, timezone, location with locationComponents\[\], tags\[\], applicationIds\[\], customFields\[\], resumeFileHandle, fileHandles\[\], source with sourceType, creditedToUser, fraudStatus, createdAt, updatedAt\) |
|
||||
| `jobs` | json | List of jobs \(id, title, confidential, status, employmentType, locationId, departmentId, defaultInterviewPlanId, interviewPlanIds\[\], customFields\[\], jobPostingIds\[\], customRequisitionId, brandId, hiringTeam\[\], author, createdAt, updatedAt, openedAt, closedAt, location with address, openings\[\] with latestVersion, compensation with compensationTiers\[\]\) |
|
||||
| `applications` | json | List of applications \(id, status, customFields\[\], candidate summary, currentInterviewStage, source with sourceType, archiveReason with customFields\[\], archivedAt, job summary, creditedToUser, hiringTeam\[\], appliedViaJobPostingId, submitterClientIp, submitterUserAgent, createdAt, updatedAt\) |
|
||||
| `notes` | json | List of notes \(id, content, author, isPrivate, createdAt\) |
|
||||
| `offers` | json | List of offers \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion with id/startDate/salary/createdAt/openingId/customFields\[\]/fileHandles\[\]/author/approvalStatus\) |
|
||||
| `archiveReasons` | json | List of archive reasons \(id, text, reasonType \[RejectedByCandidate/RejectedByOrg/Other\], isArchived\) |
|
||||
| `sources` | json | List of sources \(id, title, isArchived, sourceType \{id, title, isArchived\}\) |
|
||||
| `customFields` | json | List of custom field definitions \(id, title, isPrivate, fieldType, objectType, isArchived, isRequired, selectableValues\[\] \{label, value, isArchived\}\) |
|
||||
| `departments` | json | List of departments \(id, name, externalName, isArchived, parentId, createdAt, updatedAt\) |
|
||||
| `locations` | json | List of locations \(id, name, externalName, isArchived, isRemote, workplaceType, parentLocationId, type, address with addressCountry/Region/Locality/postalCode/streetAddress\) |
|
||||
| `jobPostings` | json | List of job postings \(id, title, jobId, departmentName, teamName, locationName, locationIds, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensationTierSummary, shouldDisplayCompensationOnJobBoard, updatedAt\) |
|
||||
| `openings` | json | List of openings \(id, openedAt, closedAt, isArchived, archivedAt, closeReasonId, openingState, latestVersion with identifier/description/authorId/createdAt/teamId/jobIds\[\]/targetHireDate/targetStartDate/isBackfill/employmentType/locationIds\[\]/hiringTeam\[\]/customFields\[\]\) |
|
||||
| `users` | json | List of users \(id, firstName, lastName, email, globalRole, isEnabled, updatedAt, managerId\) |
|
||||
| `interviewSchedules` | json | List of interview schedules \(id, applicationId, interviewStageId, interviewEvents\[\] with interviewerUserIds/startTime/endTime/feedbackLink/location/meetingLink/hasSubmittedFeedback, status, scheduledBy, createdAt, updatedAt\) |
|
||||
| `tags` | json | List of candidate tags \(id, title, isArchived\) |
|
||||
| `id` | string | Resource UUID |
|
||||
| `name` | string | Resource name |
|
||||
| `title` | string | Job title or job posting title |
|
||||
| `status` | string | Status |
|
||||
| `candidate` | json | Candidate details \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], customFields\[\], source, creditedToUser, createdAt, updatedAt\) |
|
||||
| `job` | json | Job details \(id, title, status, employmentType, locationId, departmentId, hiringTeam\[\], author, location, openings\[\], compensation, createdAt, updatedAt\) |
|
||||
| `application` | json | Application details \(id, status, customFields\[\], candidate, currentInterviewStage, source, archiveReason, job, hiringTeam\[\], createdAt, updatedAt\) |
|
||||
| `offer` | json | Offer details \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion\) |
|
||||
| `jobPosting` | json | Job posting details \(id, title, descriptionPlain, descriptionHtml, descriptionSocial, descriptionParts, departmentName, teamName, teamNameHierarchy\[\], jobId, locationName, locationIds, linkedData, address, isRemote, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensation, updatedAt\) |
|
||||
| `content` | string | Note content |
|
||||
| `author` | json | Note author \(id, firstName, lastName, email, globalRole, isEnabled\) |
|
||||
| `isPrivate` | boolean | Whether the note is private |
|
||||
| `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| `moreDataAvailable` | boolean | Whether more pages exist |
|
||||
| `nextCursor` | string | Pagination cursor for next page |
|
||||
| `syncToken` | string | Sync token for incremental updates |
|
||||
|
||||
### `ashby_change_application_stage`
|
||||
|
||||
@@ -71,8 +101,37 @@ Moves an application to a different interview stage. Requires an archive reason
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `applicationId` | string | Application UUID |
|
||||
| `stageId` | string | New interview stage UUID |
|
||||
| `candidates` | json | List of candidates with rich fields \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], linkedInUrl, githubUrl, profileUrl, position, company, school, timezone, location with locationComponents\[\], tags\[\], applicationIds\[\], customFields\[\], resumeFileHandle, fileHandles\[\], source with sourceType, creditedToUser, fraudStatus, createdAt, updatedAt\) |
|
||||
| `jobs` | json | List of jobs \(id, title, confidential, status, employmentType, locationId, departmentId, defaultInterviewPlanId, interviewPlanIds\[\], customFields\[\], jobPostingIds\[\], customRequisitionId, brandId, hiringTeam\[\], author, createdAt, updatedAt, openedAt, closedAt, location with address, openings\[\] with latestVersion, compensation with compensationTiers\[\]\) |
|
||||
| `applications` | json | List of applications \(id, status, customFields\[\], candidate summary, currentInterviewStage, source with sourceType, archiveReason with customFields\[\], archivedAt, job summary, creditedToUser, hiringTeam\[\], appliedViaJobPostingId, submitterClientIp, submitterUserAgent, createdAt, updatedAt\) |
|
||||
| `notes` | json | List of notes \(id, content, author, isPrivate, createdAt\) |
|
||||
| `offers` | json | List of offers \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion with id/startDate/salary/createdAt/openingId/customFields\[\]/fileHandles\[\]/author/approvalStatus\) |
|
||||
| `archiveReasons` | json | List of archive reasons \(id, text, reasonType \[RejectedByCandidate/RejectedByOrg/Other\], isArchived\) |
|
||||
| `sources` | json | List of sources \(id, title, isArchived, sourceType \{id, title, isArchived\}\) |
|
||||
| `customFields` | json | List of custom field definitions \(id, title, isPrivate, fieldType, objectType, isArchived, isRequired, selectableValues\[\] \{label, value, isArchived\}\) |
|
||||
| `departments` | json | List of departments \(id, name, externalName, isArchived, parentId, createdAt, updatedAt\) |
|
||||
| `locations` | json | List of locations \(id, name, externalName, isArchived, isRemote, workplaceType, parentLocationId, type, address with addressCountry/Region/Locality/postalCode/streetAddress\) |
|
||||
| `jobPostings` | json | List of job postings \(id, title, jobId, departmentName, teamName, locationName, locationIds, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensationTierSummary, shouldDisplayCompensationOnJobBoard, updatedAt\) |
|
||||
| `openings` | json | List of openings \(id, openedAt, closedAt, isArchived, archivedAt, closeReasonId, openingState, latestVersion with identifier/description/authorId/createdAt/teamId/jobIds\[\]/targetHireDate/targetStartDate/isBackfill/employmentType/locationIds\[\]/hiringTeam\[\]/customFields\[\]\) |
|
||||
| `users` | json | List of users \(id, firstName, lastName, email, globalRole, isEnabled, updatedAt, managerId\) |
|
||||
| `interviewSchedules` | json | List of interview schedules \(id, applicationId, interviewStageId, interviewEvents\[\] with interviewerUserIds/startTime/endTime/feedbackLink/location/meetingLink/hasSubmittedFeedback, status, scheduledBy, createdAt, updatedAt\) |
|
||||
| `tags` | json | List of candidate tags \(id, title, isArchived\) |
|
||||
| `id` | string | Resource UUID |
|
||||
| `name` | string | Resource name |
|
||||
| `title` | string | Job title or job posting title |
|
||||
| `status` | string | Status |
|
||||
| `candidate` | json | Candidate details \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], customFields\[\], source, creditedToUser, createdAt, updatedAt\) |
|
||||
| `job` | json | Job details \(id, title, status, employmentType, locationId, departmentId, hiringTeam\[\], author, location, openings\[\], compensation, createdAt, updatedAt\) |
|
||||
| `application` | json | Application details \(id, status, customFields\[\], candidate, currentInterviewStage, source, archiveReason, job, hiringTeam\[\], createdAt, updatedAt\) |
|
||||
| `offer` | json | Offer details \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion\) |
|
||||
| `jobPosting` | json | Job posting details \(id, title, descriptionPlain, descriptionHtml, descriptionSocial, descriptionParts, departmentName, teamName, teamNameHierarchy\[\], jobId, locationName, locationIds, linkedData, address, isRemote, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensation, updatedAt\) |
|
||||
| `content` | string | Note content |
|
||||
| `author` | json | Note author \(id, firstName, lastName, email, globalRole, isEnabled\) |
|
||||
| `isPrivate` | boolean | Whether the note is private |
|
||||
| `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| `moreDataAvailable` | boolean | Whether more pages exist |
|
||||
| `nextCursor` | string | Pagination cursor for next page |
|
||||
| `syncToken` | string | Sync token for incremental updates |
|
||||
|
||||
### `ashby_create_application`
|
||||
|
||||
@@ -95,7 +154,37 @@ Creates a new application for a candidate on a job. Optionally specify interview
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `applicationId` | string | Created application UUID |
|
||||
| `candidates` | json | List of candidates with rich fields \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], linkedInUrl, githubUrl, profileUrl, position, company, school, timezone, location with locationComponents\[\], tags\[\], applicationIds\[\], customFields\[\], resumeFileHandle, fileHandles\[\], source with sourceType, creditedToUser, fraudStatus, createdAt, updatedAt\) |
|
||||
| `jobs` | json | List of jobs \(id, title, confidential, status, employmentType, locationId, departmentId, defaultInterviewPlanId, interviewPlanIds\[\], customFields\[\], jobPostingIds\[\], customRequisitionId, brandId, hiringTeam\[\], author, createdAt, updatedAt, openedAt, closedAt, location with address, openings\[\] with latestVersion, compensation with compensationTiers\[\]\) |
|
||||
| `applications` | json | List of applications \(id, status, customFields\[\], candidate summary, currentInterviewStage, source with sourceType, archiveReason with customFields\[\], archivedAt, job summary, creditedToUser, hiringTeam\[\], appliedViaJobPostingId, submitterClientIp, submitterUserAgent, createdAt, updatedAt\) |
|
||||
| `notes` | json | List of notes \(id, content, author, isPrivate, createdAt\) |
|
||||
| `offers` | json | List of offers \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion with id/startDate/salary/createdAt/openingId/customFields\[\]/fileHandles\[\]/author/approvalStatus\) |
|
||||
| `archiveReasons` | json | List of archive reasons \(id, text, reasonType \[RejectedByCandidate/RejectedByOrg/Other\], isArchived\) |
|
||||
| `sources` | json | List of sources \(id, title, isArchived, sourceType \{id, title, isArchived\}\) |
|
||||
| `customFields` | json | List of custom field definitions \(id, title, isPrivate, fieldType, objectType, isArchived, isRequired, selectableValues\[\] \{label, value, isArchived\}\) |
|
||||
| `departments` | json | List of departments \(id, name, externalName, isArchived, parentId, createdAt, updatedAt\) |
|
||||
| `locations` | json | List of locations \(id, name, externalName, isArchived, isRemote, workplaceType, parentLocationId, type, address with addressCountry/Region/Locality/postalCode/streetAddress\) |
|
||||
| `jobPostings` | json | List of job postings \(id, title, jobId, departmentName, teamName, locationName, locationIds, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensationTierSummary, shouldDisplayCompensationOnJobBoard, updatedAt\) |
|
||||
| `openings` | json | List of openings \(id, openedAt, closedAt, isArchived, archivedAt, closeReasonId, openingState, latestVersion with identifier/description/authorId/createdAt/teamId/jobIds\[\]/targetHireDate/targetStartDate/isBackfill/employmentType/locationIds\[\]/hiringTeam\[\]/customFields\[\]\) |
|
||||
| `users` | json | List of users \(id, firstName, lastName, email, globalRole, isEnabled, updatedAt, managerId\) |
|
||||
| `interviewSchedules` | json | List of interview schedules \(id, applicationId, interviewStageId, interviewEvents\[\] with interviewerUserIds/startTime/endTime/feedbackLink/location/meetingLink/hasSubmittedFeedback, status, scheduledBy, createdAt, updatedAt\) |
|
||||
| `tags` | json | List of candidate tags \(id, title, isArchived\) |
|
||||
| `id` | string | Resource UUID |
|
||||
| `name` | string | Resource name |
|
||||
| `title` | string | Job title or job posting title |
|
||||
| `status` | string | Status |
|
||||
| `candidate` | json | Candidate details \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], customFields\[\], source, creditedToUser, createdAt, updatedAt\) |
|
||||
| `job` | json | Job details \(id, title, status, employmentType, locationId, departmentId, hiringTeam\[\], author, location, openings\[\], compensation, createdAt, updatedAt\) |
|
||||
| `application` | json | Application details \(id, status, customFields\[\], candidate, currentInterviewStage, source, archiveReason, job, hiringTeam\[\], createdAt, updatedAt\) |
|
||||
| `offer` | json | Offer details \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion\) |
|
||||
| `jobPosting` | json | Job posting details \(id, title, descriptionPlain, descriptionHtml, descriptionSocial, descriptionParts, departmentName, teamName, teamNameHierarchy\[\], jobId, locationName, locationIds, linkedData, address, isRemote, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensation, updatedAt\) |
|
||||
| `content` | string | Note content |
|
||||
| `author` | json | Note author \(id, firstName, lastName, email, globalRole, isEnabled\) |
|
||||
| `isPrivate` | boolean | Whether the note is private |
|
||||
| `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| `moreDataAvailable` | boolean | Whether more pages exist |
|
||||
| `nextCursor` | string | Pagination cursor for next page |
|
||||
| `syncToken` | string | Sync token for incremental updates |
|
||||
|
||||
### `ashby_create_candidate`
|
||||
|
||||
@@ -107,7 +196,7 @@ Creates a new candidate record in Ashby.
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Ashby API Key |
|
||||
| `name` | string | Yes | The candidate's full name |
|
||||
| `email` | string | Yes | Primary email address for the candidate |
|
||||
| `email` | string | No | Primary email address for the candidate |
|
||||
| `phoneNumber` | string | No | Primary phone number for the candidate |
|
||||
| `linkedInUrl` | string | No | LinkedIn profile URL |
|
||||
| `githubUrl` | string | No | GitHub profile URL |
|
||||
@@ -117,17 +206,37 @@ Creates a new candidate record in Ashby.
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Created candidate UUID |
|
||||
| `name` | string | Full name |
|
||||
| `primaryEmailAddress` | object | Primary email contact info |
|
||||
| ↳ `value` | string | Email address |
|
||||
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
|
||||
| ↳ `isPrimary` | boolean | Whether this is the primary email |
|
||||
| `primaryPhoneNumber` | object | Primary phone contact info |
|
||||
| ↳ `value` | string | Phone number |
|
||||
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
|
||||
| ↳ `isPrimary` | boolean | Whether this is the primary phone |
|
||||
| `candidates` | json | List of candidates with rich fields \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], linkedInUrl, githubUrl, profileUrl, position, company, school, timezone, location with locationComponents\[\], tags\[\], applicationIds\[\], customFields\[\], resumeFileHandle, fileHandles\[\], source with sourceType, creditedToUser, fraudStatus, createdAt, updatedAt\) |
|
||||
| `jobs` | json | List of jobs \(id, title, confidential, status, employmentType, locationId, departmentId, defaultInterviewPlanId, interviewPlanIds\[\], customFields\[\], jobPostingIds\[\], customRequisitionId, brandId, hiringTeam\[\], author, createdAt, updatedAt, openedAt, closedAt, location with address, openings\[\] with latestVersion, compensation with compensationTiers\[\]\) |
|
||||
| `applications` | json | List of applications \(id, status, customFields\[\], candidate summary, currentInterviewStage, source with sourceType, archiveReason with customFields\[\], archivedAt, job summary, creditedToUser, hiringTeam\[\], appliedViaJobPostingId, submitterClientIp, submitterUserAgent, createdAt, updatedAt\) |
|
||||
| `notes` | json | List of notes \(id, content, author, isPrivate, createdAt\) |
|
||||
| `offers` | json | List of offers \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion with id/startDate/salary/createdAt/openingId/customFields\[\]/fileHandles\[\]/author/approvalStatus\) |
|
||||
| `archiveReasons` | json | List of archive reasons \(id, text, reasonType \[RejectedByCandidate/RejectedByOrg/Other\], isArchived\) |
|
||||
| `sources` | json | List of sources \(id, title, isArchived, sourceType \{id, title, isArchived\}\) |
|
||||
| `customFields` | json | List of custom field definitions \(id, title, isPrivate, fieldType, objectType, isArchived, isRequired, selectableValues\[\] \{label, value, isArchived\}\) |
|
||||
| `departments` | json | List of departments \(id, name, externalName, isArchived, parentId, createdAt, updatedAt\) |
|
||||
| `locations` | json | List of locations \(id, name, externalName, isArchived, isRemote, workplaceType, parentLocationId, type, address with addressCountry/Region/Locality/postalCode/streetAddress\) |
|
||||
| `jobPostings` | json | List of job postings \(id, title, jobId, departmentName, teamName, locationName, locationIds, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensationTierSummary, shouldDisplayCompensationOnJobBoard, updatedAt\) |
|
||||
| `openings` | json | List of openings \(id, openedAt, closedAt, isArchived, archivedAt, closeReasonId, openingState, latestVersion with identifier/description/authorId/createdAt/teamId/jobIds\[\]/targetHireDate/targetStartDate/isBackfill/employmentType/locationIds\[\]/hiringTeam\[\]/customFields\[\]\) |
|
||||
| `users` | json | List of users \(id, firstName, lastName, email, globalRole, isEnabled, updatedAt, managerId\) |
|
||||
| `interviewSchedules` | json | List of interview schedules \(id, applicationId, interviewStageId, interviewEvents\[\] with interviewerUserIds/startTime/endTime/feedbackLink/location/meetingLink/hasSubmittedFeedback, status, scheduledBy, createdAt, updatedAt\) |
|
||||
| `tags` | json | List of candidate tags \(id, title, isArchived\) |
|
||||
| `id` | string | Resource UUID |
|
||||
| `name` | string | Resource name |
|
||||
| `title` | string | Job title or job posting title |
|
||||
| `status` | string | Status |
|
||||
| `candidate` | json | Candidate details \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], customFields\[\], source, creditedToUser, createdAt, updatedAt\) |
|
||||
| `job` | json | Job details \(id, title, status, employmentType, locationId, departmentId, hiringTeam\[\], author, location, openings\[\], compensation, createdAt, updatedAt\) |
|
||||
| `application` | json | Application details \(id, status, customFields\[\], candidate, currentInterviewStage, source, archiveReason, job, hiringTeam\[\], createdAt, updatedAt\) |
|
||||
| `offer` | json | Offer details \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion\) |
|
||||
| `jobPosting` | json | Job posting details \(id, title, descriptionPlain, descriptionHtml, descriptionSocial, descriptionParts, departmentName, teamName, teamNameHierarchy\[\], jobId, locationName, locationIds, linkedData, address, isRemote, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensation, updatedAt\) |
|
||||
| `content` | string | Note content |
|
||||
| `author` | json | Note author \(id, firstName, lastName, email, globalRole, isEnabled\) |
|
||||
| `isPrivate` | boolean | Whether the note is private |
|
||||
| `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| `moreDataAvailable` | boolean | Whether more pages exist |
|
||||
| `nextCursor` | string | Pagination cursor for next page |
|
||||
| `syncToken` | string | Sync token for incremental updates |
|
||||
|
||||
### `ashby_create_note`
|
||||
|
||||
@@ -147,7 +256,15 @@ Creates a note on a candidate in Ashby. Supports plain text and HTML content (bo
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `noteId` | string | Created note UUID |
|
||||
| `id` | string | Created note UUID |
|
||||
| `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| `isPrivate` | boolean | Whether the note is private |
|
||||
| `content` | string | Note content |
|
||||
| `author` | object | Author of the note |
|
||||
| ↳ `id` | string | Author user UUID |
|
||||
| ↳ `firstName` | string | Author first name |
|
||||
| ↳ `lastName` | string | Author last name |
|
||||
| ↳ `email` | string | Author email |
|
||||
|
||||
### `ashby_get_application`
|
||||
|
||||
@@ -164,28 +281,37 @@ Retrieves full details about a single application by its ID.
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Application UUID |
|
||||
| `status` | string | Application status \(Active, Hired, Archived, Lead\) |
|
||||
| `candidate` | object | Associated candidate |
|
||||
| ↳ `id` | string | Candidate UUID |
|
||||
| ↳ `name` | string | Candidate name |
|
||||
| `job` | object | Associated job |
|
||||
| ↳ `id` | string | Job UUID |
|
||||
| ↳ `title` | string | Job title |
|
||||
| `currentInterviewStage` | object | Current interview stage |
|
||||
| ↳ `id` | string | Stage UUID |
|
||||
| ↳ `title` | string | Stage title |
|
||||
| ↳ `type` | string | Stage type |
|
||||
| `source` | object | Application source |
|
||||
| ↳ `id` | string | Source UUID |
|
||||
| ↳ `title` | string | Source title |
|
||||
| `archiveReason` | object | Reason for archival |
|
||||
| ↳ `id` | string | Reason UUID |
|
||||
| ↳ `text` | string | Reason text |
|
||||
| ↳ `reasonType` | string | Reason type |
|
||||
| `archivedAt` | string | ISO 8601 archive timestamp |
|
||||
| `candidates` | json | List of candidates with rich fields \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], linkedInUrl, githubUrl, profileUrl, position, company, school, timezone, location with locationComponents\[\], tags\[\], applicationIds\[\], customFields\[\], resumeFileHandle, fileHandles\[\], source with sourceType, creditedToUser, fraudStatus, createdAt, updatedAt\) |
|
||||
| `jobs` | json | List of jobs \(id, title, confidential, status, employmentType, locationId, departmentId, defaultInterviewPlanId, interviewPlanIds\[\], customFields\[\], jobPostingIds\[\], customRequisitionId, brandId, hiringTeam\[\], author, createdAt, updatedAt, openedAt, closedAt, location with address, openings\[\] with latestVersion, compensation with compensationTiers\[\]\) |
|
||||
| `applications` | json | List of applications \(id, status, customFields\[\], candidate summary, currentInterviewStage, source with sourceType, archiveReason with customFields\[\], archivedAt, job summary, creditedToUser, hiringTeam\[\], appliedViaJobPostingId, submitterClientIp, submitterUserAgent, createdAt, updatedAt\) |
|
||||
| `notes` | json | List of notes \(id, content, author, isPrivate, createdAt\) |
|
||||
| `offers` | json | List of offers \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion with id/startDate/salary/createdAt/openingId/customFields\[\]/fileHandles\[\]/author/approvalStatus\) |
|
||||
| `archiveReasons` | json | List of archive reasons \(id, text, reasonType \[RejectedByCandidate/RejectedByOrg/Other\], isArchived\) |
|
||||
| `sources` | json | List of sources \(id, title, isArchived, sourceType \{id, title, isArchived\}\) |
|
||||
| `customFields` | json | List of custom field definitions \(id, title, isPrivate, fieldType, objectType, isArchived, isRequired, selectableValues\[\] \{label, value, isArchived\}\) |
|
||||
| `departments` | json | List of departments \(id, name, externalName, isArchived, parentId, createdAt, updatedAt\) |
|
||||
| `locations` | json | List of locations \(id, name, externalName, isArchived, isRemote, workplaceType, parentLocationId, type, address with addressCountry/Region/Locality/postalCode/streetAddress\) |
|
||||
| `jobPostings` | json | List of job postings \(id, title, jobId, departmentName, teamName, locationName, locationIds, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensationTierSummary, shouldDisplayCompensationOnJobBoard, updatedAt\) |
|
||||
| `openings` | json | List of openings \(id, openedAt, closedAt, isArchived, archivedAt, closeReasonId, openingState, latestVersion with identifier/description/authorId/createdAt/teamId/jobIds\[\]/targetHireDate/targetStartDate/isBackfill/employmentType/locationIds\[\]/hiringTeam\[\]/customFields\[\]\) |
|
||||
| `users` | json | List of users \(id, firstName, lastName, email, globalRole, isEnabled, updatedAt, managerId\) |
|
||||
| `interviewSchedules` | json | List of interview schedules \(id, applicationId, interviewStageId, interviewEvents\[\] with interviewerUserIds/startTime/endTime/feedbackLink/location/meetingLink/hasSubmittedFeedback, status, scheduledBy, createdAt, updatedAt\) |
|
||||
| `tags` | json | List of candidate tags \(id, title, isArchived\) |
|
||||
| `id` | string | Resource UUID |
|
||||
| `name` | string | Resource name |
|
||||
| `title` | string | Job title or job posting title |
|
||||
| `status` | string | Status |
|
||||
| `candidate` | json | Candidate details \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], customFields\[\], source, creditedToUser, createdAt, updatedAt\) |
|
||||
| `job` | json | Job details \(id, title, status, employmentType, locationId, departmentId, hiringTeam\[\], author, location, openings\[\], compensation, createdAt, updatedAt\) |
|
||||
| `application` | json | Application details \(id, status, customFields\[\], candidate, currentInterviewStage, source, archiveReason, job, hiringTeam\[\], createdAt, updatedAt\) |
|
||||
| `offer` | json | Offer details \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion\) |
|
||||
| `jobPosting` | json | Job posting details \(id, title, descriptionPlain, descriptionHtml, descriptionSocial, descriptionParts, departmentName, teamName, teamNameHierarchy\[\], jobId, locationName, locationIds, linkedData, address, isRemote, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensation, updatedAt\) |
|
||||
| `content` | string | Note content |
|
||||
| `author` | json | Note author \(id, firstName, lastName, email, globalRole, isEnabled\) |
|
||||
| `isPrivate` | boolean | Whether the note is private |
|
||||
| `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| `updatedAt` | string | ISO 8601 last update timestamp |
|
||||
| `moreDataAvailable` | boolean | Whether more pages exist |
|
||||
| `nextCursor` | string | Pagination cursor for next page |
|
||||
| `syncToken` | string | Sync token for incremental updates |
|
||||
|
||||
### `ashby_get_candidate`
|
||||
|
||||
@@ -202,27 +328,37 @@ Retrieves full details about a single candidate by their ID.
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Candidate UUID |
|
||||
| `name` | string | Full name |
|
||||
| `primaryEmailAddress` | object | Primary email contact info |
|
||||
| ↳ `value` | string | Email address |
|
||||
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
|
||||
| ↳ `isPrimary` | boolean | Whether this is the primary email |
|
||||
| `primaryPhoneNumber` | object | Primary phone contact info |
|
||||
| ↳ `value` | string | Phone number |
|
||||
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
|
||||
| ↳ `isPrimary` | boolean | Whether this is the primary phone |
|
||||
| `profileUrl` | string | URL to the candidate's Ashby profile |
|
||||
| `position` | string | Current position or title |
|
||||
| `company` | string | Current company |
|
||||
| `linkedInUrl` | string | LinkedIn profile URL |
|
||||
| `githubUrl` | string | GitHub profile URL |
|
||||
| `tags` | array | Tags applied to the candidate |
|
||||
| ↳ `id` | string | Tag UUID |
|
||||
| ↳ `title` | string | Tag title |
|
||||
| `applicationIds` | array | IDs of associated applications |
|
||||
| `candidates` | json | List of candidates with rich fields \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], linkedInUrl, githubUrl, profileUrl, position, company, school, timezone, location with locationComponents\[\], tags\[\], applicationIds\[\], customFields\[\], resumeFileHandle, fileHandles\[\], source with sourceType, creditedToUser, fraudStatus, createdAt, updatedAt\) |
|
||||
| `jobs` | json | List of jobs \(id, title, confidential, status, employmentType, locationId, departmentId, defaultInterviewPlanId, interviewPlanIds\[\], customFields\[\], jobPostingIds\[\], customRequisitionId, brandId, hiringTeam\[\], author, createdAt, updatedAt, openedAt, closedAt, location with address, openings\[\] with latestVersion, compensation with compensationTiers\[\]\) |
|
||||
| `applications` | json | List of applications \(id, status, customFields\[\], candidate summary, currentInterviewStage, source with sourceType, archiveReason with customFields\[\], archivedAt, job summary, creditedToUser, hiringTeam\[\], appliedViaJobPostingId, submitterClientIp, submitterUserAgent, createdAt, updatedAt\) |
|
||||
| `notes` | json | List of notes \(id, content, author, isPrivate, createdAt\) |
|
||||
| `offers` | json | List of offers \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion with id/startDate/salary/createdAt/openingId/customFields\[\]/fileHandles\[\]/author/approvalStatus\) |
|
||||
| `archiveReasons` | json | List of archive reasons \(id, text, reasonType \[RejectedByCandidate/RejectedByOrg/Other\], isArchived\) |
|
||||
| `sources` | json | List of sources \(id, title, isArchived, sourceType \{id, title, isArchived\}\) |
|
||||
| `customFields` | json | List of custom field definitions \(id, title, isPrivate, fieldType, objectType, isArchived, isRequired, selectableValues\[\] \{label, value, isArchived\}\) |
|
||||
| `departments` | json | List of departments \(id, name, externalName, isArchived, parentId, createdAt, updatedAt\) |
|
||||
| `locations` | json | List of locations \(id, name, externalName, isArchived, isRemote, workplaceType, parentLocationId, type, address with addressCountry/Region/Locality/postalCode/streetAddress\) |
|
||||
| `jobPostings` | json | List of job postings \(id, title, jobId, departmentName, teamName, locationName, locationIds, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensationTierSummary, shouldDisplayCompensationOnJobBoard, updatedAt\) |
|
||||
| `openings` | json | List of openings \(id, openedAt, closedAt, isArchived, archivedAt, closeReasonId, openingState, latestVersion with identifier/description/authorId/createdAt/teamId/jobIds\[\]/targetHireDate/targetStartDate/isBackfill/employmentType/locationIds\[\]/hiringTeam\[\]/customFields\[\]\) |
|
||||
| `users` | json | List of users \(id, firstName, lastName, email, globalRole, isEnabled, updatedAt, managerId\) |
|
||||
| `interviewSchedules` | json | List of interview schedules \(id, applicationId, interviewStageId, interviewEvents\[\] with interviewerUserIds/startTime/endTime/feedbackLink/location/meetingLink/hasSubmittedFeedback, status, scheduledBy, createdAt, updatedAt\) |
|
||||
| `tags` | json | List of candidate tags \(id, title, isArchived\) |
|
||||
| `id` | string | Resource UUID |
|
||||
| `name` | string | Resource name |
|
||||
| `title` | string | Job title or job posting title |
|
||||
| `status` | string | Status |
|
||||
| `candidate` | json | Candidate details \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], customFields\[\], source, creditedToUser, createdAt, updatedAt\) |
|
||||
| `job` | json | Job details \(id, title, status, employmentType, locationId, departmentId, hiringTeam\[\], author, location, openings\[\], compensation, createdAt, updatedAt\) |
|
||||
| `application` | json | Application details \(id, status, customFields\[\], candidate, currentInterviewStage, source, archiveReason, job, hiringTeam\[\], createdAt, updatedAt\) |
|
||||
| `offer` | json | Offer details \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion\) |
|
||||
| `jobPosting` | json | Job posting details \(id, title, descriptionPlain, descriptionHtml, descriptionSocial, descriptionParts, departmentName, teamName, teamNameHierarchy\[\], jobId, locationName, locationIds, linkedData, address, isRemote, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensation, updatedAt\) |
|
||||
| `content` | string | Note content |
|
||||
| `author` | json | Note author \(id, firstName, lastName, email, globalRole, isEnabled\) |
|
||||
| `isPrivate` | boolean | Whether the note is private |
|
||||
| `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| `updatedAt` | string | ISO 8601 last update timestamp |
|
||||
| `moreDataAvailable` | boolean | Whether more pages exist |
|
||||
| `nextCursor` | string | Pagination cursor for next page |
|
||||
| `syncToken` | string | Sync token for incremental updates |
|
||||
|
||||
### `ashby_get_job`
|
||||
|
||||
@@ -239,16 +375,37 @@ Retrieves full details about a single job by its ID.
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Job UUID |
|
||||
| `title` | string | Job title |
|
||||
| `status` | string | Job status \(Open, Closed, Draft, Archived\) |
|
||||
| `employmentType` | string | Employment type \(FullTime, PartTime, Intern, Contract, Temporary\) |
|
||||
| `departmentId` | string | Department UUID |
|
||||
| `locationId` | string | Location UUID |
|
||||
| `descriptionPlain` | string | Job description in plain text |
|
||||
| `isArchived` | boolean | Whether the job is archived |
|
||||
| `candidates` | json | List of candidates with rich fields \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], linkedInUrl, githubUrl, profileUrl, position, company, school, timezone, location with locationComponents\[\], tags\[\], applicationIds\[\], customFields\[\], resumeFileHandle, fileHandles\[\], source with sourceType, creditedToUser, fraudStatus, createdAt, updatedAt\) |
|
||||
| `jobs` | json | List of jobs \(id, title, confidential, status, employmentType, locationId, departmentId, defaultInterviewPlanId, interviewPlanIds\[\], customFields\[\], jobPostingIds\[\], customRequisitionId, brandId, hiringTeam\[\], author, createdAt, updatedAt, openedAt, closedAt, location with address, openings\[\] with latestVersion, compensation with compensationTiers\[\]\) |
|
||||
| `applications` | json | List of applications \(id, status, customFields\[\], candidate summary, currentInterviewStage, source with sourceType, archiveReason with customFields\[\], archivedAt, job summary, creditedToUser, hiringTeam\[\], appliedViaJobPostingId, submitterClientIp, submitterUserAgent, createdAt, updatedAt\) |
|
||||
| `notes` | json | List of notes \(id, content, author, isPrivate, createdAt\) |
|
||||
| `offers` | json | List of offers \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion with id/startDate/salary/createdAt/openingId/customFields\[\]/fileHandles\[\]/author/approvalStatus\) |
|
||||
| `archiveReasons` | json | List of archive reasons \(id, text, reasonType \[RejectedByCandidate/RejectedByOrg/Other\], isArchived\) |
|
||||
| `sources` | json | List of sources \(id, title, isArchived, sourceType \{id, title, isArchived\}\) |
|
||||
| `customFields` | json | List of custom field definitions \(id, title, isPrivate, fieldType, objectType, isArchived, isRequired, selectableValues\[\] \{label, value, isArchived\}\) |
|
||||
| `departments` | json | List of departments \(id, name, externalName, isArchived, parentId, createdAt, updatedAt\) |
|
||||
| `locations` | json | List of locations \(id, name, externalName, isArchived, isRemote, workplaceType, parentLocationId, type, address with addressCountry/Region/Locality/postalCode/streetAddress\) |
|
||||
| `jobPostings` | json | List of job postings \(id, title, jobId, departmentName, teamName, locationName, locationIds, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensationTierSummary, shouldDisplayCompensationOnJobBoard, updatedAt\) |
|
||||
| `openings` | json | List of openings \(id, openedAt, closedAt, isArchived, archivedAt, closeReasonId, openingState, latestVersion with identifier/description/authorId/createdAt/teamId/jobIds\[\]/targetHireDate/targetStartDate/isBackfill/employmentType/locationIds\[\]/hiringTeam\[\]/customFields\[\]\) |
|
||||
| `users` | json | List of users \(id, firstName, lastName, email, globalRole, isEnabled, updatedAt, managerId\) |
|
||||
| `interviewSchedules` | json | List of interview schedules \(id, applicationId, interviewStageId, interviewEvents\[\] with interviewerUserIds/startTime/endTime/feedbackLink/location/meetingLink/hasSubmittedFeedback, status, scheduledBy, createdAt, updatedAt\) |
|
||||
| `tags` | json | List of candidate tags \(id, title, isArchived\) |
|
||||
| `id` | string | Resource UUID |
|
||||
| `name` | string | Resource name |
|
||||
| `title` | string | Job title or job posting title |
|
||||
| `status` | string | Status |
|
||||
| `candidate` | json | Candidate details \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], customFields\[\], source, creditedToUser, createdAt, updatedAt\) |
|
||||
| `job` | json | Job details \(id, title, status, employmentType, locationId, departmentId, hiringTeam\[\], author, location, openings\[\], compensation, createdAt, updatedAt\) |
|
||||
| `application` | json | Application details \(id, status, customFields\[\], candidate, currentInterviewStage, source, archiveReason, job, hiringTeam\[\], createdAt, updatedAt\) |
|
||||
| `offer` | json | Offer details \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion\) |
|
||||
| `jobPosting` | json | Job posting details \(id, title, descriptionPlain, descriptionHtml, descriptionSocial, descriptionParts, departmentName, teamName, teamNameHierarchy\[\], jobId, locationName, locationIds, linkedData, address, isRemote, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensation, updatedAt\) |
|
||||
| `content` | string | Note content |
|
||||
| `author` | json | Note author \(id, firstName, lastName, email, globalRole, isEnabled\) |
|
||||
| `isPrivate` | boolean | Whether the note is private |
|
||||
| `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| `updatedAt` | string | ISO 8601 last update timestamp |
|
||||
| `moreDataAvailable` | boolean | Whether more pages exist |
|
||||
| `nextCursor` | string | Pagination cursor for next page |
|
||||
| `syncToken` | string | Sync token for incremental updates |
|
||||
|
||||
### `ashby_get_job_posting`
|
||||
|
||||
@@ -260,6 +417,8 @@ Retrieves full details about a single job posting by its ID.
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Ashby API Key |
|
||||
| `jobPostingId` | string | Yes | The UUID of the job posting to fetch |
|
||||
| `expandApplicationFormDefinition` | boolean | No | Include application form definition in the response |
|
||||
| `expandSurveyFormDefinitions` | boolean | No | Include survey form definitions in the response |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -267,14 +426,56 @@ Retrieves full details about a single job posting by its ID.
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Job posting UUID |
|
||||
| `title` | string | Job posting title |
|
||||
| `jobId` | string | Associated job UUID |
|
||||
| `locationName` | string | Location name |
|
||||
| `descriptionPlain` | string | Full description in plain text |
|
||||
| `descriptionHtml` | string | Full description in HTML |
|
||||
| `descriptionSocial` | string | Shortened description for social sharing \(max 200 chars\) |
|
||||
| `descriptionParts` | object | Description broken into opening, body, and closing sections |
|
||||
| ↳ `descriptionOpening` | object | Opening \(from Job Boards theme settings\) |
|
||||
| ↳ `html` | string | HTML content |
|
||||
| ↳ `plain` | string | Plain text content |
|
||||
| ↳ `descriptionBody` | object | Main description body |
|
||||
| ↳ `html` | string | HTML content |
|
||||
| ↳ `plain` | string | Plain text content |
|
||||
| ↳ `descriptionClosing` | object | Closing \(from Job Boards theme settings\) |
|
||||
| ↳ `html` | string | HTML content |
|
||||
| ↳ `plain` | string | Plain text content |
|
||||
| `departmentName` | string | Department name |
|
||||
| `employmentType` | string | Employment type \(e.g. FullTime, PartTime, Contract\) |
|
||||
| `descriptionPlain` | string | Job posting description in plain text |
|
||||
| `isListed` | boolean | Whether the posting is publicly listed |
|
||||
| `teamName` | string | Team name |
|
||||
| `teamNameHierarchy` | array | Hierarchy of team names from root to team |
|
||||
| `jobId` | string | Associated job UUID |
|
||||
| `locationName` | string | Primary location name |
|
||||
| `locationIds` | object | Primary and secondary location UUIDs |
|
||||
| ↳ `primaryLocationId` | string | Primary location UUID |
|
||||
| ↳ `secondaryLocationIds` | array | Secondary location UUIDs |
|
||||
| `address` | object | Postal address of the posting location |
|
||||
| ↳ `postalAddress` | object | Structured postal address |
|
||||
| ↳ `addressCountry` | string | Country |
|
||||
| ↳ `addressRegion` | string | State or region |
|
||||
| ↳ `addressLocality` | string | City or locality |
|
||||
| ↳ `postalCode` | string | Postal code |
|
||||
| ↳ `streetAddress` | string | Street address |
|
||||
| `isRemote` | boolean | Whether the posting is remote |
|
||||
| `workplaceType` | string | Workplace type \(OnSite, Remote, Hybrid\) |
|
||||
| `employmentType` | string | Employment type \(FullTime, PartTime, Intern, Contract, Temporary\) |
|
||||
| `isListed` | boolean | Whether publicly listed on the job board |
|
||||
| `suppressDescriptionOpening` | boolean | Whether the theme opening is hidden on this posting |
|
||||
| `suppressDescriptionClosing` | boolean | Whether the theme closing is hidden on this posting |
|
||||
| `publishedDate` | string | ISO 8601 published date |
|
||||
| `applicationDeadline` | string | ISO 8601 application deadline |
|
||||
| `externalLink` | string | External link to the job posting |
|
||||
| `applyLink` | string | Direct apply link |
|
||||
| `compensation` | object | Compensation details for the posting |
|
||||
| ↳ `compensationTierSummary` | string | Human-readable tier summary |
|
||||
| ↳ `summaryComponents` | array | Structured compensation components |
|
||||
| ↳ `summary` | string | Component summary |
|
||||
| ↳ `compensationTypeLabel` | string | Component type label \(Salary, Commission, Bonus, Equity, etc.\) |
|
||||
| ↳ `interval` | string | Payment interval \(e.g. annual, hourly\) |
|
||||
| ↳ `currencyCode` | string | ISO 4217 currency code |
|
||||
| ↳ `minValue` | number | Minimum value |
|
||||
| ↳ `maxValue` | number | Maximum value |
|
||||
| ↳ `shouldDisplayCompensationOnJobBoard` | boolean | Whether compensation is shown on the job board |
|
||||
| `applicationLimitCalloutHtml` | string | HTML callout shown when application limit is reached |
|
||||
| `updatedAt` | string | ISO 8601 last update timestamp |
|
||||
|
||||
### `ashby_get_offer`
|
||||
|
||||
@@ -291,20 +492,41 @@ Retrieves full details about a single offer by its ID.
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Offer UUID |
|
||||
| `offerStatus` | string | Offer status \(e.g. WaitingOnCandidateResponse, CandidateAccepted\) |
|
||||
| `acceptanceStatus` | string | Acceptance status \(e.g. Accepted, Declined, Pending\) |
|
||||
| `applicationId` | string | Associated application UUID |
|
||||
| `startDate` | string | Offer start date |
|
||||
| `salary` | object | Salary details |
|
||||
| ↳ `currencyCode` | string | ISO 4217 currency code |
|
||||
| ↳ `value` | number | Salary amount |
|
||||
| `openingId` | string | Associated opening UUID |
|
||||
| `createdAt` | string | ISO 8601 creation timestamp \(from latest version\) |
|
||||
| `candidates` | json | List of candidates with rich fields \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], linkedInUrl, githubUrl, profileUrl, position, company, school, timezone, location with locationComponents\[\], tags\[\], applicationIds\[\], customFields\[\], resumeFileHandle, fileHandles\[\], source with sourceType, creditedToUser, fraudStatus, createdAt, updatedAt\) |
|
||||
| `jobs` | json | List of jobs \(id, title, confidential, status, employmentType, locationId, departmentId, defaultInterviewPlanId, interviewPlanIds\[\], customFields\[\], jobPostingIds\[\], customRequisitionId, brandId, hiringTeam\[\], author, createdAt, updatedAt, openedAt, closedAt, location with address, openings\[\] with latestVersion, compensation with compensationTiers\[\]\) |
|
||||
| `applications` | json | List of applications \(id, status, customFields\[\], candidate summary, currentInterviewStage, source with sourceType, archiveReason with customFields\[\], archivedAt, job summary, creditedToUser, hiringTeam\[\], appliedViaJobPostingId, submitterClientIp, submitterUserAgent, createdAt, updatedAt\) |
|
||||
| `notes` | json | List of notes \(id, content, author, isPrivate, createdAt\) |
|
||||
| `offers` | json | List of offers \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion with id/startDate/salary/createdAt/openingId/customFields\[\]/fileHandles\[\]/author/approvalStatus\) |
|
||||
| `archiveReasons` | json | List of archive reasons \(id, text, reasonType \[RejectedByCandidate/RejectedByOrg/Other\], isArchived\) |
|
||||
| `sources` | json | List of sources \(id, title, isArchived, sourceType \{id, title, isArchived\}\) |
|
||||
| `customFields` | json | List of custom field definitions \(id, title, isPrivate, fieldType, objectType, isArchived, isRequired, selectableValues\[\] \{label, value, isArchived\}\) |
|
||||
| `departments` | json | List of departments \(id, name, externalName, isArchived, parentId, createdAt, updatedAt\) |
|
||||
| `locations` | json | List of locations \(id, name, externalName, isArchived, isRemote, workplaceType, parentLocationId, type, address with addressCountry/Region/Locality/postalCode/streetAddress\) |
|
||||
| `jobPostings` | json | List of job postings \(id, title, jobId, departmentName, teamName, locationName, locationIds, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensationTierSummary, shouldDisplayCompensationOnJobBoard, updatedAt\) |
|
||||
| `openings` | json | List of openings \(id, openedAt, closedAt, isArchived, archivedAt, closeReasonId, openingState, latestVersion with identifier/description/authorId/createdAt/teamId/jobIds\[\]/targetHireDate/targetStartDate/isBackfill/employmentType/locationIds\[\]/hiringTeam\[\]/customFields\[\]\) |
|
||||
| `users` | json | List of users \(id, firstName, lastName, email, globalRole, isEnabled, updatedAt, managerId\) |
|
||||
| `interviewSchedules` | json | List of interview schedules \(id, applicationId, interviewStageId, interviewEvents\[\] with interviewerUserIds/startTime/endTime/feedbackLink/location/meetingLink/hasSubmittedFeedback, status, scheduledBy, createdAt, updatedAt\) |
|
||||
| `tags` | json | List of candidate tags \(id, title, isArchived\) |
|
||||
| `id` | string | Resource UUID |
|
||||
| `name` | string | Resource name |
|
||||
| `title` | string | Job title or job posting title |
|
||||
| `status` | string | Status |
|
||||
| `candidate` | json | Candidate details \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], customFields\[\], source, creditedToUser, createdAt, updatedAt\) |
|
||||
| `job` | json | Job details \(id, title, status, employmentType, locationId, departmentId, hiringTeam\[\], author, location, openings\[\], compensation, createdAt, updatedAt\) |
|
||||
| `application` | json | Application details \(id, status, customFields\[\], candidate, currentInterviewStage, source, archiveReason, job, hiringTeam\[\], createdAt, updatedAt\) |
|
||||
| `offer` | json | Offer details \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion\) |
|
||||
| `jobPosting` | json | Job posting details \(id, title, descriptionPlain, descriptionHtml, descriptionSocial, descriptionParts, departmentName, teamName, teamNameHierarchy\[\], jobId, locationName, locationIds, linkedData, address, isRemote, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensation, updatedAt\) |
|
||||
| `content` | string | Note content |
|
||||
| `author` | json | Note author \(id, firstName, lastName, email, globalRole, isEnabled\) |
|
||||
| `isPrivate` | boolean | Whether the note is private |
|
||||
| `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| `moreDataAvailable` | boolean | Whether more pages exist |
|
||||
| `nextCursor` | string | Pagination cursor for next page |
|
||||
| `syncToken` | string | Sync token for incremental updates |
|
||||
|
||||
### `ashby_list_applications`
|
||||
|
||||
Lists all applications in an Ashby organization with pagination and optional filters for status, job, candidate, and creation date.
|
||||
Lists all applications in an Ashby organization with pagination and optional filters for status, job, and creation date.
|
||||
|
||||
#### Input
|
||||
|
||||
@@ -315,7 +537,6 @@ Lists all applications in an Ashby organization with pagination and optional fil
|
||||
| `perPage` | number | No | Number of results per page \(default 100\) |
|
||||
| `status` | string | No | Filter by application status: Active, Hired, Archived, or Lead |
|
||||
| `jobId` | string | No | Filter applications by a specific job UUID |
|
||||
| `candidateId` | string | No | Filter applications by a specific candidate UUID |
|
||||
| `createdAfter` | string | No | Filter to applications created after this ISO 8601 timestamp \(e.g. 2024-01-01T00:00:00Z\) |
|
||||
|
||||
#### Output
|
||||
@@ -323,23 +544,6 @@ Lists all applications in an Ashby organization with pagination and optional fil
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `applications` | array | List of applications |
|
||||
| ↳ `id` | string | Application UUID |
|
||||
| ↳ `status` | string | Application status \(Active, Hired, Archived, Lead\) |
|
||||
| ↳ `candidate` | object | Associated candidate |
|
||||
| ↳ `id` | string | Candidate UUID |
|
||||
| ↳ `name` | string | Candidate name |
|
||||
| ↳ `job` | object | Associated job |
|
||||
| ↳ `id` | string | Job UUID |
|
||||
| ↳ `title` | string | Job title |
|
||||
| ↳ `currentInterviewStage` | object | Current interview stage |
|
||||
| ↳ `id` | string | Stage UUID |
|
||||
| ↳ `title` | string | Stage title |
|
||||
| ↳ `type` | string | Stage type |
|
||||
| ↳ `source` | object | Application source |
|
||||
| ↳ `id` | string | Source UUID |
|
||||
| ↳ `title` | string | Source title |
|
||||
| ↳ `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| ↳ `updatedAt` | string | ISO 8601 last update timestamp |
|
||||
| `moreDataAvailable` | boolean | Whether more pages of results exist |
|
||||
| `nextCursor` | string | Opaque cursor for fetching the next page |
|
||||
|
||||
@@ -352,6 +556,7 @@ Lists all archive reasons configured in Ashby.
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Ashby API Key |
|
||||
| `includeArchived` | boolean | No | Whether to include archived archive reasons in the response \(default false\) |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -360,7 +565,7 @@ Lists all archive reasons configured in Ashby.
|
||||
| `archiveReasons` | array | List of archive reasons |
|
||||
| ↳ `id` | string | Archive reason UUID |
|
||||
| ↳ `text` | string | Archive reason text |
|
||||
| ↳ `reasonType` | string | Reason type |
|
||||
| ↳ `reasonType` | string | Reason type \(RejectedByCandidate, RejectedByOrg, Other\) |
|
||||
| ↳ `isArchived` | boolean | Whether the reason is archived |
|
||||
|
||||
### `ashby_list_candidate_tags`
|
||||
@@ -372,6 +577,10 @@ Lists all candidate tags configured in Ashby.
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Ashby API Key |
|
||||
| `includeArchived` | boolean | No | Whether to include archived candidate tags \(default false\) |
|
||||
| `cursor` | string | No | Opaque pagination cursor from a previous response nextCursor value |
|
||||
| `syncToken` | string | No | Sync token from a previous response to fetch only changed results |
|
||||
| `perPage` | number | No | Number of results per page \(default 100\) |
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -381,6 +590,9 @@ Lists all candidate tags configured in Ashby.
|
||||
| ↳ `id` | string | Tag UUID |
|
||||
| ↳ `title` | string | Tag title |
|
||||
| ↳ `isArchived` | boolean | Whether the tag is archived |
|
||||
| `moreDataAvailable` | boolean | Whether more pages of results exist |
|
||||
| `nextCursor` | string | Opaque cursor for fetching the next page |
|
||||
| `syncToken` | string | Sync token to use for incremental updates in future requests |
|
||||
|
||||
### `ashby_list_candidates`
|
||||
|
||||
@@ -399,18 +611,6 @@ Lists all candidates in an Ashby organization with cursor-based pagination.
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `candidates` | array | List of candidates |
|
||||
| ↳ `id` | string | Candidate UUID |
|
||||
| ↳ `name` | string | Full name |
|
||||
| ↳ `primaryEmailAddress` | object | Primary email contact info |
|
||||
| ↳ `value` | string | Email address |
|
||||
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
|
||||
| ↳ `isPrimary` | boolean | Whether this is the primary email |
|
||||
| ↳ `primaryPhoneNumber` | object | Primary phone contact info |
|
||||
| ↳ `value` | string | Phone number |
|
||||
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
|
||||
| ↳ `isPrimary` | boolean | Whether this is the primary phone |
|
||||
| ↳ `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| ↳ `updatedAt` | string | ISO 8601 last update timestamp |
|
||||
| `moreDataAvailable` | boolean | Whether more pages of results exist |
|
||||
| `nextCursor` | string | Opaque cursor for fetching the next page |
|
||||
|
||||
@@ -431,9 +631,15 @@ Lists all custom field definitions configured in Ashby.
|
||||
| `customFields` | array | List of custom field definitions |
|
||||
| ↳ `id` | string | Custom field UUID |
|
||||
| ↳ `title` | string | Custom field title |
|
||||
| ↳ `fieldType` | string | Field type \(e.g. String, Number, Boolean\) |
|
||||
| ↳ `objectType` | string | Object type the field applies to \(e.g. Candidate, Application, Job\) |
|
||||
| ↳ `isPrivate` | boolean | Whether the custom field is private |
|
||||
| ↳ `fieldType` | string | Field data type \(MultiValueSelect, NumberRange, String, Date, ValueSelect, Number, Currency, Boolean, LongText, CompensationRange\) |
|
||||
| ↳ `objectType` | string | Object type the field applies to \(Application, Candidate, Employee, Job, Offer, Opening, Talent_Project\) |
|
||||
| ↳ `isArchived` | boolean | Whether the custom field is archived |
|
||||
| ↳ `isRequired` | boolean | Whether a value is required |
|
||||
| ↳ `selectableValues` | array | Selectable values for MultiValueSelect fields \(empty for other field types\) |
|
||||
| ↳ `label` | string | Display label |
|
||||
| ↳ `value` | string | Stored value |
|
||||
| ↳ `isArchived` | boolean | Whether archived |
|
||||
|
||||
### `ashby_list_departments`
|
||||
|
||||
@@ -452,8 +658,11 @@ Lists all departments in Ashby.
|
||||
| `departments` | array | List of departments |
|
||||
| ↳ `id` | string | Department UUID |
|
||||
| ↳ `name` | string | Department name |
|
||||
| ↳ `externalName` | string | Candidate-facing name used on job boards |
|
||||
| ↳ `isArchived` | boolean | Whether the department is archived |
|
||||
| ↳ `parentId` | string | Parent department UUID |
|
||||
| ↳ `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| ↳ `updatedAt` | string | ISO 8601 last update timestamp |
|
||||
|
||||
### `ashby_list_interviews`
|
||||
|
||||
@@ -475,10 +684,24 @@ Lists interview schedules in Ashby, optionally filtered by application or interv
|
||||
| --------- | ---- | ----------- |
|
||||
| `interviewSchedules` | array | List of interview schedules |
|
||||
| ↳ `id` | string | Interview schedule UUID |
|
||||
| ↳ `status` | string | Schedule status \(NeedsScheduling, WaitingOnCandidateBooking, Scheduled, Complete, Cancelled, OnHold, etc.\) |
|
||||
| ↳ `applicationId` | string | Associated application UUID |
|
||||
| ↳ `interviewStageId` | string | Interview stage UUID |
|
||||
| ↳ `status` | string | Schedule status |
|
||||
| ↳ `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| ↳ `updatedAt` | string | ISO 8601 last update timestamp |
|
||||
| ↳ `interviewEvents` | array | Scheduled interview events on this schedule |
|
||||
| ↳ `id` | string | Event UUID |
|
||||
| ↳ `interviewId` | string | Interview template UUID |
|
||||
| ↳ `interviewScheduleId` | string | Parent schedule UUID |
|
||||
| ↳ `interviewerUserIds` | array | User UUIDs of interviewers assigned to the event |
|
||||
| ↳ `createdAt` | string | Event creation timestamp |
|
||||
| ↳ `updatedAt` | string | Event last updated timestamp |
|
||||
| ↳ `startTime` | string | Event start time |
|
||||
| ↳ `endTime` | string | Event end time |
|
||||
| ↳ `feedbackLink` | string | URL to submit feedback for the event |
|
||||
| ↳ `location` | string | Physical location |
|
||||
| ↳ `meetingLink` | string | Virtual meeting URL |
|
||||
| ↳ `hasSubmittedFeedback` | boolean | Whether any feedback has been submitted |
|
||||
| `moreDataAvailable` | boolean | Whether more pages of results exist |
|
||||
| `nextCursor` | string | Opaque cursor for fetching the next page |
|
||||
|
||||
@@ -500,11 +723,22 @@ Lists all job postings in Ashby.
|
||||
| ↳ `id` | string | Job posting UUID |
|
||||
| ↳ `title` | string | Job posting title |
|
||||
| ↳ `jobId` | string | Associated job UUID |
|
||||
| ↳ `locationName` | string | Location name |
|
||||
| ↳ `departmentName` | string | Department name |
|
||||
| ↳ `employmentType` | string | Employment type \(e.g. FullTime, PartTime, Contract\) |
|
||||
| ↳ `teamName` | string | Team name |
|
||||
| ↳ `locationName` | string | Primary location display name |
|
||||
| ↳ `locationIds` | object | Primary and secondary location UUIDs |
|
||||
| ↳ `primaryLocationId` | string | Primary location UUID |
|
||||
| ↳ `secondaryLocationIds` | array | Secondary location UUIDs |
|
||||
| ↳ `workplaceType` | string | Workplace type \(OnSite, Remote, Hybrid\) |
|
||||
| ↳ `employmentType` | string | Employment type \(FullTime, PartTime, Intern, Contract, Temporary\) |
|
||||
| ↳ `isListed` | boolean | Whether the posting is publicly listed |
|
||||
| ↳ `publishedDate` | string | ISO 8601 published date |
|
||||
| ↳ `applicationDeadline` | string | ISO 8601 application deadline |
|
||||
| ↳ `externalLink` | string | External link to the job posting |
|
||||
| ↳ `applyLink` | string | Direct apply link for the job posting |
|
||||
| ↳ `compensationTierSummary` | string | Compensation tier summary for job boards |
|
||||
| ↳ `shouldDisplayCompensationOnJobBoard` | boolean | Whether compensation is shown on the job board |
|
||||
| ↳ `updatedAt` | string | ISO 8601 last update timestamp |
|
||||
|
||||
### `ashby_list_jobs`
|
||||
|
||||
@@ -524,14 +758,6 @@ Lists all jobs in an Ashby organization. By default returns Open, Closed, and Ar
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `jobs` | array | List of jobs |
|
||||
| ↳ `id` | string | Job UUID |
|
||||
| ↳ `title` | string | Job title |
|
||||
| ↳ `status` | string | Job status \(Open, Closed, Archived, Draft\) |
|
||||
| ↳ `employmentType` | string | Employment type \(FullTime, PartTime, Intern, Contract, Temporary\) |
|
||||
| ↳ `departmentId` | string | Department UUID |
|
||||
| ↳ `locationId` | string | Location UUID |
|
||||
| ↳ `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| ↳ `updatedAt` | string | ISO 8601 last update timestamp |
|
||||
| `moreDataAvailable` | boolean | Whether more pages of results exist |
|
||||
| `nextCursor` | string | Opaque cursor for fetching the next page |
|
||||
|
||||
@@ -552,12 +778,18 @@ Lists all locations configured in Ashby.
|
||||
| `locations` | array | List of locations |
|
||||
| ↳ `id` | string | Location UUID |
|
||||
| ↳ `name` | string | Location name |
|
||||
| ↳ `externalName` | string | Candidate-facing name used on job boards |
|
||||
| ↳ `isArchived` | boolean | Whether the location is archived |
|
||||
| ↳ `isRemote` | boolean | Whether this is a remote location |
|
||||
| ↳ `address` | object | Location address |
|
||||
| ↳ `city` | string | City |
|
||||
| ↳ `region` | string | State or region |
|
||||
| ↳ `country` | string | Country |
|
||||
| ↳ `isRemote` | boolean | Whether the location is remote \(use workplaceType instead\) |
|
||||
| ↳ `workplaceType` | string | Workplace type \(OnSite, Hybrid, Remote\) |
|
||||
| ↳ `parentLocationId` | string | Parent location UUID |
|
||||
| ↳ `type` | string | Location component type \(Location, LocationHierarchy\) |
|
||||
| ↳ `address` | object | Location postal address |
|
||||
| ↳ `addressCountry` | string | Country |
|
||||
| ↳ `addressRegion` | string | State or region |
|
||||
| ↳ `addressLocality` | string | City or locality |
|
||||
| ↳ `postalCode` | string | Postal code |
|
||||
| ↳ `streetAddress` | string | Street address |
|
||||
|
||||
### `ashby_list_notes`
|
||||
|
||||
@@ -579,6 +811,7 @@ Lists all notes on a candidate with pagination support.
|
||||
| `notes` | array | List of notes on the candidate |
|
||||
| ↳ `id` | string | Note UUID |
|
||||
| ↳ `content` | string | Note content |
|
||||
| ↳ `isPrivate` | boolean | Whether the note is private |
|
||||
| ↳ `author` | object | Note author |
|
||||
| ↳ `id` | string | Author user UUID |
|
||||
| ↳ `firstName` | string | First name |
|
||||
@@ -605,16 +838,6 @@ Lists all offers with their latest version in an Ashby organization.
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `offers` | array | List of offers |
|
||||
| ↳ `id` | string | Offer UUID |
|
||||
| ↳ `offerStatus` | string | Offer status |
|
||||
| ↳ `acceptanceStatus` | string | Acceptance status |
|
||||
| ↳ `applicationId` | string | Associated application UUID |
|
||||
| ↳ `startDate` | string | Offer start date |
|
||||
| ↳ `salary` | object | Salary details |
|
||||
| ↳ `currencyCode` | string | ISO 4217 currency code |
|
||||
| ↳ `value` | number | Salary amount |
|
||||
| ↳ `openingId` | string | Associated opening UUID |
|
||||
| ↳ `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| `moreDataAvailable` | boolean | Whether more pages of results exist |
|
||||
| `nextCursor` | string | Opaque cursor for fetching the next page |
|
||||
|
||||
@@ -634,12 +857,6 @@ Lists all openings in Ashby with pagination.
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `openings` | array | List of openings |
|
||||
| ↳ `id` | string | Opening UUID |
|
||||
| ↳ `openingState` | string | Opening state \(Approved, Closed, Draft, Filled, Open\) |
|
||||
| ↳ `isArchived` | boolean | Whether the opening is archived |
|
||||
| ↳ `openedAt` | string | ISO 8601 opened timestamp |
|
||||
| ↳ `closedAt` | string | ISO 8601 closed timestamp |
|
||||
| `moreDataAvailable` | boolean | Whether more pages of results exist |
|
||||
| `nextCursor` | string | Opaque cursor for fetching the next page |
|
||||
|
||||
@@ -661,6 +878,10 @@ Lists all candidate sources configured in Ashby.
|
||||
| ↳ `id` | string | Source UUID |
|
||||
| ↳ `title` | string | Source title |
|
||||
| ↳ `isArchived` | boolean | Whether the source is archived |
|
||||
| ↳ `sourceType` | object | Source type grouping |
|
||||
| ↳ `id` | string | Source type UUID |
|
||||
| ↳ `title` | string | Source type title |
|
||||
| ↳ `isArchived` | boolean | Whether archived |
|
||||
|
||||
### `ashby_list_users`
|
||||
|
||||
@@ -679,18 +900,12 @@ Lists all users in Ashby with pagination.
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `users` | array | List of users |
|
||||
| ↳ `id` | string | User UUID |
|
||||
| ↳ `firstName` | string | First name |
|
||||
| ↳ `lastName` | string | Last name |
|
||||
| ↳ `email` | string | Email address |
|
||||
| ↳ `isEnabled` | boolean | Whether the user account is enabled |
|
||||
| ↳ `globalRole` | string | User role \(Organization Admin, Elevated Access, Limited Access, External Recruiter\) |
|
||||
| `moreDataAvailable` | boolean | Whether more pages of results exist |
|
||||
| `nextCursor` | string | Opaque cursor for fetching the next page |
|
||||
|
||||
### `ashby_remove_candidate_tag`
|
||||
|
||||
Removes a tag from a candidate in Ashby.
|
||||
Removes a tag from a candidate in Ashby and returns the updated candidate.
|
||||
|
||||
#### Input
|
||||
|
||||
@@ -704,7 +919,37 @@ Removes a tag from a candidate in Ashby.
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Whether the tag was successfully removed |
|
||||
| `candidates` | json | List of candidates with rich fields \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], linkedInUrl, githubUrl, profileUrl, position, company, school, timezone, location with locationComponents\[\], tags\[\], applicationIds\[\], customFields\[\], resumeFileHandle, fileHandles\[\], source with sourceType, creditedToUser, fraudStatus, createdAt, updatedAt\) |
|
||||
| `jobs` | json | List of jobs \(id, title, confidential, status, employmentType, locationId, departmentId, defaultInterviewPlanId, interviewPlanIds\[\], customFields\[\], jobPostingIds\[\], customRequisitionId, brandId, hiringTeam\[\], author, createdAt, updatedAt, openedAt, closedAt, location with address, openings\[\] with latestVersion, compensation with compensationTiers\[\]\) |
|
||||
| `applications` | json | List of applications \(id, status, customFields\[\], candidate summary, currentInterviewStage, source with sourceType, archiveReason with customFields\[\], archivedAt, job summary, creditedToUser, hiringTeam\[\], appliedViaJobPostingId, submitterClientIp, submitterUserAgent, createdAt, updatedAt\) |
|
||||
| `notes` | json | List of notes \(id, content, author, isPrivate, createdAt\) |
|
||||
| `offers` | json | List of offers \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion with id/startDate/salary/createdAt/openingId/customFields\[\]/fileHandles\[\]/author/approvalStatus\) |
|
||||
| `archiveReasons` | json | List of archive reasons \(id, text, reasonType \[RejectedByCandidate/RejectedByOrg/Other\], isArchived\) |
|
||||
| `sources` | json | List of sources \(id, title, isArchived, sourceType \{id, title, isArchived\}\) |
|
||||
| `customFields` | json | List of custom field definitions \(id, title, isPrivate, fieldType, objectType, isArchived, isRequired, selectableValues\[\] \{label, value, isArchived\}\) |
|
||||
| `departments` | json | List of departments \(id, name, externalName, isArchived, parentId, createdAt, updatedAt\) |
|
||||
| `locations` | json | List of locations \(id, name, externalName, isArchived, isRemote, workplaceType, parentLocationId, type, address with addressCountry/Region/Locality/postalCode/streetAddress\) |
|
||||
| `jobPostings` | json | List of job postings \(id, title, jobId, departmentName, teamName, locationName, locationIds, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensationTierSummary, shouldDisplayCompensationOnJobBoard, updatedAt\) |
|
||||
| `openings` | json | List of openings \(id, openedAt, closedAt, isArchived, archivedAt, closeReasonId, openingState, latestVersion with identifier/description/authorId/createdAt/teamId/jobIds\[\]/targetHireDate/targetStartDate/isBackfill/employmentType/locationIds\[\]/hiringTeam\[\]/customFields\[\]\) |
|
||||
| `users` | json | List of users \(id, firstName, lastName, email, globalRole, isEnabled, updatedAt, managerId\) |
|
||||
| `interviewSchedules` | json | List of interview schedules \(id, applicationId, interviewStageId, interviewEvents\[\] with interviewerUserIds/startTime/endTime/feedbackLink/location/meetingLink/hasSubmittedFeedback, status, scheduledBy, createdAt, updatedAt\) |
|
||||
| `tags` | json | List of candidate tags \(id, title, isArchived\) |
|
||||
| `id` | string | Resource UUID |
|
||||
| `name` | string | Resource name |
|
||||
| `title` | string | Job title or job posting title |
|
||||
| `status` | string | Status |
|
||||
| `candidate` | json | Candidate details \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], customFields\[\], source, creditedToUser, createdAt, updatedAt\) |
|
||||
| `job` | json | Job details \(id, title, status, employmentType, locationId, departmentId, hiringTeam\[\], author, location, openings\[\], compensation, createdAt, updatedAt\) |
|
||||
| `application` | json | Application details \(id, status, customFields\[\], candidate, currentInterviewStage, source, archiveReason, job, hiringTeam\[\], createdAt, updatedAt\) |
|
||||
| `offer` | json | Offer details \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion\) |
|
||||
| `jobPosting` | json | Job posting details \(id, title, descriptionPlain, descriptionHtml, descriptionSocial, descriptionParts, departmentName, teamName, teamNameHierarchy\[\], jobId, locationName, locationIds, linkedData, address, isRemote, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensation, updatedAt\) |
|
||||
| `content` | string | Note content |
|
||||
| `author` | json | Note author \(id, firstName, lastName, email, globalRole, isEnabled\) |
|
||||
| `isPrivate` | boolean | Whether the note is private |
|
||||
| `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| `moreDataAvailable` | boolean | Whether more pages exist |
|
||||
| `nextCursor` | string | Pagination cursor for next page |
|
||||
| `syncToken` | string | Sync token for incremental updates |
|
||||
|
||||
### `ashby_search_candidates`
|
||||
|
||||
@@ -723,18 +968,6 @@ Searches for candidates by name and/or email with AND logic. Results are limited
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `candidates` | array | Matching candidates \(max 100 results\) |
|
||||
| ↳ `id` | string | Candidate UUID |
|
||||
| ↳ `name` | string | Full name |
|
||||
| ↳ `primaryEmailAddress` | object | Primary email contact info |
|
||||
| ↳ `value` | string | Email address |
|
||||
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
|
||||
| ↳ `isPrimary` | boolean | Whether this is the primary email |
|
||||
| ↳ `primaryPhoneNumber` | object | Primary phone contact info |
|
||||
| ↳ `value` | string | Phone number |
|
||||
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
|
||||
| ↳ `isPrimary` | boolean | Whether this is the primary phone |
|
||||
| ↳ `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| ↳ `updatedAt` | string | ISO 8601 last update timestamp |
|
||||
|
||||
### `ashby_update_candidate`
|
||||
|
||||
@@ -758,26 +991,36 @@ Updates an existing candidate record in Ashby. Only provided fields are changed.
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Candidate UUID |
|
||||
| `name` | string | Full name |
|
||||
| `primaryEmailAddress` | object | Primary email contact info |
|
||||
| ↳ `value` | string | Email address |
|
||||
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
|
||||
| ↳ `isPrimary` | boolean | Whether this is the primary email |
|
||||
| `primaryPhoneNumber` | object | Primary phone contact info |
|
||||
| ↳ `value` | string | Phone number |
|
||||
| ↳ `type` | string | Contact type \(Personal, Work, Other\) |
|
||||
| ↳ `isPrimary` | boolean | Whether this is the primary phone |
|
||||
| `profileUrl` | string | URL to the candidate Ashby profile |
|
||||
| `position` | string | Current position or title |
|
||||
| `company` | string | Current company |
|
||||
| `linkedInUrl` | string | LinkedIn profile URL |
|
||||
| `githubUrl` | string | GitHub profile URL |
|
||||
| `tags` | array | Tags applied to the candidate |
|
||||
| ↳ `id` | string | Tag UUID |
|
||||
| ↳ `title` | string | Tag title |
|
||||
| `applicationIds` | array | IDs of associated applications |
|
||||
| `candidates` | json | List of candidates with rich fields \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], linkedInUrl, githubUrl, profileUrl, position, company, school, timezone, location with locationComponents\[\], tags\[\], applicationIds\[\], customFields\[\], resumeFileHandle, fileHandles\[\], source with sourceType, creditedToUser, fraudStatus, createdAt, updatedAt\) |
|
||||
| `jobs` | json | List of jobs \(id, title, confidential, status, employmentType, locationId, departmentId, defaultInterviewPlanId, interviewPlanIds\[\], customFields\[\], jobPostingIds\[\], customRequisitionId, brandId, hiringTeam\[\], author, createdAt, updatedAt, openedAt, closedAt, location with address, openings\[\] with latestVersion, compensation with compensationTiers\[\]\) |
|
||||
| `applications` | json | List of applications \(id, status, customFields\[\], candidate summary, currentInterviewStage, source with sourceType, archiveReason with customFields\[\], archivedAt, job summary, creditedToUser, hiringTeam\[\], appliedViaJobPostingId, submitterClientIp, submitterUserAgent, createdAt, updatedAt\) |
|
||||
| `notes` | json | List of notes \(id, content, author, isPrivate, createdAt\) |
|
||||
| `offers` | json | List of offers \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion with id/startDate/salary/createdAt/openingId/customFields\[\]/fileHandles\[\]/author/approvalStatus\) |
|
||||
| `archiveReasons` | json | List of archive reasons \(id, text, reasonType \[RejectedByCandidate/RejectedByOrg/Other\], isArchived\) |
|
||||
| `sources` | json | List of sources \(id, title, isArchived, sourceType \{id, title, isArchived\}\) |
|
||||
| `customFields` | json | List of custom field definitions \(id, title, isPrivate, fieldType, objectType, isArchived, isRequired, selectableValues\[\] \{label, value, isArchived\}\) |
|
||||
| `departments` | json | List of departments \(id, name, externalName, isArchived, parentId, createdAt, updatedAt\) |
|
||||
| `locations` | json | List of locations \(id, name, externalName, isArchived, isRemote, workplaceType, parentLocationId, type, address with addressCountry/Region/Locality/postalCode/streetAddress\) |
|
||||
| `jobPostings` | json | List of job postings \(id, title, jobId, departmentName, teamName, locationName, locationIds, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensationTierSummary, shouldDisplayCompensationOnJobBoard, updatedAt\) |
|
||||
| `openings` | json | List of openings \(id, openedAt, closedAt, isArchived, archivedAt, closeReasonId, openingState, latestVersion with identifier/description/authorId/createdAt/teamId/jobIds\[\]/targetHireDate/targetStartDate/isBackfill/employmentType/locationIds\[\]/hiringTeam\[\]/customFields\[\]\) |
|
||||
| `users` | json | List of users \(id, firstName, lastName, email, globalRole, isEnabled, updatedAt, managerId\) |
|
||||
| `interviewSchedules` | json | List of interview schedules \(id, applicationId, interviewStageId, interviewEvents\[\] with interviewerUserIds/startTime/endTime/feedbackLink/location/meetingLink/hasSubmittedFeedback, status, scheduledBy, createdAt, updatedAt\) |
|
||||
| `tags` | json | List of candidate tags \(id, title, isArchived\) |
|
||||
| `id` | string | Resource UUID |
|
||||
| `name` | string | Resource name |
|
||||
| `title` | string | Job title or job posting title |
|
||||
| `status` | string | Status |
|
||||
| `candidate` | json | Candidate details \(id, name, primaryEmailAddress, primaryPhoneNumber, emailAddresses\[\], phoneNumbers\[\], socialLinks\[\], customFields\[\], source, creditedToUser, createdAt, updatedAt\) |
|
||||
| `job` | json | Job details \(id, title, status, employmentType, locationId, departmentId, hiringTeam\[\], author, location, openings\[\], compensation, createdAt, updatedAt\) |
|
||||
| `application` | json | Application details \(id, status, customFields\[\], candidate, currentInterviewStage, source, archiveReason, job, hiringTeam\[\], createdAt, updatedAt\) |
|
||||
| `offer` | json | Offer details \(id, decidedAt, applicationId, acceptanceStatus, offerStatus, latestVersion\) |
|
||||
| `jobPosting` | json | Job posting details \(id, title, descriptionPlain, descriptionHtml, descriptionSocial, descriptionParts, departmentName, teamName, teamNameHierarchy\[\], jobId, locationName, locationIds, linkedData, address, isRemote, workplaceType, employmentType, isListed, publishedDate, applicationDeadline, externalLink, applyLink, compensation, updatedAt\) |
|
||||
| `content` | string | Note content |
|
||||
| `author` | json | Note author \(id, firstName, lastName, email, globalRole, isEnabled\) |
|
||||
| `isPrivate` | boolean | Whether the note is private |
|
||||
| `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| `updatedAt` | string | ISO 8601 last update timestamp |
|
||||
| `moreDataAvailable` | boolean | Whether more pages exist |
|
||||
| `nextCursor` | string | Pagination cursor for next page |
|
||||
| `syncToken` | string | Sync token for incremental updates |
|
||||
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
"index",
|
||||
"a2a",
|
||||
"agentmail",
|
||||
"agentphone",
|
||||
"agiloft",
|
||||
"ahrefs",
|
||||
"airtable",
|
||||
|
||||
@@ -97,6 +97,14 @@ Trigger workflow when a candidate is hired
|
||||
| ↳ `job` | object | job output from the tool |
|
||||
| ↳ `id` | string | Job UUID |
|
||||
| ↳ `title` | string | Job title |
|
||||
| `offer` | object | offer output from the tool |
|
||||
| ↳ `id` | string | Accepted offer UUID |
|
||||
| ↳ `applicationId` | string | Associated application UUID |
|
||||
| ↳ `acceptanceStatus` | string | Offer acceptance status |
|
||||
| ↳ `offerStatus` | string | Offer process status |
|
||||
| ↳ `decidedAt` | string | Offer decision timestamp \(ISO 8601\) |
|
||||
| ↳ `latestVersion` | object | latestVersion output from the tool |
|
||||
| ↳ `id` | string | Latest offer version UUID |
|
||||
|
||||
|
||||
---
|
||||
|
||||
30
apps/realtime/.env.example
Normal file
30
apps/realtime/.env.example
Normal file
@@ -0,0 +1,30 @@
|
||||
# Environment variables required by the @sim/realtime (Socket.IO) server.
|
||||
# These MUST match the corresponding values in apps/sim/.env for auth to work.
|
||||
# See apps/realtime/src/env.ts for the full zod schema.
|
||||
|
||||
# Core
|
||||
NODE_ENV=development
|
||||
PORT=3002
|
||||
|
||||
# Database — must point at the same Postgres as the main app
|
||||
DATABASE_URL=postgresql://postgres:postgres@localhost:5432/simstudio
|
||||
|
||||
# Auth — shared with apps/sim (Better Auth "Shared Database Session" pattern)
|
||||
BETTER_AUTH_URL=http://localhost:3000
|
||||
BETTER_AUTH_SECRET=your_better_auth_secret_min_32_chars
|
||||
|
||||
# Internal RPC — shared with apps/sim
|
||||
INTERNAL_API_SECRET=your_internal_api_secret_min_32_chars
|
||||
|
||||
# Public app URL — used for CORS allow-list and base URL resolution
|
||||
NEXT_PUBLIC_APP_URL=http://localhost:3000
|
||||
|
||||
# Optional: Redis for cross-pod room management
|
||||
# Leave unset for single-pod / in-memory rooms
|
||||
# REDIS_URL=redis://localhost:6379
|
||||
|
||||
# Optional: extra Socket.IO CORS allow-list (comma-separated)
|
||||
# ALLOWED_ORIGINS=https://embed.example.com,https://admin.example.com
|
||||
|
||||
# Optional: disable auth entirely for trusted private networks
|
||||
# DISABLE_AUTH=true
|
||||
48
apps/realtime/package.json
Normal file
48
apps/realtime/package.json
Normal file
@@ -0,0 +1,48 @@
|
||||
{
|
||||
"name": "@sim/realtime",
|
||||
"version": "0.1.0",
|
||||
"private": true,
|
||||
"license": "Apache-2.0",
|
||||
"type": "module",
|
||||
"engines": {
|
||||
"bun": ">=1.2.13",
|
||||
"node": ">=20.0.0"
|
||||
},
|
||||
"scripts": {
|
||||
"dev": "bun --watch src/index.ts",
|
||||
"start": "bun src/index.ts",
|
||||
"type-check": "tsc --noEmit",
|
||||
"lint": "biome check --write --unsafe .",
|
||||
"lint:check": "biome check .",
|
||||
"format": "biome format --write .",
|
||||
"format:check": "biome format .",
|
||||
"test": "vitest run",
|
||||
"test:watch": "vitest"
|
||||
},
|
||||
"dependencies": {
|
||||
"@sim/audit": "workspace:*",
|
||||
"@sim/auth": "workspace:*",
|
||||
"@sim/db": "workspace:*",
|
||||
"@sim/logger": "workspace:*",
|
||||
"@sim/realtime-protocol": "workspace:*",
|
||||
"@sim/security": "workspace:*",
|
||||
"@sim/utils": "workspace:*",
|
||||
"@sim/workflow-authz": "workspace:*",
|
||||
"@sim/workflow-persistence": "workspace:*",
|
||||
"@sim/workflow-types": "workspace:*",
|
||||
"@socket.io/redis-adapter": "8.3.0",
|
||||
"drizzle-orm": "^0.45.2",
|
||||
"postgres": "^3.4.5",
|
||||
"redis": "5.10.0",
|
||||
"socket.io": "^4.8.1",
|
||||
"zod": "^3.24.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@sim/testing": "workspace:*",
|
||||
"@sim/tsconfig": "workspace:*",
|
||||
"@types/node": "24.2.1",
|
||||
"socket.io-client": "4.8.1",
|
||||
"typescript": "^5.7.3",
|
||||
"vitest": "^3.0.8"
|
||||
}
|
||||
}
|
||||
17
apps/realtime/src/auth.ts
Normal file
17
apps/realtime/src/auth.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { createVerifyAuth } from '@sim/auth/verify'
|
||||
import { env } from '@/env'
|
||||
|
||||
export const ANONYMOUS_USER_ID = '00000000-0000-0000-0000-000000000000'
|
||||
|
||||
export const ANONYMOUS_USER = {
|
||||
id: ANONYMOUS_USER_ID,
|
||||
name: 'Anonymous',
|
||||
email: 'anonymous@localhost',
|
||||
emailVerified: true,
|
||||
image: null,
|
||||
} as const
|
||||
|
||||
export const auth = createVerifyAuth({
|
||||
secret: env.BETTER_AUTH_SECRET,
|
||||
baseURL: env.BETTER_AUTH_URL,
|
||||
})
|
||||
@@ -3,9 +3,7 @@ import { createLogger } from '@sim/logger'
|
||||
import { createAdapter } from '@socket.io/redis-adapter'
|
||||
import { createClient, type RedisClientType } from 'redis'
|
||||
import { Server } from 'socket.io'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { isProd } from '@/lib/core/config/feature-flags'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { env, getBaseUrl, isProd } from '@/env'
|
||||
|
||||
const logger = createLogger('SocketIOConfig')
|
||||
|
||||
@@ -1,15 +1,7 @@
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit'
|
||||
import * as schema from '@sim/db'
|
||||
import { webhook, workflow, workflowBlocks, workflowEdges, workflowSubflows } from '@sim/db'
|
||||
import { workflow, workflowBlocks, workflowEdges, workflowSubflows } from '@sim/db'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, inArray, isNull, or, sql } from 'drizzle-orm'
|
||||
import { drizzle } from 'drizzle-orm/postgres-js'
|
||||
import postgres from 'postgres'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { cleanupExternalWebhook } from '@/lib/webhooks/provider-subscriptions'
|
||||
import { getActiveWorkflowContext } from '@/lib/workflows/active-context'
|
||||
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
|
||||
import { mergeSubBlockValues } from '@/lib/workflows/subblocks'
|
||||
import {
|
||||
BLOCK_OPERATIONS,
|
||||
BLOCKS_OPERATIONS,
|
||||
@@ -19,7 +11,14 @@ import {
|
||||
SUBFLOW_OPERATIONS,
|
||||
VARIABLE_OPERATIONS,
|
||||
WORKFLOW_OPERATIONS,
|
||||
} from '@/socket/constants'
|
||||
} from '@sim/realtime-protocol/constants'
|
||||
import { getActiveWorkflowContext } from '@sim/workflow-authz'
|
||||
import { loadWorkflowFromNormalizedTablesRaw } from '@sim/workflow-persistence/load'
|
||||
import { mergeSubBlockValues } from '@sim/workflow-persistence/subblocks'
|
||||
import { and, eq, inArray, isNull, or, sql } from 'drizzle-orm'
|
||||
import { drizzle } from 'drizzle-orm/postgres-js'
|
||||
import postgres from 'postgres'
|
||||
import { env } from '@/env'
|
||||
|
||||
const logger = createLogger('SocketDatabase')
|
||||
|
||||
@@ -29,7 +28,7 @@ const socketDb = drizzle(
|
||||
prepare: false,
|
||||
idle_timeout: 10,
|
||||
connect_timeout: 20,
|
||||
max: 10,
|
||||
max: 30,
|
||||
onnotice: () => {},
|
||||
}),
|
||||
{ schema }
|
||||
@@ -182,7 +181,7 @@ export async function getWorkflowState(workflowId: string) {
|
||||
throw new Error(`Workflow ${workflowId} not found`)
|
||||
}
|
||||
|
||||
const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)
|
||||
const normalizedData = await loadWorkflowFromNormalizedTablesRaw(workflowId)
|
||||
|
||||
if (normalizedData) {
|
||||
const finalState = {
|
||||
@@ -915,30 +914,10 @@ async function handleBlocksOperationTx(
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up external webhooks
|
||||
const webhooksToCleanup = await tx
|
||||
.select({
|
||||
webhook: webhook,
|
||||
workflow: {
|
||||
id: workflow.id,
|
||||
userId: workflow.userId,
|
||||
workspaceId: workflow.workspaceId,
|
||||
},
|
||||
})
|
||||
.from(webhook)
|
||||
.innerJoin(workflow, eq(webhook.workflowId, workflow.id))
|
||||
.where(and(eq(webhook.workflowId, workflowId), inArray(webhook.blockId, blockIdsArray)))
|
||||
|
||||
if (webhooksToCleanup.length > 0) {
|
||||
const requestId = `socket-batch-${workflowId}-${Date.now()}`
|
||||
for (const { webhook: wh, workflow: wf } of webhooksToCleanup) {
|
||||
try {
|
||||
await cleanupExternalWebhook(wh, wf, requestId)
|
||||
} catch (error) {
|
||||
logger.error(`Failed to cleanup webhook ${wh.id}:`, error)
|
||||
}
|
||||
}
|
||||
}
|
||||
// Webhook rows are only created at deploy time (saveTriggerWebhooksForDeploy in
|
||||
// lib/webhooks/deploy.ts) with deploymentVersionId set; their external-subscription
|
||||
// lifecycle is managed by deploy.ts, lifecycle.ts, and the /api/webhooks/[id] route.
|
||||
// Removing a trigger block from the draft canvas does not touch any webhook rows.
|
||||
|
||||
// Delete edges connected to any of the blocks
|
||||
await tx
|
||||
44
apps/realtime/src/env.ts
Normal file
44
apps/realtime/src/env.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import { z } from 'zod'
|
||||
|
||||
const EnvSchema = z.object({
|
||||
NODE_ENV: z.enum(['development', 'test', 'production']).default('development'),
|
||||
DATABASE_URL: z.string().url(),
|
||||
REDIS_URL: z.string().url().optional(),
|
||||
BETTER_AUTH_URL: z.string().url(),
|
||||
BETTER_AUTH_SECRET: z.string().min(32),
|
||||
INTERNAL_API_SECRET: z.string().min(32),
|
||||
NEXT_PUBLIC_APP_URL: z.string().url(),
|
||||
ALLOWED_ORIGINS: z.string().optional(),
|
||||
PORT: z.coerce.number().int().positive().default(3002),
|
||||
DISABLE_AUTH: z
|
||||
.string()
|
||||
.optional()
|
||||
.transform((value) => value === 'true' || value === '1'),
|
||||
})
|
||||
|
||||
function parseEnv() {
|
||||
const parsed = EnvSchema.safeParse(process.env)
|
||||
if (!parsed.success) {
|
||||
const formatted = parsed.error.format()
|
||||
throw new Error(`Invalid realtime server environment: ${JSON.stringify(formatted, null, 2)}`)
|
||||
}
|
||||
return parsed.data
|
||||
}
|
||||
|
||||
export const env = parseEnv()
|
||||
|
||||
export const isProd = env.NODE_ENV === 'production'
|
||||
export const isDev = env.NODE_ENV === 'development'
|
||||
export const isTest = env.NODE_ENV === 'test'
|
||||
|
||||
let appHostname = ''
|
||||
try {
|
||||
appHostname = new URL(env.NEXT_PUBLIC_APP_URL).hostname
|
||||
} catch {}
|
||||
export const isHosted = appHostname === 'sim.ai' || appHostname.endsWith('.sim.ai')
|
||||
|
||||
export const isAuthDisabled = env.DISABLE_AUTH === true && !isHosted
|
||||
|
||||
export function getBaseUrl(): string {
|
||||
return env.NEXT_PUBLIC_APP_URL
|
||||
}
|
||||
@@ -1,8 +1,8 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { cleanupPendingSubblocksForSocket } from '@/socket/handlers/subblocks'
|
||||
import { cleanupPendingVariablesForSocket } from '@/socket/handlers/variables'
|
||||
import type { AuthenticatedSocket } from '@/socket/middleware/auth'
|
||||
import type { IRoomManager } from '@/socket/rooms'
|
||||
import { cleanupPendingSubblocksForSocket } from '@/handlers/subblocks'
|
||||
import { cleanupPendingVariablesForSocket } from '@/handlers/variables'
|
||||
import type { AuthenticatedSocket } from '@/middleware/auth'
|
||||
import type { IRoomManager } from '@/rooms'
|
||||
|
||||
const logger = createLogger('ConnectionHandlers')
|
||||
|
||||
17
apps/realtime/src/handlers/index.ts
Normal file
17
apps/realtime/src/handlers/index.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { setupConnectionHandlers } from '@/handlers/connection'
|
||||
import { setupOperationsHandlers } from '@/handlers/operations'
|
||||
import { setupPresenceHandlers } from '@/handlers/presence'
|
||||
import { setupSubblocksHandlers } from '@/handlers/subblocks'
|
||||
import { setupVariablesHandlers } from '@/handlers/variables'
|
||||
import { setupWorkflowHandlers } from '@/handlers/workflow'
|
||||
import type { AuthenticatedSocket } from '@/middleware/auth'
|
||||
import type { IRoomManager } from '@/rooms'
|
||||
|
||||
export function setupAllHandlers(socket: AuthenticatedSocket, roomManager: IRoomManager) {
|
||||
setupWorkflowHandlers(socket, roomManager)
|
||||
setupOperationsHandlers(socket, roomManager)
|
||||
setupSubblocksHandlers(socket, roomManager)
|
||||
setupVariablesHandlers(socket, roomManager)
|
||||
setupPresenceHandlers(socket, roomManager)
|
||||
setupConnectionHandlers(socket, roomManager)
|
||||
}
|
||||
@@ -1,6 +1,4 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { generateId } from '@sim/utils/id'
|
||||
import { ZodError } from 'zod'
|
||||
import {
|
||||
BLOCK_OPERATIONS,
|
||||
BLOCKS_OPERATIONS,
|
||||
@@ -9,12 +7,14 @@ import {
|
||||
VARIABLE_OPERATIONS,
|
||||
type VariableOperation,
|
||||
WORKFLOW_OPERATIONS,
|
||||
} from '@/socket/constants'
|
||||
import { persistWorkflowOperation } from '@/socket/database/operations'
|
||||
import type { AuthenticatedSocket } from '@/socket/middleware/auth'
|
||||
import { checkRolePermission } from '@/socket/middleware/permissions'
|
||||
import type { IRoomManager, UserSession } from '@/socket/rooms'
|
||||
import { WorkflowOperationSchema } from '@/socket/validation/schemas'
|
||||
} from '@sim/realtime-protocol/constants'
|
||||
import { WorkflowOperationSchema } from '@sim/realtime-protocol/schemas'
|
||||
import { generateId } from '@sim/utils/id'
|
||||
import { ZodError } from 'zod'
|
||||
import { persistWorkflowOperation } from '@/database/operations'
|
||||
import type { AuthenticatedSocket } from '@/middleware/auth'
|
||||
import { checkRolePermission } from '@/middleware/permissions'
|
||||
import type { IRoomManager, UserSession } from '@/rooms'
|
||||
|
||||
const logger = createLogger('OperationsHandlers')
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { AuthenticatedSocket } from '@/socket/middleware/auth'
|
||||
import type { IRoomManager } from '@/socket/rooms'
|
||||
import type { AuthenticatedSocket } from '@/middleware/auth'
|
||||
import type { IRoomManager } from '@/rooms'
|
||||
|
||||
const logger = createLogger('PresenceHandlers')
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workflow, workflowBlocks } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { SUBBLOCK_OPERATIONS } from '@sim/realtime-protocol/constants'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { SUBBLOCK_OPERATIONS } from '@/socket/constants'
|
||||
import type { AuthenticatedSocket } from '@/socket/middleware/auth'
|
||||
import { checkRolePermission } from '@/socket/middleware/permissions'
|
||||
import type { IRoomManager } from '@/socket/rooms'
|
||||
import type { AuthenticatedSocket } from '@/middleware/auth'
|
||||
import { checkRolePermission } from '@/middleware/permissions'
|
||||
import type { IRoomManager } from '@/rooms'
|
||||
|
||||
const logger = createLogger('SubblocksHandlers')
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workflow } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { VARIABLE_OPERATIONS } from '@sim/realtime-protocol/constants'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { VARIABLE_OPERATIONS } from '@/socket/constants'
|
||||
import type { AuthenticatedSocket } from '@/socket/middleware/auth'
|
||||
import { checkRolePermission } from '@/socket/middleware/permissions'
|
||||
import type { IRoomManager } from '@/socket/rooms'
|
||||
import type { AuthenticatedSocket } from '@/middleware/auth'
|
||||
import { checkRolePermission } from '@/middleware/permissions'
|
||||
import type { IRoomManager } from '@/rooms'
|
||||
|
||||
const logger = createLogger('VariablesHandlers')
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import type { IRoomManager } from '@/socket/rooms'
|
||||
import type { IRoomManager } from '@/rooms'
|
||||
|
||||
const { mockGetWorkflowState, mockVerifyWorkflowAccess } = vi.hoisted(() => ({
|
||||
mockGetWorkflowState: vi.fn(),
|
||||
@@ -14,15 +14,15 @@ vi.mock('@sim/db', () => ({
|
||||
user: { image: 'image' },
|
||||
}))
|
||||
|
||||
vi.mock('@/socket/database/operations', () => ({
|
||||
vi.mock('@/database/operations', () => ({
|
||||
getWorkflowState: mockGetWorkflowState,
|
||||
}))
|
||||
|
||||
vi.mock('@/socket/middleware/permissions', () => ({
|
||||
vi.mock('@/middleware/permissions', () => ({
|
||||
verifyWorkflowAccess: mockVerifyWorkflowAccess,
|
||||
}))
|
||||
|
||||
import { setupWorkflowHandlers } from '@/socket/handlers/workflow'
|
||||
import { setupWorkflowHandlers } from '@/handlers/workflow'
|
||||
|
||||
interface JoinWorkflowPayload {
|
||||
workflowId: string
|
||||
@@ -1,10 +1,10 @@
|
||||
import { db, user } from '@sim/db'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { getWorkflowState } from '@/socket/database/operations'
|
||||
import type { AuthenticatedSocket } from '@/socket/middleware/auth'
|
||||
import { verifyWorkflowAccess } from '@/socket/middleware/permissions'
|
||||
import type { IRoomManager, UserPresence } from '@/socket/rooms'
|
||||
import { getWorkflowState } from '@/database/operations'
|
||||
import type { AuthenticatedSocket } from '@/middleware/auth'
|
||||
import { verifyWorkflowAccess } from '@/middleware/permissions'
|
||||
import type { IRoomManager, UserPresence } from '@/rooms'
|
||||
|
||||
const logger = createLogger('WorkflowHandlers')
|
||||
|
||||
@@ -4,21 +4,28 @@
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { createServer, request as httpRequest } from 'http'
|
||||
import { createEnvMock, createMockLogger } from '@sim/testing'
|
||||
import { createMockLogger } from '@sim/testing'
|
||||
import { afterEach, beforeAll, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { createSocketIOServer } from '@/socket/config/socket'
|
||||
import { MemoryRoomManager } from '@/socket/rooms'
|
||||
import { createHttpHandler } from '@/socket/routes/http'
|
||||
import { createSocketIOServer } from '@/config/socket'
|
||||
import { MemoryRoomManager } from '@/rooms'
|
||||
import { createHttpHandler } from '@/routes/http'
|
||||
|
||||
vi.mock('@/lib/auth', () => ({
|
||||
vi.mock('@/auth', () => ({
|
||||
auth: {
|
||||
api: {
|
||||
verifyOneTimeToken: vi.fn(),
|
||||
},
|
||||
},
|
||||
ANONYMOUS_USER_ID: '00000000-0000-0000-0000-000000000000',
|
||||
ANONYMOUS_USER: {
|
||||
id: '00000000-0000-0000-0000-000000000000',
|
||||
name: 'Anonymous',
|
||||
email: 'anonymous@localhost',
|
||||
emailVerified: true,
|
||||
image: null,
|
||||
},
|
||||
}))
|
||||
|
||||
// Mock redis package to prevent actual Redis connections
|
||||
vi.mock('redis', () => ({
|
||||
createClient: vi.fn(() => ({
|
||||
on: vi.fn(),
|
||||
@@ -28,15 +35,27 @@ vi.mock('redis', () => ({
|
||||
})),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/core/config/env', () =>
|
||||
createEnvMock({
|
||||
vi.mock('@/env', () => ({
|
||||
env: {
|
||||
DATABASE_URL: 'postgres://localhost/test',
|
||||
NODE_ENV: 'test',
|
||||
REDIS_URL: undefined,
|
||||
})
|
||||
)
|
||||
BETTER_AUTH_URL: 'http://localhost:3000',
|
||||
BETTER_AUTH_SECRET: 'test-better-auth-secret-at-least-32-chars',
|
||||
INTERNAL_API_SECRET: 'test-internal-api-secret-at-least-32-chars',
|
||||
NEXT_PUBLIC_APP_URL: 'http://localhost:3000',
|
||||
PORT: 3002,
|
||||
DISABLE_AUTH: false,
|
||||
},
|
||||
isProd: false,
|
||||
isDev: false,
|
||||
isTest: true,
|
||||
isHosted: false,
|
||||
isAuthDisabled: false,
|
||||
getBaseUrl: () => 'http://localhost:3000',
|
||||
}))
|
||||
|
||||
vi.mock('@/socket/middleware/auth', () => ({
|
||||
vi.mock('@/middleware/auth', () => ({
|
||||
authenticateSocket: vi.fn((socket, next) => {
|
||||
socket.userId = 'test-user-id'
|
||||
socket.userName = 'Test User'
|
||||
@@ -45,7 +64,7 @@ vi.mock('@/socket/middleware/auth', () => ({
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/socket/middleware/permissions', () => ({
|
||||
vi.mock('@/middleware/permissions', () => ({
|
||||
verifyWorkflowAccess: vi.fn().mockResolvedValue({
|
||||
hasAccess: true,
|
||||
role: 'admin',
|
||||
@@ -55,7 +74,7 @@ vi.mock('@/socket/middleware/permissions', () => ({
|
||||
}),
|
||||
}))
|
||||
|
||||
vi.mock('@/socket/database/operations', () => ({
|
||||
vi.mock('@/database/operations', () => ({
|
||||
getWorkflowState: vi.fn().mockResolvedValue({
|
||||
id: 'test-workflow',
|
||||
name: 'Test Workflow',
|
||||
@@ -275,13 +294,13 @@ describe('Socket Server Index Integration', () => {
|
||||
|
||||
describe('Module Integration', () => {
|
||||
it.concurrent('should properly import all extracted modules', async () => {
|
||||
const { createSocketIOServer } = await import('@/socket/config/socket')
|
||||
const { createHttpHandler } = await import('@/socket/routes/http')
|
||||
const { MemoryRoomManager, RedisRoomManager } = await import('@/socket/rooms')
|
||||
const { authenticateSocket } = await import('@/socket/middleware/auth')
|
||||
const { verifyWorkflowAccess } = await import('@/socket/middleware/permissions')
|
||||
const { getWorkflowState } = await import('@/socket/database/operations')
|
||||
const { WorkflowOperationSchema } = await import('@/socket/validation/schemas')
|
||||
const { createSocketIOServer } = await import('@/config/socket')
|
||||
const { createHttpHandler } = await import('@/routes/http')
|
||||
const { MemoryRoomManager, RedisRoomManager } = await import('@/rooms')
|
||||
const { authenticateSocket } = await import('@/middleware/auth')
|
||||
const { verifyWorkflowAccess } = await import('@/middleware/permissions')
|
||||
const { getWorkflowState } = await import('@/database/operations')
|
||||
const { WorkflowOperationSchema } = await import('@sim/realtime-protocol/schemas')
|
||||
|
||||
expect(createSocketIOServer).toBeTypeOf('function')
|
||||
expect(createHttpHandler).toBeTypeOf('function')
|
||||
@@ -332,7 +351,7 @@ describe('Socket Server Index Integration', () => {
|
||||
|
||||
describe('Validation and Utils', () => {
|
||||
it.concurrent('should validate workflow operations', async () => {
|
||||
const { WorkflowOperationSchema } = await import('@/socket/validation/schemas')
|
||||
const { WorkflowOperationSchema } = await import('@sim/realtime-protocol/schemas')
|
||||
|
||||
const validOperation = {
|
||||
operation: 'batch-add-blocks',
|
||||
@@ -358,7 +377,7 @@ describe('Socket Server Index Integration', () => {
|
||||
})
|
||||
|
||||
it.concurrent('should validate batch-add-blocks with edges', async () => {
|
||||
const { WorkflowOperationSchema } = await import('@/socket/validation/schemas')
|
||||
const { WorkflowOperationSchema } = await import('@sim/realtime-protocol/schemas')
|
||||
|
||||
const validOperationWithEdge = {
|
||||
operation: 'batch-add-blocks',
|
||||
@@ -393,7 +412,7 @@ describe('Socket Server Index Integration', () => {
|
||||
})
|
||||
|
||||
it.concurrent('should validate edge operations', async () => {
|
||||
const { WorkflowOperationSchema } = await import('@/socket/validation/schemas')
|
||||
const { WorkflowOperationSchema } = await import('@sim/realtime-protocol/schemas')
|
||||
|
||||
const validEdgeOperation = {
|
||||
operation: 'add',
|
||||
@@ -410,7 +429,7 @@ describe('Socket Server Index Integration', () => {
|
||||
})
|
||||
|
||||
it('should validate subflow operations', async () => {
|
||||
const { WorkflowOperationSchema } = await import('@/socket/validation/schemas')
|
||||
const { WorkflowOperationSchema } = await import('@sim/realtime-protocol/schemas')
|
||||
|
||||
const validSubflowOperation = {
|
||||
operation: 'update',
|
||||
@@ -1,12 +1,12 @@
|
||||
import { createServer } from 'http'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { Server as SocketIOServer } from 'socket.io'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { createSocketIOServer, shutdownSocketIOAdapter } from '@/socket/config/socket'
|
||||
import { setupAllHandlers } from '@/socket/handlers'
|
||||
import { type AuthenticatedSocket, authenticateSocket } from '@/socket/middleware/auth'
|
||||
import { type IRoomManager, MemoryRoomManager, RedisRoomManager } from '@/socket/rooms'
|
||||
import { createHttpHandler } from '@/socket/routes/http'
|
||||
import { createSocketIOServer, shutdownSocketIOAdapter } from '@/config/socket'
|
||||
import { env } from '@/env'
|
||||
import { setupAllHandlers } from '@/handlers'
|
||||
import { type AuthenticatedSocket, authenticateSocket } from '@/middleware/auth'
|
||||
import { type IRoomManager, MemoryRoomManager, RedisRoomManager } from '@/rooms'
|
||||
import { createHttpHandler } from '@/routes/http'
|
||||
|
||||
const logger = createLogger('CollaborativeSocketServer')
|
||||
|
||||
@@ -29,7 +29,7 @@ async function createRoomManager(io: SocketIOServer): Promise<IRoomManager> {
|
||||
|
||||
async function main() {
|
||||
const httpServer = createServer()
|
||||
const PORT = Number(env.PORT || env.SOCKET_PORT || 3002)
|
||||
const PORT = env.PORT
|
||||
|
||||
logger.info('Starting Socket.IO server...', {
|
||||
port: PORT,
|
||||
@@ -1,9 +1,8 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { toError } from '@sim/utils/errors'
|
||||
import type { Socket } from 'socket.io'
|
||||
import { auth } from '@/lib/auth'
|
||||
import { ANONYMOUS_USER, ANONYMOUS_USER_ID } from '@/lib/auth/constants'
|
||||
import { isAuthDisabled } from '@/lib/core/config/feature-flags'
|
||||
import { ANONYMOUS_USER, ANONYMOUS_USER_ID, auth } from '@/auth'
|
||||
import { isAuthDisabled } from '@/env'
|
||||
|
||||
const logger = createLogger('SocketAuth')
|
||||
|
||||
@@ -14,7 +14,7 @@ import {
|
||||
SOCKET_OPERATIONS,
|
||||
} from '@sim/testing'
|
||||
import { describe, expect, it } from 'vitest'
|
||||
import { checkRolePermission } from '@/socket/middleware/permissions'
|
||||
import { checkRolePermission } from '@/middleware/permissions'
|
||||
|
||||
describe('checkRolePermission', () => {
|
||||
describe('admin role', () => {
|
||||
@@ -1,8 +1,6 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workflow } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, isNull } from 'drizzle-orm'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||
import {
|
||||
BLOCK_OPERATIONS,
|
||||
BLOCKS_OPERATIONS,
|
||||
@@ -12,7 +10,9 @@ import {
|
||||
SUBFLOW_OPERATIONS,
|
||||
VARIABLE_OPERATIONS,
|
||||
WORKFLOW_OPERATIONS,
|
||||
} from '@/socket/constants'
|
||||
} from '@sim/realtime-protocol/constants'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@sim/workflow-authz'
|
||||
import { and, eq, isNull } from 'drizzle-orm'
|
||||
|
||||
const logger = createLogger('SocketPermissions')
|
||||
|
||||
3
apps/realtime/src/rooms/index.ts
Normal file
3
apps/realtime/src/rooms/index.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export { MemoryRoomManager } from '@/rooms/memory-manager'
|
||||
export { RedisRoomManager } from '@/rooms/redis-manager'
|
||||
export type { IRoomManager, UserPresence, UserSession, WorkflowRoom } from '@/rooms/types'
|
||||
@@ -1,6 +1,6 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { Server } from 'socket.io'
|
||||
import type { IRoomManager, UserPresence, UserSession, WorkflowRoom } from '@/socket/rooms/types'
|
||||
import type { IRoomManager, UserPresence, UserSession, WorkflowRoom } from '@/rooms/types'
|
||||
|
||||
const logger = createLogger('MemoryRoomManager')
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { createClient, type RedisClientType } from 'redis'
|
||||
import type { Server } from 'socket.io'
|
||||
import type { IRoomManager, UserPresence, UserSession } from '@/socket/rooms/types'
|
||||
import type { IRoomManager, UserPresence, UserSession } from '@/rooms/types'
|
||||
|
||||
const logger = createLogger('RedisRoomManager')
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import type { IncomingMessage, ServerResponse } from 'http'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { safeCompare } from '@/lib/core/security/encryption'
|
||||
import type { IRoomManager } from '@/socket/rooms'
|
||||
import { safeCompare } from '@sim/security/compare'
|
||||
import { env } from '@/env'
|
||||
import type { IRoomManager } from '@/rooms'
|
||||
|
||||
interface Logger {
|
||||
info: (message: string, ...args: unknown[]) => void
|
||||
11
apps/realtime/tsconfig.json
Normal file
11
apps/realtime/tsconfig.json
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"extends": "@sim/tsconfig/base.json",
|
||||
"compilerOptions": {
|
||||
"baseUrl": ".",
|
||||
"paths": {
|
||||
"@/*": ["src/*"]
|
||||
}
|
||||
},
|
||||
"include": ["src/**/*"],
|
||||
"exclude": ["node_modules", "dist"]
|
||||
}
|
||||
27
apps/realtime/vitest.config.ts
Normal file
27
apps/realtime/vitest.config.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import path from 'node:path'
|
||||
import { defineConfig } from 'vitest/config'
|
||||
|
||||
export default defineConfig({
|
||||
test: {
|
||||
globals: true,
|
||||
environment: 'node',
|
||||
include: ['**/*.test.{ts,tsx}'],
|
||||
exclude: ['**/node_modules/**', '**/dist/**'],
|
||||
setupFiles: ['./vitest.setup.ts'],
|
||||
pool: 'threads',
|
||||
testTimeout: 10000,
|
||||
},
|
||||
resolve: {
|
||||
alias: [
|
||||
{
|
||||
find: '@sim/db',
|
||||
replacement: path.resolve(__dirname, '../../packages/db'),
|
||||
},
|
||||
{
|
||||
find: '@sim/logger',
|
||||
replacement: path.resolve(__dirname, '../../packages/logger/src'),
|
||||
},
|
||||
{ find: '@', replacement: path.resolve(__dirname, 'src') },
|
||||
],
|
||||
},
|
||||
})
|
||||
6
apps/realtime/vitest.setup.ts
Normal file
6
apps/realtime/vitest.setup.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
process.env.DATABASE_URL ??= 'postgres://localhost/test'
|
||||
process.env.NODE_ENV ??= 'test'
|
||||
process.env.BETTER_AUTH_URL ??= 'http://localhost:3000'
|
||||
process.env.BETTER_AUTH_SECRET ??= 'test-better-auth-secret-at-least-32-chars'
|
||||
process.env.INTERNAL_API_SECRET ??= 'test-internal-api-secret-at-least-32-chars'
|
||||
process.env.NEXT_PUBLIC_APP_URL ??= 'http://localhost:3000'
|
||||
@@ -26,6 +26,13 @@ apps/sim/
|
||||
└── triggers/ # Trigger definitions
|
||||
```
|
||||
|
||||
The Socket.IO collaborative-canvas server lives in a separate workspace at
|
||||
`apps/realtime/`. It shares DB + auth with `apps/sim` via the `@sim/*`
|
||||
packages. Do not add imports from `@/lib/webhooks/providers/*`, `@/executor/*`,
|
||||
`@/blocks/*`, or `@/tools/*` to any package consumed by `apps/realtime` —
|
||||
those heavyweight registries stay in this app. `apps/realtime` calls back
|
||||
into this app only over internal HTTP with `INTERNAL_API_SECRET`.
|
||||
|
||||
### Feature Organization
|
||||
|
||||
Features live under `app/workspace/[workspaceId]/`:
|
||||
|
||||
@@ -6,6 +6,7 @@ import type { ComponentType, SVGProps } from 'react'
|
||||
import {
|
||||
A2AIcon,
|
||||
AgentMailIcon,
|
||||
AgentPhoneIcon,
|
||||
AgiloftIcon,
|
||||
AhrefsIcon,
|
||||
AirtableIcon,
|
||||
@@ -204,6 +205,7 @@ type IconComponent = ComponentType<SVGProps<SVGSVGElement>>
|
||||
export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
a2a: A2AIcon,
|
||||
agentmail: AgentMailIcon,
|
||||
agentphone: AgentPhoneIcon,
|
||||
agiloft: AgiloftIcon,
|
||||
ahrefs: AhrefsIcon,
|
||||
airtable: AirtableIcon,
|
||||
|
||||
@@ -208,6 +208,113 @@
|
||||
"integrationTypes": ["email", "communication"],
|
||||
"tags": ["messaging"]
|
||||
},
|
||||
{
|
||||
"type": "agentphone",
|
||||
"slug": "agentphone",
|
||||
"name": "AgentPhone",
|
||||
"description": "Provision numbers, send SMS and iMessage, and place voice calls with AgentPhone",
|
||||
"longDescription": "Give your workflow a phone. Provision SMS- and voice-enabled numbers, send messages and tapback reactions, place outbound voice calls, manage conversations and contacts, and track usage — all through a single AgentPhone API key.",
|
||||
"bgColor": "linear-gradient(135deg, #1a1a1a 0%, #0a2a14 100%)",
|
||||
"iconName": "AgentPhoneIcon",
|
||||
"docsUrl": "https://docs.sim.ai/tools/agentphone",
|
||||
"operations": [
|
||||
{
|
||||
"name": "Create Number",
|
||||
"description": "Provision a new SMS- and voice-enabled phone number"
|
||||
},
|
||||
{
|
||||
"name": "List Numbers",
|
||||
"description": "List all phone numbers provisioned for this AgentPhone account"
|
||||
},
|
||||
{
|
||||
"name": "Release Number",
|
||||
"description": "Release (delete) a phone number. This action is irreversible."
|
||||
},
|
||||
{
|
||||
"name": "Get Number Messages",
|
||||
"description": "Fetch messages received on a specific phone number"
|
||||
},
|
||||
{
|
||||
"name": "Create Call",
|
||||
"description": "Initiate an outbound voice call from an AgentPhone agent"
|
||||
},
|
||||
{
|
||||
"name": "List Calls",
|
||||
"description": "List voice calls for this AgentPhone account"
|
||||
},
|
||||
{
|
||||
"name": "Get Call",
|
||||
"description": "Fetch a call and its full transcript"
|
||||
},
|
||||
{
|
||||
"name": "Get Call Transcript",
|
||||
"description": "Get the full ordered transcript for a call"
|
||||
},
|
||||
{
|
||||
"name": "List Conversations",
|
||||
"description": "List conversations (message threads) for this AgentPhone account"
|
||||
},
|
||||
{
|
||||
"name": "Get Conversation",
|
||||
"description": "Get a conversation along with its recent messages"
|
||||
},
|
||||
{
|
||||
"name": "Update Conversation",
|
||||
"description": "Update conversation metadata (stored state). Pass null to clear existing metadata."
|
||||
},
|
||||
{
|
||||
"name": "Get Conversation Messages",
|
||||
"description": "Get paginated messages for a conversation"
|
||||
},
|
||||
{
|
||||
"name": "Send Message",
|
||||
"description": "Send an outbound SMS or iMessage from an AgentPhone agent"
|
||||
},
|
||||
{
|
||||
"name": "React to Message",
|
||||
"description": "Send an iMessage tapback reaction to a message (iMessage only)"
|
||||
},
|
||||
{
|
||||
"name": "Create Contact",
|
||||
"description": "Create a new contact in AgentPhone"
|
||||
},
|
||||
{
|
||||
"name": "List Contacts",
|
||||
"description": "List contacts for this AgentPhone account"
|
||||
},
|
||||
{
|
||||
"name": "Get Contact",
|
||||
"description": "Fetch a single contact by ID"
|
||||
},
|
||||
{
|
||||
"name": "Update Contact",
|
||||
"description": "Update a contact"
|
||||
},
|
||||
{
|
||||
"name": "Delete Contact",
|
||||
"description": "Delete a contact by ID"
|
||||
},
|
||||
{
|
||||
"name": "Get Usage",
|
||||
"description": "Retrieve current usage statistics for the AgentPhone account"
|
||||
},
|
||||
{
|
||||
"name": "Get Daily Usage",
|
||||
"description": "Get a daily breakdown of usage (messages, calls, webhooks) for the last N days"
|
||||
},
|
||||
{
|
||||
"name": "Get Monthly Usage",
|
||||
"description": "Get monthly usage aggregation (messages, calls, webhooks) for the last N months"
|
||||
}
|
||||
],
|
||||
"operationCount": 22,
|
||||
"triggers": [],
|
||||
"triggerCount": 0,
|
||||
"authType": "api-key",
|
||||
"category": "tools",
|
||||
"integrationTypes": ["communication", "developer-tools"],
|
||||
"tags": ["messaging", "automation"]
|
||||
},
|
||||
{
|
||||
"type": "agiloft",
|
||||
"slug": "agiloft",
|
||||
@@ -924,7 +1031,7 @@
|
||||
},
|
||||
{
|
||||
"name": "List Applications",
|
||||
"description": "Lists all applications in an Ashby organization with pagination and optional filters for status, job, candidate, and creation date."
|
||||
"description": "Lists all applications in an Ashby organization with pagination and optional filters for status, job, and creation date."
|
||||
},
|
||||
{
|
||||
"name": "Get Application",
|
||||
@@ -944,11 +1051,11 @@
|
||||
},
|
||||
{
|
||||
"name": "Add Candidate Tag",
|
||||
"description": "Adds a tag to a candidate in Ashby."
|
||||
"description": "Adds a tag to a candidate in Ashby and returns the updated candidate."
|
||||
},
|
||||
{
|
||||
"name": "Remove Candidate Tag",
|
||||
"description": "Removes a tag from a candidate in Ashby."
|
||||
"description": "Removes a tag from a candidate in Ashby and returns the updated candidate."
|
||||
},
|
||||
{
|
||||
"name": "Get Offer",
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit'
|
||||
import { db } from '@sim/db'
|
||||
import { user } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
import { auth } from '@/lib/auth'
|
||||
import { isSameOrigin } from '@/lib/core/utils/validation'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import { account, credential, credentialMember } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@sim/workflow-authz'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
@@ -12,7 +13,6 @@ import {
|
||||
getCanonicalScopesForProvider,
|
||||
getServiceAccountProviderForProviderId,
|
||||
} from '@/lib/oauth/utils'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
@@ -23,7 +23,7 @@ vi.mock('@/lib/webhooks/utils.server', () => ({
|
||||
syncAllWebhooksForCredentialSet: mockSyncAllWebhooksForCredentialSet,
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/audit/log', () => auditMock)
|
||||
vi.mock('@sim/audit', () => auditMock)
|
||||
|
||||
import { POST } from '@/app/api/auth/oauth/disconnect/route'
|
||||
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit'
|
||||
import { db } from '@sim/db'
|
||||
import { account, credentialSet, credentialSetMember } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, like, or } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import crypto from 'crypto'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { safeCompare } from '@sim/security/compare'
|
||||
import { hmacSha256Hex } from '@sim/security/hmac'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
@@ -34,13 +35,9 @@ function validateHmac(searchParams: URLSearchParams, clientSecret: string): bool
|
||||
.map((key) => `${key}=${params[key]}`)
|
||||
.join('&')
|
||||
|
||||
const generatedHmac = crypto.createHmac('sha256', clientSecret).update(message).digest('hex')
|
||||
const generatedHmac = hmacSha256Hex(message, clientSecret)
|
||||
|
||||
try {
|
||||
return crypto.timingSafeEqual(Buffer.from(hmac, 'hex'), Buffer.from(generatedHmac, 'hex'))
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
return safeCompare(hmac, generatedHmac)
|
||||
}
|
||||
|
||||
export const GET = withRouteHandler(async (request: NextRequest) => {
|
||||
|
||||
@@ -32,7 +32,9 @@ export const GET = withRouteHandler(async (request: NextRequest) => {
|
||||
const returnUrl = request.nextUrl.searchParams.get('returnUrl')
|
||||
|
||||
if (!shopDomain) {
|
||||
const returnUrlParam = returnUrl ? encodeURIComponent(returnUrl) : ''
|
||||
const safeReturnUrl =
|
||||
returnUrl && isSameOrigin(returnUrl) ? encodeURIComponent(returnUrl) : ''
|
||||
const returnUrlJsLiteral = JSON.stringify(safeReturnUrl)
|
||||
return new NextResponse(
|
||||
`<!DOCTYPE html>
|
||||
<html>
|
||||
@@ -120,7 +122,7 @@ export const GET = withRouteHandler(async (request: NextRequest) => {
|
||||
</div>
|
||||
|
||||
<script>
|
||||
const returnUrl = '${returnUrlParam}';
|
||||
const returnUrl = ${returnUrlJsLiteral};
|
||||
function handleSubmit(e) {
|
||||
e.preventDefault();
|
||||
let shop = document.getElementById('shop').value.trim().toLowerCase();
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { getCreditBalance } from '@/lib/billing/credits/balance'
|
||||
import { purchaseCredits } from '@/lib/billing/credits/purchase'
|
||||
|
||||
@@ -5,7 +5,11 @@ import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { recordUsage } from '@/lib/billing/core/usage-log'
|
||||
import { checkAndBillOverageThreshold } from '@/lib/billing/threshold-billing'
|
||||
import { BillingRouteOutcome } from '@/lib/copilot/generated/trace-attribute-values-v1'
|
||||
import { TraceAttr } from '@/lib/copilot/generated/trace-attributes-v1'
|
||||
import { TraceSpan } from '@/lib/copilot/generated/trace-spans-v1'
|
||||
import { checkInternalApiKey } from '@/lib/copilot/request/http'
|
||||
import { withIncomingGoSpan } from '@/lib/copilot/request/otel'
|
||||
import { isBillingEnabled } from '@/lib/core/config/feature-flags'
|
||||
import { type AtomicClaimResult, billingIdempotency } from '@/lib/core/idempotency/service'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
@@ -28,8 +32,28 @@ const UpdateCostSchema = z.object({
|
||||
/**
|
||||
* POST /api/billing/update-cost
|
||||
* Update user cost with a pre-calculated cost value (internal API key auth required)
|
||||
*
|
||||
* Parented under the Go-side `sim.update_cost` span via W3C traceparent
|
||||
* propagation. Every mothership request that bills should therefore show
|
||||
* the Go client span AND this Sim server span sharing one trace, with
|
||||
* the actual usage/overage work nested below.
|
||||
*/
|
||||
export const POST = withRouteHandler(async (req: NextRequest) => {
|
||||
export const POST = withRouteHandler((req: NextRequest) =>
|
||||
withIncomingGoSpan(
|
||||
req.headers,
|
||||
TraceSpan.CopilotBillingUpdateCost,
|
||||
{
|
||||
[TraceAttr.HttpMethod]: 'POST',
|
||||
[TraceAttr.HttpRoute]: '/api/billing/update-cost',
|
||||
},
|
||||
async (span) => updateCostInner(req, span)
|
||||
)
|
||||
)
|
||||
|
||||
async function updateCostInner(
|
||||
req: NextRequest,
|
||||
span: import('@opentelemetry/api').Span
|
||||
): Promise<NextResponse> {
|
||||
const requestId = generateRequestId()
|
||||
const startTime = Date.now()
|
||||
let claim: AtomicClaimResult | null = null
|
||||
@@ -39,6 +63,8 @@ export const POST = withRouteHandler(async (req: NextRequest) => {
|
||||
logger.info(`[${requestId}] Update cost request started`)
|
||||
|
||||
if (!isBillingEnabled) {
|
||||
span.setAttribute(TraceAttr.BillingOutcome, BillingRouteOutcome.BillingDisabled)
|
||||
span.setAttribute(TraceAttr.HttpStatusCode, 200)
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: 'Billing disabled, cost update skipped',
|
||||
@@ -54,6 +80,8 @@ export const POST = withRouteHandler(async (req: NextRequest) => {
|
||||
const authResult = checkInternalApiKey(req)
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Authentication failed: ${authResult.error}`)
|
||||
span.setAttribute(TraceAttr.BillingOutcome, BillingRouteOutcome.AuthFailed)
|
||||
span.setAttribute(TraceAttr.HttpStatusCode, 401)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
@@ -69,8 +97,9 @@ export const POST = withRouteHandler(async (req: NextRequest) => {
|
||||
if (!validation.success) {
|
||||
logger.warn(`[${requestId}] Invalid request body`, {
|
||||
errors: validation.error.issues,
|
||||
body,
|
||||
})
|
||||
span.setAttribute(TraceAttr.BillingOutcome, BillingRouteOutcome.InvalidBody)
|
||||
span.setAttribute(TraceAttr.HttpStatusCode, 400)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
@@ -85,6 +114,17 @@ export const POST = withRouteHandler(async (req: NextRequest) => {
|
||||
validation.data
|
||||
const isMcp = source === 'mcp_copilot'
|
||||
|
||||
span.setAttributes({
|
||||
[TraceAttr.UserId]: userId,
|
||||
[TraceAttr.GenAiRequestModel]: model,
|
||||
[TraceAttr.BillingSource]: source,
|
||||
[TraceAttr.BillingCostUsd]: cost,
|
||||
[TraceAttr.GenAiUsageInputTokens]: inputTokens,
|
||||
[TraceAttr.GenAiUsageOutputTokens]: outputTokens,
|
||||
[TraceAttr.BillingIsMcp]: isMcp,
|
||||
...(idempotencyKey ? { [TraceAttr.BillingIdempotencyKey]: idempotencyKey } : {}),
|
||||
})
|
||||
|
||||
claim = idempotencyKey
|
||||
? await billingIdempotency.atomicallyClaim('update-cost', idempotencyKey)
|
||||
: null
|
||||
@@ -95,6 +135,8 @@ export const POST = withRouteHandler(async (req: NextRequest) => {
|
||||
userId,
|
||||
source,
|
||||
})
|
||||
span.setAttribute(TraceAttr.BillingOutcome, BillingRouteOutcome.DuplicateIdempotencyKey)
|
||||
span.setAttribute(TraceAttr.HttpStatusCode, 409)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
@@ -159,6 +201,9 @@ export const POST = withRouteHandler(async (req: NextRequest) => {
|
||||
cost,
|
||||
})
|
||||
|
||||
span.setAttribute(TraceAttr.BillingOutcome, BillingRouteOutcome.Billed)
|
||||
span.setAttribute(TraceAttr.HttpStatusCode, 200)
|
||||
span.setAttribute(TraceAttr.BillingDurationMs, duration)
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
data: {
|
||||
@@ -193,6 +238,9 @@ export const POST = withRouteHandler(async (req: NextRequest) => {
|
||||
)
|
||||
}
|
||||
|
||||
span.setAttribute(TraceAttr.BillingOutcome, BillingRouteOutcome.InternalError)
|
||||
span.setAttribute(TraceAttr.HttpStatusCode, 500)
|
||||
span.setAttribute(TraceAttr.BillingDurationMs, duration)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
@@ -202,4 +250,4 @@ export const POST = withRouteHandler(async (req: NextRequest) => {
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -33,7 +33,7 @@ const mockPerformChatUndeploy = workflowsOrchestrationMockFns.mockPerformChatUnd
|
||||
const mockNotifySocketDeploymentChanged =
|
||||
workflowsOrchestrationMockFns.mockNotifySocketDeploymentChanged
|
||||
|
||||
vi.mock('@/lib/audit/log', () => auditMock)
|
||||
vi.mock('@sim/audit', () => auditMock)
|
||||
vi.mock('@/lib/core/config/feature-flags', () => ({
|
||||
isDev: true,
|
||||
isHosted: false,
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit'
|
||||
import { db } from '@sim/db'
|
||||
import { chat } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, isNull } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { isDev } from '@/lib/core/config/feature-flags'
|
||||
import { encryptSecret } from '@/lib/core/security/encryption'
|
||||
|
||||
@@ -40,7 +40,7 @@ vi.mock('@/serializer', () => ({
|
||||
Serializer: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/workflows/subblocks', () => ({
|
||||
vi.mock('@sim/workflow-persistence/subblocks', () => ({
|
||||
mergeSubblockStateWithValues: mockMergeSubblockStateWithValues,
|
||||
mergeSubBlockValues: mockMergeSubBlockValues,
|
||||
}))
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import { chat, workflow } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@sim/workflow-authz'
|
||||
import { and, eq, isNull } from 'drizzle-orm'
|
||||
import type { NextRequest, NextResponse } from 'next/server'
|
||||
import {
|
||||
@@ -9,7 +10,6 @@ import {
|
||||
validateAuthToken,
|
||||
} from '@/lib/core/security/deployment'
|
||||
import { decryptSecret } from '@/lib/core/security/encryption'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||
|
||||
const logger = createLogger('ChatAuthUtils')
|
||||
|
||||
|
||||
@@ -2,6 +2,8 @@ import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||
import { TraceAttr } from '@/lib/copilot/generated/trace-attributes-v1'
|
||||
import { fetchGo } from '@/lib/copilot/request/go/fetch'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
|
||||
@@ -33,13 +35,16 @@ export const POST = withRouteHandler(async (req: NextRequest) => {
|
||||
|
||||
const { name } = validationResult.data
|
||||
|
||||
const res = await fetch(`${SIM_AGENT_API_URL}/api/validate-key/generate`, {
|
||||
const res = await fetchGo(`${SIM_AGENT_API_URL}/api/validate-key/generate`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
|
||||
},
|
||||
body: JSON.stringify({ userId, name }),
|
||||
spanName: 'sim → go /api/validate-key/generate',
|
||||
operation: 'generate_api_key',
|
||||
attributes: { [TraceAttr.UserId]: userId },
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
|
||||
@@ -20,6 +20,22 @@ vi.mock('@/lib/core/config/env', () => createEnvMock({ COPILOT_API_KEY: 'test-ap
|
||||
|
||||
import { DELETE, GET } from '@/app/api/copilot/api-keys/route'
|
||||
|
||||
// `fetchGo` reads `response.status` and `response.headers.get('content-length')`
|
||||
// to stamp span attributes, so mock responses need both fields or the call
|
||||
// path throws before the route handler sees the body.
|
||||
function buildMockResponse(init: {
|
||||
ok: boolean
|
||||
status?: number
|
||||
json: () => Promise<unknown>
|
||||
}): Record<string, unknown> {
|
||||
return {
|
||||
ok: init.ok,
|
||||
status: init.status ?? (init.ok ? 200 : 500),
|
||||
headers: new Headers(),
|
||||
json: init.json,
|
||||
}
|
||||
}
|
||||
|
||||
describe('Copilot API Keys API Route', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
@@ -60,10 +76,12 @@ describe('Copilot API Keys API Route', () => {
|
||||
},
|
||||
]
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockApiKeys),
|
||||
})
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
buildMockResponse({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockApiKeys),
|
||||
})
|
||||
)
|
||||
|
||||
const request = new NextRequest('http://localhost:3000/api/copilot/api-keys')
|
||||
const response = await GET(request)
|
||||
@@ -83,10 +101,12 @@ describe('Copilot API Keys API Route', () => {
|
||||
user: { id: 'user-123', email: 'test@example.com' },
|
||||
})
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: () => Promise.resolve([]),
|
||||
})
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
buildMockResponse({
|
||||
ok: true,
|
||||
json: () => Promise.resolve([]),
|
||||
})
|
||||
)
|
||||
|
||||
const request = new NextRequest('http://localhost:3000/api/copilot/api-keys')
|
||||
const response = await GET(request)
|
||||
@@ -101,10 +121,12 @@ describe('Copilot API Keys API Route', () => {
|
||||
user: { id: 'user-123', email: 'test@example.com' },
|
||||
})
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: () => Promise.resolve([]),
|
||||
})
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
buildMockResponse({
|
||||
ok: true,
|
||||
json: () => Promise.resolve([]),
|
||||
})
|
||||
)
|
||||
|
||||
const request = new NextRequest('http://localhost:3000/api/copilot/api-keys')
|
||||
await GET(request)
|
||||
@@ -127,11 +149,13 @@ describe('Copilot API Keys API Route', () => {
|
||||
user: { id: 'user-123', email: 'test@example.com' },
|
||||
})
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: false,
|
||||
status: 503,
|
||||
json: () => Promise.resolve({ error: 'Service unavailable' }),
|
||||
})
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
buildMockResponse({
|
||||
ok: false,
|
||||
status: 503,
|
||||
json: () => Promise.resolve({ error: 'Service unavailable' }),
|
||||
})
|
||||
)
|
||||
|
||||
const request = new NextRequest('http://localhost:3000/api/copilot/api-keys')
|
||||
const response = await GET(request)
|
||||
@@ -146,10 +170,12 @@ describe('Copilot API Keys API Route', () => {
|
||||
user: { id: 'user-123', email: 'test@example.com' },
|
||||
})
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ invalid: 'response' }),
|
||||
})
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
buildMockResponse({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ invalid: 'response' }),
|
||||
})
|
||||
)
|
||||
|
||||
const request = new NextRequest('http://localhost:3000/api/copilot/api-keys')
|
||||
const response = await GET(request)
|
||||
@@ -189,10 +215,12 @@ describe('Copilot API Keys API Route', () => {
|
||||
},
|
||||
]
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockApiKeys),
|
||||
})
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
buildMockResponse({
|
||||
ok: true,
|
||||
json: () => Promise.resolve(mockApiKeys),
|
||||
})
|
||||
)
|
||||
|
||||
const request = new NextRequest('http://localhost:3000/api/copilot/api-keys')
|
||||
const response = await GET(request)
|
||||
@@ -207,10 +235,12 @@ describe('Copilot API Keys API Route', () => {
|
||||
user: { id: 'user-123', email: 'test@example.com' },
|
||||
})
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: () => Promise.reject(new Error('Invalid JSON')),
|
||||
})
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
buildMockResponse({
|
||||
ok: true,
|
||||
json: () => Promise.reject(new Error('Invalid JSON')),
|
||||
})
|
||||
)
|
||||
|
||||
const request = new NextRequest('http://localhost:3000/api/copilot/api-keys')
|
||||
const response = await GET(request)
|
||||
@@ -251,10 +281,12 @@ describe('Copilot API Keys API Route', () => {
|
||||
user: { id: 'user-123', email: 'test@example.com' },
|
||||
})
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true }),
|
||||
})
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
buildMockResponse({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true }),
|
||||
})
|
||||
)
|
||||
|
||||
const request = new NextRequest('http://localhost:3000/api/copilot/api-keys?id=key-123')
|
||||
const response = await DELETE(request)
|
||||
@@ -281,11 +313,13 @@ describe('Copilot API Keys API Route', () => {
|
||||
user: { id: 'user-123', email: 'test@example.com' },
|
||||
})
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: false,
|
||||
status: 404,
|
||||
json: () => Promise.resolve({ error: 'Key not found' }),
|
||||
})
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
buildMockResponse({
|
||||
ok: false,
|
||||
status: 404,
|
||||
json: () => Promise.resolve({ error: 'Key not found' }),
|
||||
})
|
||||
)
|
||||
|
||||
const request = new NextRequest('http://localhost:3000/api/copilot/api-keys?id=non-existent')
|
||||
const response = await DELETE(request)
|
||||
@@ -300,10 +334,12 @@ describe('Copilot API Keys API Route', () => {
|
||||
user: { id: 'user-123', email: 'test@example.com' },
|
||||
})
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: false }),
|
||||
})
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
buildMockResponse({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: false }),
|
||||
})
|
||||
)
|
||||
|
||||
const request = new NextRequest('http://localhost:3000/api/copilot/api-keys?id=key-123')
|
||||
const response = await DELETE(request)
|
||||
@@ -333,10 +369,12 @@ describe('Copilot API Keys API Route', () => {
|
||||
user: { id: 'user-123', email: 'test@example.com' },
|
||||
})
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: () => Promise.reject(new Error('Invalid JSON')),
|
||||
})
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
buildMockResponse({
|
||||
ok: true,
|
||||
json: () => Promise.reject(new Error('Invalid JSON')),
|
||||
})
|
||||
)
|
||||
|
||||
const request = new NextRequest('http://localhost:3000/api/copilot/api-keys?id=key-123')
|
||||
const response = await DELETE(request)
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||
import { TraceAttr } from '@/lib/copilot/generated/trace-attributes-v1'
|
||||
import { fetchGo } from '@/lib/copilot/request/go/fetch'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
|
||||
@@ -13,13 +15,16 @@ export const GET = withRouteHandler(async (request: NextRequest) => {
|
||||
|
||||
const userId = session.user.id
|
||||
|
||||
const res = await fetch(`${SIM_AGENT_API_URL}/api/validate-key/get-api-keys`, {
|
||||
const res = await fetchGo(`${SIM_AGENT_API_URL}/api/validate-key/get-api-keys`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
|
||||
},
|
||||
body: JSON.stringify({ userId }),
|
||||
spanName: 'sim → go /api/validate-key/get-api-keys',
|
||||
operation: 'get_api_keys',
|
||||
attributes: { [TraceAttr.UserId]: userId },
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
@@ -67,13 +72,16 @@ export const DELETE = withRouteHandler(async (request: NextRequest) => {
|
||||
return NextResponse.json({ error: 'id is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const res = await fetch(`${SIM_AGENT_API_URL}/api/validate-key/delete`, {
|
||||
const res = await fetchGo(`${SIM_AGENT_API_URL}/api/validate-key/delete`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
|
||||
},
|
||||
body: JSON.stringify({ userId, apiKeyId: id }),
|
||||
spanName: 'sim → go /api/validate-key/delete',
|
||||
operation: 'delete_api_key',
|
||||
attributes: { [TraceAttr.UserId]: userId, [TraceAttr.ApiKeyId]: id },
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
|
||||
@@ -5,7 +5,11 @@ import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkServerSideUsageLimits } from '@/lib/billing/calculations/usage-monitor'
|
||||
import { CopilotValidateOutcome } from '@/lib/copilot/generated/trace-attribute-values-v1'
|
||||
import { TraceAttr } from '@/lib/copilot/generated/trace-attributes-v1'
|
||||
import { TraceSpan } from '@/lib/copilot/generated/trace-spans-v1'
|
||||
import { checkInternalApiKey } from '@/lib/copilot/request/http'
|
||||
import { withIncomingGoSpan } from '@/lib/copilot/request/otel'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
|
||||
const logger = createLogger('CopilotApiKeysValidate')
|
||||
@@ -14,55 +18,87 @@ const ValidateApiKeySchema = z.object({
|
||||
userId: z.string().min(1, 'userId is required'),
|
||||
})
|
||||
|
||||
export const POST = withRouteHandler(async (req: NextRequest) => {
|
||||
try {
|
||||
const auth = checkInternalApiKey(req)
|
||||
if (!auth.success) {
|
||||
return new NextResponse(null, { status: 401 })
|
||||
// Incoming-from-Go: extracts traceparent so this handler's work shows
|
||||
// up as a child of the Go-side `sim.validate_api_key` span in the same
|
||||
// trace. If there's no traceparent (manual curl / browser), the helper
|
||||
// falls back to a new root span.
|
||||
export const POST = withRouteHandler((req: NextRequest) =>
|
||||
withIncomingGoSpan(
|
||||
req.headers,
|
||||
TraceSpan.CopilotAuthValidateApiKey,
|
||||
{
|
||||
[TraceAttr.HttpMethod]: 'POST',
|
||||
[TraceAttr.HttpRoute]: '/api/copilot/api-keys/validate',
|
||||
},
|
||||
async (span) => {
|
||||
try {
|
||||
const auth = checkInternalApiKey(req)
|
||||
if (!auth.success) {
|
||||
span.setAttribute(
|
||||
TraceAttr.CopilotValidateOutcome,
|
||||
CopilotValidateOutcome.InternalAuthFailed
|
||||
)
|
||||
span.setAttribute(TraceAttr.HttpStatusCode, 401)
|
||||
return new NextResponse(null, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await req.json().catch(() => null)
|
||||
const validationResult = ValidateApiKeySchema.safeParse(body)
|
||||
if (!validationResult.success) {
|
||||
logger.warn('Invalid validation request', { errors: validationResult.error.errors })
|
||||
span.setAttribute(TraceAttr.CopilotValidateOutcome, CopilotValidateOutcome.InvalidBody)
|
||||
span.setAttribute(TraceAttr.HttpStatusCode, 400)
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'userId is required',
|
||||
details: validationResult.error.errors,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const { userId } = validationResult.data
|
||||
span.setAttribute(TraceAttr.UserId, userId)
|
||||
|
||||
const [existingUser] = await db.select().from(user).where(eq(user.id, userId)).limit(1)
|
||||
if (!existingUser) {
|
||||
logger.warn('[API VALIDATION] userId does not exist', { userId })
|
||||
span.setAttribute(TraceAttr.CopilotValidateOutcome, CopilotValidateOutcome.UserNotFound)
|
||||
span.setAttribute(TraceAttr.HttpStatusCode, 403)
|
||||
return NextResponse.json({ error: 'User not found' }, { status: 403 })
|
||||
}
|
||||
|
||||
logger.info('[API VALIDATION] Validating usage limit', { userId })
|
||||
const { isExceeded, currentUsage, limit } = await checkServerSideUsageLimits(userId)
|
||||
span.setAttributes({
|
||||
[TraceAttr.BillingUsageCurrent]: currentUsage,
|
||||
[TraceAttr.BillingUsageLimit]: limit,
|
||||
[TraceAttr.BillingUsageExceeded]: isExceeded,
|
||||
})
|
||||
|
||||
logger.info('[API VALIDATION] Usage limit validated', {
|
||||
userId,
|
||||
currentUsage,
|
||||
limit,
|
||||
isExceeded,
|
||||
})
|
||||
|
||||
if (isExceeded) {
|
||||
logger.info('[API VALIDATION] Usage exceeded', { userId, currentUsage, limit })
|
||||
span.setAttribute(TraceAttr.CopilotValidateOutcome, CopilotValidateOutcome.UsageExceeded)
|
||||
span.setAttribute(TraceAttr.HttpStatusCode, 402)
|
||||
return new NextResponse(null, { status: 402 })
|
||||
}
|
||||
|
||||
span.setAttribute(TraceAttr.CopilotValidateOutcome, CopilotValidateOutcome.Ok)
|
||||
span.setAttribute(TraceAttr.HttpStatusCode, 200)
|
||||
return new NextResponse(null, { status: 200 })
|
||||
} catch (error) {
|
||||
logger.error('Error validating usage limit', { error })
|
||||
span.setAttribute(TraceAttr.CopilotValidateOutcome, CopilotValidateOutcome.InternalError)
|
||||
span.setAttribute(TraceAttr.HttpStatusCode, 500)
|
||||
return NextResponse.json({ error: 'Failed to validate usage' }, { status: 500 })
|
||||
}
|
||||
}
|
||||
|
||||
const body = await req.json().catch(() => null)
|
||||
|
||||
const validationResult = ValidateApiKeySchema.safeParse(body)
|
||||
|
||||
if (!validationResult.success) {
|
||||
logger.warn('Invalid validation request', { errors: validationResult.error.errors })
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: 'userId is required',
|
||||
details: validationResult.error.errors,
|
||||
},
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
const { userId } = validationResult.data
|
||||
|
||||
const [existingUser] = await db.select().from(user).where(eq(user.id, userId)).limit(1)
|
||||
if (!existingUser) {
|
||||
logger.warn('[API VALIDATION] userId does not exist', { userId })
|
||||
return NextResponse.json({ error: 'User not found' }, { status: 403 })
|
||||
}
|
||||
|
||||
logger.info('[API VALIDATION] Validating usage limit', { userId })
|
||||
|
||||
const { isExceeded, currentUsage, limit } = await checkServerSideUsageLimits(userId)
|
||||
|
||||
logger.info('[API VALIDATION] Usage limit validated', {
|
||||
userId,
|
||||
currentUsage,
|
||||
limit,
|
||||
isExceeded,
|
||||
})
|
||||
|
||||
if (isExceeded) {
|
||||
logger.info('[API VALIDATION] Usage exceeded', { userId, currentUsage, limit })
|
||||
return new NextResponse(null, { status: 402 })
|
||||
}
|
||||
|
||||
return new NextResponse(null, { status: 200 })
|
||||
} catch (error) {
|
||||
logger.error('Error validating usage limit', { error })
|
||||
return NextResponse.json({ error: 'Failed to validate usage' }, { status: 500 })
|
||||
}
|
||||
})
|
||||
)
|
||||
)
|
||||
|
||||
@@ -2,6 +2,8 @@ import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||
import { TraceAttr } from '@/lib/copilot/generated/trace-attributes-v1'
|
||||
import { fetchGo } from '@/lib/copilot/request/go/fetch'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
|
||||
@@ -31,9 +33,15 @@ export const GET = withRouteHandler(async () => {
|
||||
|
||||
const userId = session.user.id
|
||||
|
||||
const res = await fetch(
|
||||
const res = await fetchGo(
|
||||
`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed?userId=${encodeURIComponent(userId)}`,
|
||||
{ method: 'GET', headers: copilotHeaders() }
|
||||
{
|
||||
method: 'GET',
|
||||
headers: copilotHeaders(),
|
||||
spanName: 'sim → go /api/tool-preferences/auto-allowed',
|
||||
operation: 'list_auto_allowed_tools',
|
||||
attributes: { [TraceAttr.UserId]: userId },
|
||||
}
|
||||
)
|
||||
|
||||
if (!res.ok) {
|
||||
@@ -67,10 +75,13 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
return NextResponse.json({ error: 'toolId must be a string' }, { status: 400 })
|
||||
}
|
||||
|
||||
const res = await fetch(`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed`, {
|
||||
const res = await fetchGo(`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed`, {
|
||||
method: 'POST',
|
||||
headers: copilotHeaders(),
|
||||
body: JSON.stringify({ userId, toolId: body.toolId }),
|
||||
spanName: 'sim → go /api/tool-preferences/auto-allowed',
|
||||
operation: 'add_auto_allowed_tool',
|
||||
attributes: { [TraceAttr.UserId]: userId, [TraceAttr.ToolId]: body.toolId },
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
@@ -108,9 +119,15 @@ export const DELETE = withRouteHandler(async (request: NextRequest) => {
|
||||
return NextResponse.json({ error: 'toolId query parameter is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const res = await fetch(
|
||||
const res = await fetchGo(
|
||||
`${SIM_AGENT_API_URL}/api/tool-preferences/auto-allowed?userId=${encodeURIComponent(userId)}&toolId=${encodeURIComponent(toolId)}`,
|
||||
{ method: 'DELETE', headers: copilotHeaders() }
|
||||
{
|
||||
method: 'DELETE',
|
||||
headers: copilotHeaders(),
|
||||
spanName: 'sim → go /api/tool-preferences/auto-allowed',
|
||||
operation: 'remove_auto_allowed_tool',
|
||||
attributes: { [TraceAttr.UserId]: userId, [TraceAttr.ToolId]: toolId },
|
||||
}
|
||||
)
|
||||
|
||||
if (!res.ok) {
|
||||
|
||||
@@ -1,9 +1,13 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { toError } from '@sim/utils/errors'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getLatestRunForStream } from '@/lib/copilot/async-runs/repository'
|
||||
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||
import { CopilotAbortOutcome } from '@/lib/copilot/generated/trace-attribute-values-v1'
|
||||
import { TraceAttr } from '@/lib/copilot/generated/trace-attributes-v1'
|
||||
import { TraceSpan } from '@/lib/copilot/generated/trace-spans-v1'
|
||||
import { fetchGo } from '@/lib/copilot/request/go/fetch'
|
||||
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request/http'
|
||||
import { withCopilotSpan, withIncomingGoSpan } from '@/lib/copilot/request/otel'
|
||||
import { abortActiveStream, waitForPendingChatStream } from '@/lib/copilot/request/session'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
@@ -12,81 +16,136 @@ const logger = createLogger('CopilotChatAbortAPI')
|
||||
const GO_EXPLICIT_ABORT_TIMEOUT_MS = 3000
|
||||
const STREAM_ABORT_SETTLE_TIMEOUT_MS = 8000
|
||||
|
||||
export const POST = withRouteHandler(async (request: Request) => {
|
||||
const { userId: authenticatedUserId, isAuthenticated } =
|
||||
await authenticateCopilotRequestSessionOnly()
|
||||
// POST /api/copilot/chat/abort — fires on user Stop; marks the Go
|
||||
// side aborted then waits for the prior stream to settle.
|
||||
export const POST = withRouteHandler((request: NextRequest) =>
|
||||
withIncomingGoSpan(
|
||||
request.headers,
|
||||
TraceSpan.CopilotChatAbortStream,
|
||||
undefined,
|
||||
async (rootSpan) => {
|
||||
const { userId: authenticatedUserId, isAuthenticated } =
|
||||
await authenticateCopilotRequestSessionOnly()
|
||||
|
||||
if (!isAuthenticated || !authenticatedUserId) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
if (!isAuthenticated || !authenticatedUserId) {
|
||||
rootSpan.setAttribute(TraceAttr.CopilotAbortOutcome, CopilotAbortOutcome.Unauthorized)
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json().catch((err) => {
|
||||
logger.warn('Abort request body parse failed; continuing with empty object', {
|
||||
error: toError(err).message,
|
||||
})
|
||||
return {}
|
||||
})
|
||||
const streamId = typeof body.streamId === 'string' ? body.streamId : ''
|
||||
let chatId = typeof body.chatId === 'string' ? body.chatId : ''
|
||||
|
||||
if (!streamId) {
|
||||
return NextResponse.json({ error: 'streamId is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!chatId) {
|
||||
const run = await getLatestRunForStream(streamId, authenticatedUserId).catch((err) => {
|
||||
logger.warn('getLatestRunForStream failed while resolving chatId for abort', {
|
||||
streamId,
|
||||
error: toError(err).message,
|
||||
const body = await request.json().catch((err) => {
|
||||
logger.warn('Abort request body parse failed; continuing with empty object', {
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
return {}
|
||||
})
|
||||
return null
|
||||
})
|
||||
if (run?.chatId) {
|
||||
chatId = run.chatId
|
||||
}
|
||||
}
|
||||
const streamId = typeof body.streamId === 'string' ? body.streamId : ''
|
||||
let chatId = typeof body.chatId === 'string' ? body.chatId : ''
|
||||
|
||||
try {
|
||||
const headers: Record<string, string> = { 'Content-Type': 'application/json' }
|
||||
if (env.COPILOT_API_KEY) {
|
||||
headers['x-api-key'] = env.COPILOT_API_KEY
|
||||
}
|
||||
const controller = new AbortController()
|
||||
const timeout = setTimeout(
|
||||
() => controller.abort('timeout:go_explicit_abort_fetch'),
|
||||
GO_EXPLICIT_ABORT_TIMEOUT_MS
|
||||
)
|
||||
const response = await fetch(`${SIM_AGENT_API_URL}/api/streams/explicit-abort`, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
signal: controller.signal,
|
||||
body: JSON.stringify({
|
||||
messageId: streamId,
|
||||
userId: authenticatedUserId,
|
||||
...(chatId ? { chatId } : {}),
|
||||
}),
|
||||
}).finally(() => clearTimeout(timeout))
|
||||
if (!response.ok) {
|
||||
throw new Error(`Explicit abort marker request failed: ${response.status}`)
|
||||
}
|
||||
} catch (err) {
|
||||
logger.warn('Explicit abort marker request failed; proceeding with local abort', {
|
||||
streamId,
|
||||
error: toError(err).message,
|
||||
})
|
||||
}
|
||||
if (!streamId) {
|
||||
rootSpan.setAttribute(TraceAttr.CopilotAbortOutcome, CopilotAbortOutcome.MissingStreamId)
|
||||
return NextResponse.json({ error: 'streamId is required' }, { status: 400 })
|
||||
}
|
||||
rootSpan.setAttributes({
|
||||
[TraceAttr.StreamId]: streamId,
|
||||
[TraceAttr.UserId]: authenticatedUserId,
|
||||
})
|
||||
|
||||
const aborted = await abortActiveStream(streamId)
|
||||
if (chatId) {
|
||||
const settled = await waitForPendingChatStream(chatId, STREAM_ABORT_SETTLE_TIMEOUT_MS, streamId)
|
||||
if (!settled) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Previous response is still shutting down', aborted, settled: false },
|
||||
{ status: 409 }
|
||||
)
|
||||
}
|
||||
return NextResponse.json({ aborted, settled: true })
|
||||
}
|
||||
if (!chatId) {
|
||||
const run = await getLatestRunForStream(streamId, authenticatedUserId).catch((err) => {
|
||||
logger.warn('getLatestRunForStream failed while resolving chatId for abort', {
|
||||
streamId,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
return null
|
||||
})
|
||||
if (run?.chatId) {
|
||||
chatId = run.chatId
|
||||
}
|
||||
}
|
||||
if (chatId) rootSpan.setAttribute(TraceAttr.ChatId, chatId)
|
||||
|
||||
return NextResponse.json({ aborted })
|
||||
})
|
||||
// Local abort before Go — lets the lifecycle classifier see
|
||||
// `signal.aborted` with an explicit-stop reason before Go's
|
||||
// context-canceled error propagates back. Go's endpoint runs
|
||||
// second for billing-ledger flush; Go's context is already
|
||||
// cancelled by then.
|
||||
const aborted = await abortActiveStream(streamId)
|
||||
rootSpan.setAttribute(TraceAttr.CopilotAbortLocalAborted, aborted)
|
||||
|
||||
let goAbortOk = false
|
||||
try {
|
||||
const headers: Record<string, string> = { 'Content-Type': 'application/json' }
|
||||
if (env.COPILOT_API_KEY) {
|
||||
headers['x-api-key'] = env.COPILOT_API_KEY
|
||||
}
|
||||
const controller = new AbortController()
|
||||
const timeout = setTimeout(
|
||||
() => controller.abort('timeout:go_explicit_abort_fetch'),
|
||||
GO_EXPLICIT_ABORT_TIMEOUT_MS
|
||||
)
|
||||
const response = await fetchGo(`${SIM_AGENT_API_URL}/api/streams/explicit-abort`, {
|
||||
method: 'POST',
|
||||
headers,
|
||||
signal: controller.signal,
|
||||
body: JSON.stringify({
|
||||
messageId: streamId,
|
||||
userId: authenticatedUserId,
|
||||
...(chatId ? { chatId } : {}),
|
||||
}),
|
||||
spanName: 'sim → go /api/streams/explicit-abort',
|
||||
operation: 'explicit_abort',
|
||||
attributes: {
|
||||
[TraceAttr.StreamId]: streamId,
|
||||
...(chatId ? { [TraceAttr.ChatId]: chatId } : {}),
|
||||
},
|
||||
}).finally(() => clearTimeout(timeout))
|
||||
if (!response.ok) {
|
||||
throw new Error(`Explicit abort marker request failed: ${response.status}`)
|
||||
}
|
||||
goAbortOk = true
|
||||
} catch (err) {
|
||||
logger.warn('Explicit abort marker request failed after local abort', {
|
||||
streamId,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
}
|
||||
rootSpan.setAttribute(TraceAttr.CopilotAbortGoMarkerOk, goAbortOk)
|
||||
|
||||
if (chatId) {
|
||||
const settled = await withCopilotSpan(
|
||||
TraceSpan.CopilotChatAbortWaitSettle,
|
||||
{
|
||||
[TraceAttr.ChatId]: chatId,
|
||||
[TraceAttr.StreamId]: streamId,
|
||||
[TraceAttr.SettleTimeoutMs]: STREAM_ABORT_SETTLE_TIMEOUT_MS,
|
||||
},
|
||||
async (settleSpan) => {
|
||||
const start = Date.now()
|
||||
const ok = await waitForPendingChatStream(
|
||||
chatId,
|
||||
STREAM_ABORT_SETTLE_TIMEOUT_MS,
|
||||
streamId
|
||||
)
|
||||
settleSpan.setAttributes({
|
||||
[TraceAttr.SettleWaitMs]: Date.now() - start,
|
||||
[TraceAttr.SettleCompleted]: ok,
|
||||
})
|
||||
return ok
|
||||
}
|
||||
)
|
||||
if (!settled) {
|
||||
rootSpan.setAttribute(TraceAttr.CopilotAbortOutcome, CopilotAbortOutcome.SettleTimeout)
|
||||
return NextResponse.json(
|
||||
{ error: 'Previous response is still shutting down', aborted, settled: false },
|
||||
{ status: 409 }
|
||||
)
|
||||
}
|
||||
rootSpan.setAttribute(TraceAttr.CopilotAbortOutcome, CopilotAbortOutcome.Settled)
|
||||
return NextResponse.json({ aborted, settled: true })
|
||||
}
|
||||
|
||||
rootSpan.setAttribute(TraceAttr.CopilotAbortOutcome, CopilotAbortOutcome.NoChatId)
|
||||
return NextResponse.json({ aborted })
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
@@ -2,6 +2,7 @@ import { db } from '@sim/db'
|
||||
import { copilotChats } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { toError } from '@sim/utils/errors'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@sim/workflow-authz'
|
||||
import { and, desc, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getLatestRunForStream } from '@/lib/copilot/async-runs/repository'
|
||||
@@ -17,7 +18,6 @@ import {
|
||||
import { readFilePreviewSessions } from '@/lib/copilot/request/session'
|
||||
import { readEvents } from '@/lib/copilot/request/session/buffer'
|
||||
import { toStreamBatchEvent } from '@/lib/copilot/request/session/types'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||
import { assertActiveWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
const logger = createLogger('CopilotChatAPI')
|
||||
|
||||
@@ -7,6 +7,10 @@ import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { normalizeMessage, type PersistedMessage } from '@/lib/copilot/chat/persisted-message'
|
||||
import { CopilotStopOutcome } from '@/lib/copilot/generated/trace-attribute-values-v1'
|
||||
import { TraceAttr } from '@/lib/copilot/generated/trace-attributes-v1'
|
||||
import { TraceSpan } from '@/lib/copilot/generated/trace-spans-v1'
|
||||
import { withIncomingGoSpan } from '@/lib/copilot/request/otel'
|
||||
import { taskPubSub } from '@/lib/copilot/tasks'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
|
||||
@@ -48,6 +52,8 @@ const ContentBlockSchema = z.object({
|
||||
lifecycle: z.enum(['start', 'end']).optional(),
|
||||
status: z.enum(['complete', 'error', 'cancelled']).optional(),
|
||||
toolCall: StoredToolCallSchema.optional(),
|
||||
timestamp: z.number().optional(),
|
||||
endedAt: z.number().optional(),
|
||||
})
|
||||
|
||||
const StopSchema = z.object({
|
||||
@@ -55,95 +61,121 @@ const StopSchema = z.object({
|
||||
streamId: z.string(),
|
||||
content: z.string(),
|
||||
contentBlocks: z.array(ContentBlockSchema).optional(),
|
||||
// Optional for older clients; when present, flows into msg.requestId
|
||||
// so the UI's copy-request-ID button survives a stopped turn.
|
||||
requestId: z.string().optional(),
|
||||
})
|
||||
|
||||
/**
|
||||
* POST /api/copilot/chat/stop
|
||||
* Persists partial assistant content when the user stops a stream mid-response.
|
||||
* Clears conversationId so the server-side onComplete won't duplicate the message.
|
||||
* The chat stream lock is intentionally left alone here; it is released only once
|
||||
* the aborted server stream actually unwinds.
|
||||
*/
|
||||
export const POST = withRouteHandler(async (req: NextRequest) => {
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
// POST /api/copilot/chat/stop — persists partial assistant content
|
||||
// when the user stops mid-stream. Lock release is handled by the
|
||||
// aborted server stream unwinding, not this handler.
|
||||
export const POST = withRouteHandler((req: NextRequest) =>
|
||||
withIncomingGoSpan(req.headers, TraceSpan.CopilotChatStopStream, undefined, async (span) => {
|
||||
try {
|
||||
const session = await getSession()
|
||||
if (!session?.user?.id) {
|
||||
span.setAttribute(TraceAttr.CopilotStopOutcome, CopilotStopOutcome.Unauthorized)
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const { chatId, streamId, content, contentBlocks } = StopSchema.parse(await req.json())
|
||||
const [row] = await db
|
||||
.select({
|
||||
workspaceId: copilotChats.workspaceId,
|
||||
messages: copilotChats.messages,
|
||||
const { chatId, streamId, content, contentBlocks, requestId } = StopSchema.parse(
|
||||
await req.json()
|
||||
)
|
||||
span.setAttributes({
|
||||
[TraceAttr.ChatId]: chatId,
|
||||
[TraceAttr.StreamId]: streamId,
|
||||
[TraceAttr.UserId]: session.user.id,
|
||||
[TraceAttr.CopilotStopContentLength]: content.length,
|
||||
[TraceAttr.CopilotStopBlocksCount]: contentBlocks?.length ?? 0,
|
||||
...(requestId ? { [TraceAttr.RequestId]: requestId } : {}),
|
||||
})
|
||||
.from(copilotChats)
|
||||
.where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, session.user.id)))
|
||||
.limit(1)
|
||||
|
||||
if (!row) {
|
||||
const [row] = await db
|
||||
.select({
|
||||
workspaceId: copilotChats.workspaceId,
|
||||
messages: copilotChats.messages,
|
||||
})
|
||||
.from(copilotChats)
|
||||
.where(and(eq(copilotChats.id, chatId), eq(copilotChats.userId, session.user.id)))
|
||||
.limit(1)
|
||||
|
||||
if (!row) {
|
||||
span.setAttribute(TraceAttr.CopilotStopOutcome, CopilotStopOutcome.ChatNotFound)
|
||||
return NextResponse.json({ success: true })
|
||||
}
|
||||
|
||||
const messages: Record<string, unknown>[] = Array.isArray(row.messages) ? row.messages : []
|
||||
const userIdx = messages.findIndex((message) => message.id === streamId)
|
||||
const alreadyHasResponse =
|
||||
userIdx >= 0 &&
|
||||
userIdx + 1 < messages.length &&
|
||||
(messages[userIdx + 1] as Record<string, unknown>)?.role === 'assistant'
|
||||
const canAppendAssistant =
|
||||
userIdx >= 0 && userIdx === messages.length - 1 && !alreadyHasResponse
|
||||
|
||||
const updateWhere = and(
|
||||
eq(copilotChats.id, chatId),
|
||||
eq(copilotChats.userId, session.user.id),
|
||||
eq(copilotChats.conversationId, streamId)
|
||||
)
|
||||
|
||||
const setClause: Record<string, unknown> = {
|
||||
conversationId: null,
|
||||
updatedAt: new Date(),
|
||||
}
|
||||
|
||||
const hasContent = content.trim().length > 0
|
||||
const hasBlocks = Array.isArray(contentBlocks) && contentBlocks.length > 0
|
||||
const synthesizedStoppedBlocks = hasBlocks
|
||||
? contentBlocks
|
||||
: hasContent
|
||||
? [{ type: 'text', channel: 'assistant', content }, { type: 'stopped' }]
|
||||
: [{ type: 'stopped' }]
|
||||
if (canAppendAssistant) {
|
||||
const normalized = normalizeMessage({
|
||||
id: generateId(),
|
||||
role: 'assistant',
|
||||
content,
|
||||
timestamp: new Date().toISOString(),
|
||||
contentBlocks: synthesizedStoppedBlocks,
|
||||
// Persist so the UI copy-request-id button survives refetch.
|
||||
...(requestId ? { requestId } : {}),
|
||||
})
|
||||
const assistantMessage: PersistedMessage = normalized
|
||||
setClause.messages = sql`${copilotChats.messages} || ${JSON.stringify([assistantMessage])}::jsonb`
|
||||
}
|
||||
span.setAttribute(TraceAttr.CopilotStopAppendedAssistant, canAppendAssistant)
|
||||
|
||||
const [updated] = await db
|
||||
.update(copilotChats)
|
||||
.set(setClause)
|
||||
.where(updateWhere)
|
||||
.returning({ workspaceId: copilotChats.workspaceId })
|
||||
|
||||
if (updated?.workspaceId) {
|
||||
taskPubSub?.publishStatusChanged({
|
||||
workspaceId: updated.workspaceId,
|
||||
chatId,
|
||||
type: 'completed',
|
||||
})
|
||||
}
|
||||
|
||||
span.setAttribute(
|
||||
TraceAttr.CopilotStopOutcome,
|
||||
updated ? CopilotStopOutcome.Persisted : CopilotStopOutcome.NoMatchingRow
|
||||
)
|
||||
return NextResponse.json({ success: true })
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
span.setAttribute(TraceAttr.CopilotStopOutcome, CopilotStopOutcome.ValidationError)
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid request data', details: error.errors },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
logger.error('Error stopping chat stream:', error)
|
||||
span.setAttribute(TraceAttr.CopilotStopOutcome, CopilotStopOutcome.InternalError)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
|
||||
const messages: Record<string, unknown>[] = Array.isArray(row.messages) ? row.messages : []
|
||||
const userIdx = messages.findIndex((message) => message.id === streamId)
|
||||
const alreadyHasResponse =
|
||||
userIdx >= 0 &&
|
||||
userIdx + 1 < messages.length &&
|
||||
(messages[userIdx + 1] as Record<string, unknown>)?.role === 'assistant'
|
||||
const canAppendAssistant =
|
||||
userIdx >= 0 && userIdx === messages.length - 1 && !alreadyHasResponse
|
||||
|
||||
const updateWhere = and(
|
||||
eq(copilotChats.id, chatId),
|
||||
eq(copilotChats.userId, session.user.id),
|
||||
eq(copilotChats.conversationId, streamId)
|
||||
)
|
||||
|
||||
const setClause: Record<string, unknown> = {
|
||||
conversationId: null,
|
||||
updatedAt: new Date(),
|
||||
}
|
||||
|
||||
const hasContent = content.trim().length > 0
|
||||
const hasBlocks = Array.isArray(contentBlocks) && contentBlocks.length > 0
|
||||
const synthesizedStoppedBlocks = hasBlocks
|
||||
? contentBlocks
|
||||
: hasContent
|
||||
? [{ type: 'text', channel: 'assistant', content }, { type: 'stopped' }]
|
||||
: [{ type: 'stopped' }]
|
||||
if (canAppendAssistant) {
|
||||
const normalized = normalizeMessage({
|
||||
id: generateId(),
|
||||
role: 'assistant',
|
||||
content,
|
||||
timestamp: new Date().toISOString(),
|
||||
contentBlocks: synthesizedStoppedBlocks,
|
||||
})
|
||||
const assistantMessage: PersistedMessage = normalized
|
||||
setClause.messages = sql`${copilotChats.messages} || ${JSON.stringify([assistantMessage])}::jsonb`
|
||||
}
|
||||
|
||||
const [updated] = await db
|
||||
.update(copilotChats)
|
||||
.set(setClause)
|
||||
.where(updateWhere)
|
||||
.returning({ workspaceId: copilotChats.workspaceId })
|
||||
|
||||
if (updated?.workspaceId) {
|
||||
taskPubSub?.publishStatusChanged({
|
||||
workspaceId: updated.workspaceId,
|
||||
chatId,
|
||||
type: 'completed',
|
||||
})
|
||||
}
|
||||
|
||||
return NextResponse.json({ success: true })
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
return NextResponse.json({ error: 'Invalid request' }, { status: 400 })
|
||||
}
|
||||
logger.error('Error stopping chat stream:', error)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
})
|
||||
})
|
||||
)
|
||||
|
||||
@@ -38,6 +38,7 @@ vi.mock('@/lib/copilot/request/session', () => ({
|
||||
}),
|
||||
encodeSSEEnvelope: (event: Record<string, unknown>) =>
|
||||
new TextEncoder().encode(`data: ${JSON.stringify(event)}\n\n`),
|
||||
encodeSSEComment: (comment: string) => new TextEncoder().encode(`: ${comment}\n\n`),
|
||||
SSE_RESPONSE_HEADERS: {
|
||||
'Content-Type': 'text/event-stream',
|
||||
},
|
||||
@@ -132,6 +133,7 @@ describe('copilot chat stream replay route', () => {
|
||||
)
|
||||
|
||||
const chunks = await readAllChunks(response)
|
||||
expect(chunks[0]).toBe(': accepted\n\n')
|
||||
expect(chunks.join('')).toContain(
|
||||
JSON.stringify({
|
||||
status: MothershipStreamV1CompletionStatus.cancelled,
|
||||
@@ -160,4 +162,42 @@ describe('copilot chat stream replay route', () => {
|
||||
expect(body).toContain('"code":"resume_run_unavailable"')
|
||||
expect(body).toContain(`"type":"${MothershipStreamV1EventType.complete}"`)
|
||||
})
|
||||
|
||||
it('uses the latest live request id for synthetic terminal replay events', async () => {
|
||||
getLatestRunForStream
|
||||
.mockResolvedValueOnce({
|
||||
status: 'active',
|
||||
executionId: 'exec-1',
|
||||
id: 'run-1',
|
||||
})
|
||||
.mockResolvedValueOnce({
|
||||
status: 'cancelled',
|
||||
executionId: 'exec-1',
|
||||
id: 'run-1',
|
||||
})
|
||||
readEvents
|
||||
.mockResolvedValueOnce([
|
||||
{
|
||||
stream: { streamId: 'stream-1', cursor: '1' },
|
||||
seq: 1,
|
||||
trace: { requestId: 'req-live-123' },
|
||||
type: MothershipStreamV1EventType.text,
|
||||
payload: {
|
||||
channel: 'assistant',
|
||||
text: 'hello',
|
||||
},
|
||||
},
|
||||
])
|
||||
.mockResolvedValueOnce([])
|
||||
|
||||
const response = await GET(
|
||||
new NextRequest('http://localhost:3000/api/copilot/chat/stream?streamId=stream-1&after=0')
|
||||
)
|
||||
|
||||
const chunks = await readAllChunks(response)
|
||||
const terminalChunk = chunks[chunks.length - 1] ?? ''
|
||||
expect(terminalChunk).toContain(`"type":"${MothershipStreamV1EventType.complete}"`)
|
||||
expect(terminalChunk).toContain('"requestId":"req-live-123"')
|
||||
expect(terminalChunk).toContain('"status":"cancelled"')
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { context as otelContext, trace } from '@opentelemetry/api'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { toError } from '@sim/utils/errors'
|
||||
import { sleep } from '@sim/utils/helpers'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getLatestRunForStream } from '@/lib/copilot/async-runs/repository'
|
||||
@@ -7,10 +7,19 @@ import {
|
||||
MothershipStreamV1CompletionStatus,
|
||||
MothershipStreamV1EventType,
|
||||
} from '@/lib/copilot/generated/mothership-stream-v1'
|
||||
import {
|
||||
CopilotResumeOutcome,
|
||||
CopilotTransport,
|
||||
} from '@/lib/copilot/generated/trace-attribute-values-v1'
|
||||
import { TraceAttr } from '@/lib/copilot/generated/trace-attributes-v1'
|
||||
import { TraceSpan } from '@/lib/copilot/generated/trace-spans-v1'
|
||||
import { contextFromRequestHeaders } from '@/lib/copilot/request/go/propagation'
|
||||
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request/http'
|
||||
import { getCopilotTracer, markSpanForError } from '@/lib/copilot/request/otel'
|
||||
import {
|
||||
checkForReplayGap,
|
||||
createEvent,
|
||||
encodeSSEComment,
|
||||
encodeSSEEnvelope,
|
||||
readEvents,
|
||||
readFilePreviewSessions,
|
||||
@@ -23,8 +32,28 @@ export const maxDuration = 3600
|
||||
|
||||
const logger = createLogger('CopilotChatStreamAPI')
|
||||
const POLL_INTERVAL_MS = 250
|
||||
const REPLAY_KEEPALIVE_INTERVAL_MS = 15_000
|
||||
const MAX_STREAM_MS = 60 * 60 * 1000
|
||||
|
||||
function extractCanonicalRequestId(value: unknown): string {
|
||||
return typeof value === 'string' && value.length > 0 ? value : ''
|
||||
}
|
||||
|
||||
function extractRunRequestId(run: { requestContext?: unknown } | null | undefined): string {
|
||||
if (!run || typeof run.requestContext !== 'object' || run.requestContext === null) {
|
||||
return ''
|
||||
}
|
||||
const requestContext = run.requestContext as Record<string, unknown>
|
||||
return (
|
||||
extractCanonicalRequestId(requestContext.requestId) ||
|
||||
extractCanonicalRequestId(requestContext.simRequestId)
|
||||
)
|
||||
}
|
||||
|
||||
function extractEnvelopeRequestId(envelope: { trace?: { requestId?: unknown } }): string {
|
||||
return extractCanonicalRequestId(envelope.trace?.requestId)
|
||||
}
|
||||
|
||||
function isTerminalStatus(
|
||||
status: string | null | undefined
|
||||
): status is MothershipStreamV1CompletionStatus {
|
||||
@@ -42,10 +71,12 @@ function buildResumeTerminalEnvelopes(options: {
|
||||
message?: string
|
||||
code: string
|
||||
reason?: string
|
||||
requestId?: string
|
||||
}) {
|
||||
const baseSeq = Number(options.afterCursor || '0')
|
||||
const seq = Number.isFinite(baseSeq) ? baseSeq : 0
|
||||
const envelopes: ReturnType<typeof createEvent>[] = []
|
||||
const rid = options.requestId ?? ''
|
||||
|
||||
if (options.status === MothershipStreamV1CompletionStatus.error) {
|
||||
envelopes.push(
|
||||
@@ -53,7 +84,7 @@ function buildResumeTerminalEnvelopes(options: {
|
||||
streamId: options.streamId,
|
||||
cursor: String(seq + 1),
|
||||
seq: seq + 1,
|
||||
requestId: '',
|
||||
requestId: rid,
|
||||
type: MothershipStreamV1EventType.error,
|
||||
payload: {
|
||||
message: options.message || 'Stream recovery failed before completion.',
|
||||
@@ -68,7 +99,7 @@ function buildResumeTerminalEnvelopes(options: {
|
||||
streamId: options.streamId,
|
||||
cursor: String(seq + envelopes.length + 1),
|
||||
seq: seq + envelopes.length + 1,
|
||||
requestId: '',
|
||||
requestId: rid,
|
||||
type: MothershipStreamV1EventType.complete,
|
||||
payload: {
|
||||
status: options.status,
|
||||
@@ -97,10 +128,77 @@ export const GET = withRouteHandler(async (request: NextRequest) => {
|
||||
return NextResponse.json({ error: 'streamId is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
// Root span for the whole resume/reconnect request. In stream mode the
|
||||
// work happens inside `ReadableStream.start`, which the Node runtime
|
||||
// invokes after this function returns and OUTSIDE the AsyncLocalStorage
|
||||
// scope installed by `startActiveSpan`. We therefore start the span
|
||||
// manually, capture its context, and re-enter that context inside the
|
||||
// stream callback so every nested `withCopilotSpan` / `withDbSpan` call
|
||||
// attaches to this root.
|
||||
//
|
||||
// `contextFromRequestHeaders` extracts the W3C `traceparent` the
|
||||
// client echoed (set via `streamTraceparentRef` on Sim's chat POST
|
||||
// response), so the resume span becomes a child of the original
|
||||
// chat's `gen_ai.agent.execute` trace instead of a disconnected
|
||||
// new root. On reconnects after page reload (client ref was wiped)
|
||||
// the header is absent and extraction leaves the ambient context
|
||||
// alone → the resume span becomes its own root. Same as pre-
|
||||
// linking behavior; no regression.
|
||||
const incomingContext = contextFromRequestHeaders(request.headers)
|
||||
const rootSpan = getCopilotTracer().startSpan(
|
||||
TraceSpan.CopilotResumeRequest,
|
||||
{
|
||||
attributes: {
|
||||
[TraceAttr.CopilotTransport]: batchMode ? CopilotTransport.Batch : CopilotTransport.Stream,
|
||||
[TraceAttr.StreamId]: streamId,
|
||||
[TraceAttr.UserId]: authenticatedUserId,
|
||||
[TraceAttr.CopilotResumeAfterCursor]: afterCursor || '0',
|
||||
},
|
||||
},
|
||||
incomingContext
|
||||
)
|
||||
const rootContext = trace.setSpan(incomingContext, rootSpan)
|
||||
|
||||
try {
|
||||
return await otelContext.with(rootContext, () =>
|
||||
handleResumeRequestBody({
|
||||
request,
|
||||
streamId,
|
||||
afterCursor,
|
||||
batchMode,
|
||||
authenticatedUserId,
|
||||
rootSpan,
|
||||
rootContext,
|
||||
})
|
||||
)
|
||||
} catch (err) {
|
||||
markSpanForError(rootSpan, err)
|
||||
rootSpan.end()
|
||||
throw err
|
||||
}
|
||||
})
|
||||
|
||||
async function handleResumeRequestBody({
|
||||
request,
|
||||
streamId,
|
||||
afterCursor,
|
||||
batchMode,
|
||||
authenticatedUserId,
|
||||
rootSpan,
|
||||
rootContext,
|
||||
}: {
|
||||
request: NextRequest
|
||||
streamId: string
|
||||
afterCursor: string
|
||||
batchMode: boolean
|
||||
authenticatedUserId: string
|
||||
rootSpan: import('@opentelemetry/api').Span
|
||||
rootContext: import('@opentelemetry/api').Context
|
||||
}) {
|
||||
const run = await getLatestRunForStream(streamId, authenticatedUserId).catch((err) => {
|
||||
logger.warn('Failed to fetch latest run for stream', {
|
||||
streamId,
|
||||
error: toError(err).message,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
return null
|
||||
})
|
||||
@@ -112,8 +210,11 @@ export const GET = withRouteHandler(async (request: NextRequest) => {
|
||||
runStatus: run?.status,
|
||||
})
|
||||
if (!run) {
|
||||
rootSpan.setAttribute(TraceAttr.CopilotResumeOutcome, CopilotResumeOutcome.StreamNotFound)
|
||||
rootSpan.end()
|
||||
return NextResponse.json({ error: 'Stream not found' }, { status: 404 })
|
||||
}
|
||||
rootSpan.setAttribute(TraceAttr.CopilotRunStatus, run.status)
|
||||
|
||||
if (batchMode) {
|
||||
const afterSeq = afterCursor || '0'
|
||||
@@ -122,7 +223,7 @@ export const GET = withRouteHandler(async (request: NextRequest) => {
|
||||
readFilePreviewSessions(streamId).catch((error) => {
|
||||
logger.warn('Failed to read preview sessions for stream batch', {
|
||||
streamId,
|
||||
error: toError(error).message,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
return []
|
||||
}),
|
||||
@@ -135,6 +236,12 @@ export const GET = withRouteHandler(async (request: NextRequest) => {
|
||||
previewSessionCount: previewSessions.length,
|
||||
runStatus: run.status,
|
||||
})
|
||||
rootSpan.setAttributes({
|
||||
[TraceAttr.CopilotResumeOutcome]: CopilotResumeOutcome.BatchDelivered,
|
||||
[TraceAttr.CopilotResumeEventCount]: batchEvents.length,
|
||||
[TraceAttr.CopilotResumePreviewSessionCount]: previewSessions.length,
|
||||
})
|
||||
rootSpan.end()
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
events: batchEvents,
|
||||
@@ -144,165 +251,230 @@ export const GET = withRouteHandler(async (request: NextRequest) => {
|
||||
}
|
||||
|
||||
const startTime = Date.now()
|
||||
let totalEventsFlushed = 0
|
||||
let pollIterations = 0
|
||||
|
||||
const stream = new ReadableStream({
|
||||
async start(controller) {
|
||||
let cursor = afterCursor || '0'
|
||||
let controllerClosed = false
|
||||
let sawTerminalEvent = false
|
||||
|
||||
const closeController = () => {
|
||||
if (controllerClosed) return
|
||||
controllerClosed = true
|
||||
try {
|
||||
controller.close()
|
||||
} catch {
|
||||
// Controller already closed by runtime/client
|
||||
}
|
||||
}
|
||||
|
||||
const enqueueEvent = (payload: unknown) => {
|
||||
if (controllerClosed) return false
|
||||
try {
|
||||
controller.enqueue(encodeSSEEnvelope(payload))
|
||||
return true
|
||||
} catch {
|
||||
controllerClosed = true
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
const abortListener = () => {
|
||||
controllerClosed = true
|
||||
}
|
||||
request.signal.addEventListener('abort', abortListener, { once: true })
|
||||
|
||||
const flushEvents = async () => {
|
||||
const events = await readEvents(streamId, cursor)
|
||||
if (events.length > 0) {
|
||||
logger.info('[Resume] Flushing events', {
|
||||
streamId,
|
||||
afterCursor: cursor,
|
||||
eventCount: events.length,
|
||||
})
|
||||
}
|
||||
for (const envelope of events) {
|
||||
cursor = envelope.stream.cursor ?? String(envelope.seq)
|
||||
if (envelope.type === MothershipStreamV1EventType.complete) {
|
||||
sawTerminalEvent = true
|
||||
}
|
||||
if (!enqueueEvent(envelope)) {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const emitTerminalIfMissing = (
|
||||
status: MothershipStreamV1CompletionStatus,
|
||||
options?: { message?: string; code: string; reason?: string }
|
||||
) => {
|
||||
if (controllerClosed || sawTerminalEvent) {
|
||||
return
|
||||
}
|
||||
for (const envelope of buildResumeTerminalEnvelopes({
|
||||
streamId,
|
||||
afterCursor: cursor,
|
||||
status,
|
||||
message: options?.message,
|
||||
code: options?.code ?? 'resume_terminal',
|
||||
reason: options?.reason,
|
||||
})) {
|
||||
cursor = envelope.stream.cursor ?? String(envelope.seq)
|
||||
if (envelope.type === MothershipStreamV1EventType.complete) {
|
||||
sawTerminalEvent = true
|
||||
}
|
||||
if (!enqueueEvent(envelope)) {
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const gap = await checkForReplayGap(streamId, afterCursor)
|
||||
if (gap) {
|
||||
for (const envelope of gap.envelopes) {
|
||||
enqueueEvent(envelope)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
await flushEvents()
|
||||
|
||||
while (!controllerClosed && Date.now() - startTime < MAX_STREAM_MS) {
|
||||
const currentRun = await getLatestRunForStream(streamId, authenticatedUserId).catch(
|
||||
(err) => {
|
||||
logger.warn('Failed to poll latest run for stream', {
|
||||
streamId,
|
||||
error: toError(err).message,
|
||||
})
|
||||
return null
|
||||
}
|
||||
)
|
||||
if (!currentRun) {
|
||||
emitTerminalIfMissing(MothershipStreamV1CompletionStatus.error, {
|
||||
message: 'The stream could not be recovered because its run metadata is unavailable.',
|
||||
code: 'resume_run_unavailable',
|
||||
reason: 'run_unavailable',
|
||||
})
|
||||
break
|
||||
}
|
||||
|
||||
await flushEvents()
|
||||
|
||||
if (controllerClosed) {
|
||||
break
|
||||
}
|
||||
if (isTerminalStatus(currentRun.status)) {
|
||||
emitTerminalIfMissing(currentRun.status, {
|
||||
message:
|
||||
currentRun.status === MothershipStreamV1CompletionStatus.error
|
||||
? typeof currentRun.error === 'string'
|
||||
? currentRun.error
|
||||
: 'The recovered stream ended with an error.'
|
||||
: undefined,
|
||||
code: 'resume_terminal_status',
|
||||
reason: 'terminal_status',
|
||||
})
|
||||
break
|
||||
}
|
||||
|
||||
if (request.signal.aborted) {
|
||||
controllerClosed = true
|
||||
break
|
||||
}
|
||||
|
||||
await sleep(POLL_INTERVAL_MS)
|
||||
}
|
||||
if (!controllerClosed && Date.now() - startTime >= MAX_STREAM_MS) {
|
||||
emitTerminalIfMissing(MothershipStreamV1CompletionStatus.error, {
|
||||
message: 'The stream recovery timed out before completion.',
|
||||
code: 'resume_timeout',
|
||||
reason: 'timeout',
|
||||
})
|
||||
}
|
||||
} catch (error) {
|
||||
if (!controllerClosed && !request.signal.aborted) {
|
||||
logger.warn('Stream replay failed', {
|
||||
streamId,
|
||||
error: toError(error).message,
|
||||
})
|
||||
emitTerminalIfMissing(MothershipStreamV1CompletionStatus.error, {
|
||||
message: 'The stream replay failed before completion.',
|
||||
code: 'resume_internal',
|
||||
reason: 'stream_replay_failed',
|
||||
})
|
||||
}
|
||||
} finally {
|
||||
request.signal.removeEventListener('abort', abortListener)
|
||||
closeController()
|
||||
}
|
||||
// Re-enter the root OTel context so any `withCopilotSpan` call below
|
||||
// (inside flushEvents/checkForReplayGap/etc.) parents under
|
||||
// copilot.resume.request instead of becoming an orphan.
|
||||
return otelContext.with(rootContext, () => startInner(controller))
|
||||
},
|
||||
})
|
||||
|
||||
async function startInner(controller: ReadableStreamDefaultController) {
|
||||
let cursor = afterCursor || '0'
|
||||
let controllerClosed = false
|
||||
let sawTerminalEvent = false
|
||||
let currentRequestId = extractRunRequestId(run)
|
||||
let lastWriteTime = Date.now()
|
||||
// Stamp the logical request id + chat id on the resume root as soon
|
||||
// as we resolve them from the run row, so TraceQL joins work on
|
||||
// resume legs the same way they do on the original POST.
|
||||
if (currentRequestId) {
|
||||
rootSpan.setAttribute(TraceAttr.RequestId, currentRequestId)
|
||||
rootSpan.setAttribute(TraceAttr.SimRequestId, currentRequestId)
|
||||
}
|
||||
if (run?.chatId) {
|
||||
rootSpan.setAttribute(TraceAttr.ChatId, run.chatId)
|
||||
}
|
||||
|
||||
const closeController = () => {
|
||||
if (controllerClosed) return
|
||||
controllerClosed = true
|
||||
try {
|
||||
controller.close()
|
||||
} catch {
|
||||
// Controller already closed by runtime/client
|
||||
}
|
||||
}
|
||||
|
||||
const enqueueEvent = (payload: unknown) => {
|
||||
if (controllerClosed) return false
|
||||
try {
|
||||
controller.enqueue(encodeSSEEnvelope(payload))
|
||||
lastWriteTime = Date.now()
|
||||
return true
|
||||
} catch {
|
||||
controllerClosed = true
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
const enqueueComment = (comment: string) => {
|
||||
if (controllerClosed) return false
|
||||
try {
|
||||
controller.enqueue(encodeSSEComment(comment))
|
||||
lastWriteTime = Date.now()
|
||||
return true
|
||||
} catch {
|
||||
controllerClosed = true
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
const abortListener = () => {
|
||||
controllerClosed = true
|
||||
}
|
||||
request.signal.addEventListener('abort', abortListener, { once: true })
|
||||
|
||||
const flushEvents = async () => {
|
||||
const events = await readEvents(streamId, cursor)
|
||||
if (events.length > 0) {
|
||||
logger.debug('[Resume] Flushing events', {
|
||||
streamId,
|
||||
afterCursor: cursor,
|
||||
eventCount: events.length,
|
||||
})
|
||||
}
|
||||
for (const envelope of events) {
|
||||
if (!enqueueEvent(envelope)) {
|
||||
break
|
||||
}
|
||||
totalEventsFlushed += 1
|
||||
cursor = envelope.stream.cursor ?? String(envelope.seq)
|
||||
currentRequestId = extractEnvelopeRequestId(envelope) || currentRequestId
|
||||
if (envelope.type === MothershipStreamV1EventType.complete) {
|
||||
sawTerminalEvent = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const emitTerminalIfMissing = (
|
||||
status: MothershipStreamV1CompletionStatus,
|
||||
options?: { message?: string; code: string; reason?: string }
|
||||
) => {
|
||||
if (controllerClosed || sawTerminalEvent) {
|
||||
return
|
||||
}
|
||||
for (const envelope of buildResumeTerminalEnvelopes({
|
||||
streamId,
|
||||
afterCursor: cursor,
|
||||
status,
|
||||
message: options?.message,
|
||||
code: options?.code ?? 'resume_terminal',
|
||||
reason: options?.reason,
|
||||
requestId: currentRequestId,
|
||||
})) {
|
||||
if (!enqueueEvent(envelope)) {
|
||||
break
|
||||
}
|
||||
cursor = envelope.stream.cursor ?? String(envelope.seq)
|
||||
if (envelope.type === MothershipStreamV1EventType.complete) {
|
||||
sawTerminalEvent = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
enqueueComment('accepted')
|
||||
|
||||
const gap = await checkForReplayGap(streamId, afterCursor, currentRequestId)
|
||||
if (gap) {
|
||||
for (const envelope of gap.envelopes) {
|
||||
if (!enqueueEvent(envelope)) {
|
||||
break
|
||||
}
|
||||
cursor = envelope.stream.cursor ?? String(envelope.seq)
|
||||
currentRequestId = extractEnvelopeRequestId(envelope) || currentRequestId
|
||||
if (envelope.type === MothershipStreamV1EventType.complete) {
|
||||
sawTerminalEvent = true
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
await flushEvents()
|
||||
|
||||
while (!controllerClosed && Date.now() - startTime < MAX_STREAM_MS) {
|
||||
pollIterations += 1
|
||||
const currentRun = await getLatestRunForStream(streamId, authenticatedUserId).catch(
|
||||
(err) => {
|
||||
logger.warn('Failed to poll latest run for stream', {
|
||||
streamId,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
return null
|
||||
}
|
||||
)
|
||||
if (!currentRun) {
|
||||
emitTerminalIfMissing(MothershipStreamV1CompletionStatus.error, {
|
||||
message: 'The stream could not be recovered because its run metadata is unavailable.',
|
||||
code: 'resume_run_unavailable',
|
||||
reason: 'run_unavailable',
|
||||
})
|
||||
break
|
||||
}
|
||||
|
||||
currentRequestId = extractRunRequestId(currentRun) || currentRequestId
|
||||
|
||||
await flushEvents()
|
||||
|
||||
if (controllerClosed) {
|
||||
break
|
||||
}
|
||||
if (isTerminalStatus(currentRun.status)) {
|
||||
emitTerminalIfMissing(currentRun.status, {
|
||||
message:
|
||||
currentRun.status === MothershipStreamV1CompletionStatus.error
|
||||
? typeof currentRun.error === 'string'
|
||||
? currentRun.error
|
||||
: 'The recovered stream ended with an error.'
|
||||
: undefined,
|
||||
code: 'resume_terminal_status',
|
||||
reason: 'terminal_status',
|
||||
})
|
||||
break
|
||||
}
|
||||
|
||||
if (request.signal.aborted) {
|
||||
controllerClosed = true
|
||||
break
|
||||
}
|
||||
|
||||
if (Date.now() - lastWriteTime >= REPLAY_KEEPALIVE_INTERVAL_MS) {
|
||||
enqueueComment('keepalive')
|
||||
}
|
||||
|
||||
await sleep(POLL_INTERVAL_MS)
|
||||
}
|
||||
if (!controllerClosed && Date.now() - startTime >= MAX_STREAM_MS) {
|
||||
emitTerminalIfMissing(MothershipStreamV1CompletionStatus.error, {
|
||||
message: 'The stream recovery timed out before completion.',
|
||||
code: 'resume_timeout',
|
||||
reason: 'timeout',
|
||||
})
|
||||
}
|
||||
} catch (error) {
|
||||
if (!controllerClosed && !request.signal.aborted) {
|
||||
logger.warn('Stream replay failed', {
|
||||
streamId,
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
})
|
||||
emitTerminalIfMissing(MothershipStreamV1CompletionStatus.error, {
|
||||
message: 'The stream replay failed before completion.',
|
||||
code: 'resume_internal',
|
||||
reason: 'stream_replay_failed',
|
||||
})
|
||||
}
|
||||
markSpanForError(rootSpan, error)
|
||||
} finally {
|
||||
request.signal.removeEventListener('abort', abortListener)
|
||||
closeController()
|
||||
rootSpan.setAttributes({
|
||||
[TraceAttr.CopilotResumeOutcome]: sawTerminalEvent
|
||||
? CopilotResumeOutcome.TerminalDelivered
|
||||
: controllerClosed
|
||||
? CopilotResumeOutcome.ClientDisconnected
|
||||
: CopilotResumeOutcome.EndedWithoutTerminal,
|
||||
[TraceAttr.CopilotResumeEventCount]: totalEventsFlushed,
|
||||
[TraceAttr.CopilotResumePollIterations]: pollIterations,
|
||||
[TraceAttr.CopilotResumeDurationMs]: Date.now() - startTime,
|
||||
})
|
||||
rootSpan.end()
|
||||
}
|
||||
}
|
||||
|
||||
return new Response(stream, { headers: SSE_RESPONSE_HEADERS })
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import { copilotChats, permissions, workflow, workspace } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@sim/workflow-authz'
|
||||
import { and, desc, eq, isNull, or, sql } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
@@ -13,7 +14,6 @@ import {
|
||||
} from '@/lib/copilot/request/http'
|
||||
import { taskPubSub } from '@/lib/copilot/tasks'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||
import { assertActiveWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
const logger = createLogger('CopilotChatsListAPI')
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { authMockFns, workflowsUtilsMock, workflowsUtilsMockFns } from '@sim/testing'
|
||||
import { authMockFns, workflowAuthzMockFns, workflowsUtilsMock } from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
@@ -63,7 +63,7 @@ describe('Copilot Checkpoints Revert API Route', () => {
|
||||
|
||||
authMockFns.mockGetSession.mockResolvedValue(null)
|
||||
|
||||
workflowsUtilsMockFns.mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
workflowAuthzMockFns.mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
allowed: true,
|
||||
status: 200,
|
||||
})
|
||||
@@ -251,7 +251,7 @@ describe('Copilot Checkpoints Revert API Route', () => {
|
||||
thenResults.push(mockCheckpoint) // Checkpoint found
|
||||
thenResults.push(mockWorkflow) // Workflow found but different user
|
||||
|
||||
workflowsUtilsMockFns.mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValueOnce({
|
||||
workflowAuthzMockFns.mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValueOnce({
|
||||
allowed: false,
|
||||
status: 403,
|
||||
})
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workflowCheckpoints, workflow as workflowTable } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@sim/workflow-authz'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
@@ -14,7 +15,6 @@ import {
|
||||
} from '@/lib/copilot/request/http'
|
||||
import { getInternalApiBaseUrl } from '@/lib/core/utils/urls'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||
import { isUuidV4 } from '@/executor/constants'
|
||||
|
||||
const logger = createLogger('CheckpointRevertAPI')
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { authMockFns, workflowsUtilsMock, workflowsUtilsMockFns } from '@sim/testing'
|
||||
import { authMockFns, workflowAuthzMockFns, workflowsUtilsMock } from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
@@ -79,7 +79,7 @@ describe('Copilot Checkpoints API Route', () => {
|
||||
userId: 'user-123',
|
||||
workflowId: 'workflow-123',
|
||||
})
|
||||
workflowsUtilsMockFns.mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
workflowAuthzMockFns.mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
allowed: true,
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import { workflowCheckpoints } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@sim/workflow-authz'
|
||||
import { and, desc, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
@@ -13,7 +14,6 @@ import {
|
||||
createUnauthorizedResponse,
|
||||
} from '@/lib/copilot/request/http'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
|
||||
|
||||
const logger = createLogger('WorkflowCheckpointsAPI')
|
||||
|
||||
|
||||
@@ -206,7 +206,7 @@ describe('Copilot Confirm API Route', () => {
|
||||
})
|
||||
})
|
||||
|
||||
it('returns 400 when the durable write fails before publish', async () => {
|
||||
it('returns 500 when the durable write fails before publish', async () => {
|
||||
completeAsyncToolCall.mockRejectedValueOnce(new Error('db down'))
|
||||
|
||||
const response = await POST(
|
||||
@@ -216,7 +216,7 @@ describe('Copilot Confirm API Route', () => {
|
||||
})
|
||||
)
|
||||
|
||||
expect(response.status).toBe(400)
|
||||
expect(response.status).toBe(500)
|
||||
expect(publishToolConfirmation).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
@@ -14,6 +14,9 @@ import {
|
||||
getRunSegment,
|
||||
upsertAsyncToolCall,
|
||||
} from '@/lib/copilot/async-runs/repository'
|
||||
import { CopilotConfirmOutcome } from '@/lib/copilot/generated/trace-attribute-values-v1'
|
||||
import { TraceAttr } from '@/lib/copilot/generated/trace-attributes-v1'
|
||||
import { TraceSpan } from '@/lib/copilot/generated/trace-spans-v1'
|
||||
import { publishToolConfirmation } from '@/lib/copilot/persistence/tool-confirm'
|
||||
import {
|
||||
authenticateCopilotRequestSessionOnly,
|
||||
@@ -23,6 +26,7 @@ import {
|
||||
createRequestTracker,
|
||||
createUnauthorizedResponse,
|
||||
} from '@/lib/copilot/request/http'
|
||||
import { withIncomingGoSpan } from '@/lib/copilot/request/otel'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
|
||||
const logger = createLogger('CopilotConfirmAPI')
|
||||
@@ -114,93 +118,112 @@ async function updateToolCallStatus(
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* POST /api/copilot/confirm
|
||||
* Accept client tool completion or detach confirmations.
|
||||
*/
|
||||
export const POST = withRouteHandler(async (req: NextRequest) => {
|
||||
// POST /api/copilot/confirm — delivery path for client-executed tool
|
||||
// results. Correlate via `toolCallId` when the awaiting chat stream
|
||||
// stalls.
|
||||
export const POST = withRouteHandler((req: NextRequest) => {
|
||||
const tracker = createRequestTracker()
|
||||
|
||||
try {
|
||||
// Authenticate user using consolidated helper
|
||||
const { userId: authenticatedUserId, isAuthenticated } =
|
||||
await authenticateCopilotRequestSessionOnly()
|
||||
return withIncomingGoSpan(
|
||||
req.headers,
|
||||
TraceSpan.CopilotConfirmToolResult,
|
||||
{ [TraceAttr.RequestId]: tracker.requestId },
|
||||
async (span) => {
|
||||
try {
|
||||
const { userId: authenticatedUserId, isAuthenticated } =
|
||||
await authenticateCopilotRequestSessionOnly()
|
||||
|
||||
if (!isAuthenticated) {
|
||||
return createUnauthorizedResponse()
|
||||
if (!isAuthenticated || !authenticatedUserId) {
|
||||
span.setAttribute(TraceAttr.CopilotConfirmOutcome, CopilotConfirmOutcome.Unauthorized)
|
||||
return createUnauthorizedResponse()
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
const { toolCallId, status, message, data } = ConfirmationSchema.parse(body)
|
||||
span.setAttributes({
|
||||
[TraceAttr.ToolCallId]: toolCallId,
|
||||
[TraceAttr.ToolConfirmationStatus]: status,
|
||||
[TraceAttr.UserId]: authenticatedUserId,
|
||||
})
|
||||
|
||||
const existing = await getAsyncToolCall(toolCallId).catch((err) => {
|
||||
logger.warn('Failed to fetch async tool call', {
|
||||
toolCallId,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
return null
|
||||
})
|
||||
|
||||
if (!existing) {
|
||||
span.setAttribute(TraceAttr.CopilotConfirmOutcome, CopilotConfirmOutcome.ToolCallNotFound)
|
||||
return createNotFoundResponse('Tool call not found')
|
||||
}
|
||||
if (existing.toolName) span.setAttribute(TraceAttr.ToolName, existing.toolName)
|
||||
if (existing.runId) span.setAttribute(TraceAttr.RunId, existing.runId)
|
||||
|
||||
const run = await getRunSegment(existing.runId).catch((err) => {
|
||||
logger.warn('Failed to fetch run segment', {
|
||||
runId: existing.runId,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
})
|
||||
return null
|
||||
})
|
||||
if (!run) {
|
||||
span.setAttribute(TraceAttr.CopilotConfirmOutcome, CopilotConfirmOutcome.RunNotFound)
|
||||
return createNotFoundResponse('Tool call run not found')
|
||||
}
|
||||
if (run.userId !== authenticatedUserId) {
|
||||
span.setAttribute(TraceAttr.CopilotConfirmOutcome, CopilotConfirmOutcome.Forbidden)
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
|
||||
const updated = await updateToolCallStatus(existing, status, message, data)
|
||||
|
||||
if (!updated) {
|
||||
logger.error(`[${tracker.requestId}] Failed to update tool call status`, {
|
||||
userId: authenticatedUserId,
|
||||
toolCallId,
|
||||
status,
|
||||
internalStatus: status,
|
||||
message,
|
||||
})
|
||||
span.setAttribute(TraceAttr.CopilotConfirmOutcome, CopilotConfirmOutcome.UpdateFailed)
|
||||
// DB write failed — 500, not 400. 400 is a client-shape error.
|
||||
return createInternalServerErrorResponse('Failed to update tool call status')
|
||||
}
|
||||
|
||||
span.setAttribute(TraceAttr.CopilotConfirmOutcome, CopilotConfirmOutcome.Delivered)
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: message || `Tool call ${toolCallId} has been ${status.toLowerCase()}`,
|
||||
toolCallId,
|
||||
status,
|
||||
})
|
||||
} catch (error) {
|
||||
const duration = tracker.getDuration()
|
||||
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.error(`[${tracker.requestId}] Request validation error:`, {
|
||||
duration,
|
||||
errors: error.errors,
|
||||
})
|
||||
span.setAttribute(TraceAttr.CopilotConfirmOutcome, CopilotConfirmOutcome.ValidationError)
|
||||
return createBadRequestResponse(
|
||||
`Invalid request data: ${error.errors.map((e) => e.message).join(', ')}`
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${tracker.requestId}] Unexpected error:`, {
|
||||
duration,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
|
||||
span.setAttribute(TraceAttr.CopilotConfirmOutcome, CopilotConfirmOutcome.InternalError)
|
||||
return createInternalServerErrorResponse(
|
||||
error instanceof Error ? error.message : 'Internal server error'
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const body = await req.json()
|
||||
const { toolCallId, status, message, data } = ConfirmationSchema.parse(body)
|
||||
const existing = await getAsyncToolCall(toolCallId).catch((err) => {
|
||||
logger.warn('Failed to fetch async tool call', {
|
||||
toolCallId,
|
||||
error: toError(err).message,
|
||||
})
|
||||
return null
|
||||
})
|
||||
|
||||
if (!existing) {
|
||||
return createNotFoundResponse('Tool call not found')
|
||||
}
|
||||
|
||||
const run = await getRunSegment(existing.runId).catch((err) => {
|
||||
logger.warn('Failed to fetch run segment', {
|
||||
runId: existing.runId,
|
||||
error: toError(err).message,
|
||||
})
|
||||
return null
|
||||
})
|
||||
if (!run) {
|
||||
return createNotFoundResponse('Tool call run not found')
|
||||
}
|
||||
if (run.userId !== authenticatedUserId) {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
|
||||
// Update the durable tool call status and wake any waiters.
|
||||
const updated = await updateToolCallStatus(existing, status, message, data)
|
||||
|
||||
if (!updated) {
|
||||
logger.error(`[${tracker.requestId}] Failed to update tool call status`, {
|
||||
userId: authenticatedUserId,
|
||||
toolCallId,
|
||||
status,
|
||||
internalStatus: status,
|
||||
message,
|
||||
})
|
||||
return createBadRequestResponse('Failed to update tool call status or tool call not found')
|
||||
}
|
||||
|
||||
const duration = tracker.getDuration()
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
message: message || `Tool call ${toolCallId} has been ${status.toLowerCase()}`,
|
||||
toolCallId,
|
||||
status,
|
||||
})
|
||||
} catch (error) {
|
||||
const duration = tracker.getDuration()
|
||||
|
||||
if (error instanceof z.ZodError) {
|
||||
logger.error(`[${tracker.requestId}] Request validation error:`, {
|
||||
duration,
|
||||
errors: error.errors,
|
||||
})
|
||||
return createBadRequestResponse(
|
||||
`Invalid request data: ${error.errors.map((e) => e.message).join(', ')}`
|
||||
)
|
||||
}
|
||||
|
||||
logger.error(`[${tracker.requestId}] Unexpected error:`, {
|
||||
duration,
|
||||
error: error instanceof Error ? error.message : 'Unknown error',
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
|
||||
return createInternalServerErrorResponse(
|
||||
error instanceof Error ? error.message : 'Internal server error'
|
||||
)
|
||||
}
|
||||
)
|
||||
})
|
||||
|
||||
@@ -2,6 +2,7 @@ import { createLogger } from '@sim/logger'
|
||||
import { toError } from '@sim/utils/errors'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||
import { fetchGo } from '@/lib/copilot/request/go/fetch'
|
||||
import { authenticateCopilotRequestSessionOnly } from '@/lib/copilot/request/http'
|
||||
|
||||
interface AvailableModel {
|
||||
@@ -45,10 +46,12 @@ export const GET = withRouteHandler(async (_req: NextRequest) => {
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(`${SIM_AGENT_API_URL}/api/get-available-models`, {
|
||||
const response = await fetchGo(`${SIM_AGENT_API_URL}/api/get-available-models`, {
|
||||
method: 'GET',
|
||||
headers,
|
||||
cache: 'no-store',
|
||||
spanName: 'sim → go /api/get-available-models',
|
||||
operation: 'get_available_models',
|
||||
})
|
||||
|
||||
const payload = await response.json().catch(() => ({}))
|
||||
|
||||
@@ -22,6 +22,22 @@ vi.mock('@/lib/core/config/env', () => createEnvMock({ COPILOT_API_KEY: 'test-ap
|
||||
|
||||
import { POST } from '@/app/api/copilot/stats/route'
|
||||
|
||||
// `fetchGo` reads `response.status` and `response.headers.get('content-length')`
|
||||
// to stamp span attributes, so mock responses need both fields or the call
|
||||
// path throws before the route handler sees the body.
|
||||
function buildMockResponse(init: {
|
||||
ok: boolean
|
||||
status?: number
|
||||
json: () => Promise<unknown>
|
||||
}): Record<string, unknown> {
|
||||
return {
|
||||
ok: init.ok,
|
||||
status: init.status ?? (init.ok ? 200 : 500),
|
||||
headers: new Headers(),
|
||||
json: init.json,
|
||||
}
|
||||
}
|
||||
|
||||
describe('Copilot Stats API Route', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
@@ -58,10 +74,12 @@ describe('Copilot Stats API Route', () => {
|
||||
isAuthenticated: true,
|
||||
})
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true }),
|
||||
})
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
buildMockResponse({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true }),
|
||||
})
|
||||
)
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
messageId: 'message-123',
|
||||
@@ -152,10 +170,12 @@ describe('Copilot Stats API Route', () => {
|
||||
isAuthenticated: true,
|
||||
})
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: false,
|
||||
json: () => Promise.resolve({ error: 'Invalid message ID' }),
|
||||
})
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
buildMockResponse({
|
||||
ok: false,
|
||||
json: () => Promise.resolve({ error: 'Invalid message ID' }),
|
||||
})
|
||||
)
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
messageId: 'invalid-message',
|
||||
@@ -176,10 +196,12 @@ describe('Copilot Stats API Route', () => {
|
||||
isAuthenticated: true,
|
||||
})
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: false,
|
||||
json: () => Promise.resolve({ message: 'Rate limit exceeded' }),
|
||||
})
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
buildMockResponse({
|
||||
ok: false,
|
||||
json: () => Promise.resolve({ message: 'Rate limit exceeded' }),
|
||||
})
|
||||
)
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
messageId: 'message-123',
|
||||
@@ -200,10 +222,12 @@ describe('Copilot Stats API Route', () => {
|
||||
isAuthenticated: true,
|
||||
})
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: false,
|
||||
json: () => Promise.reject(new Error('Not JSON')),
|
||||
})
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
buildMockResponse({
|
||||
ok: false,
|
||||
json: () => Promise.reject(new Error('Not JSON')),
|
||||
})
|
||||
)
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
messageId: 'message-123',
|
||||
@@ -266,10 +290,12 @@ describe('Copilot Stats API Route', () => {
|
||||
isAuthenticated: true,
|
||||
})
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true }),
|
||||
})
|
||||
mockFetch.mockResolvedValueOnce(
|
||||
buildMockResponse({
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ success: true }),
|
||||
})
|
||||
)
|
||||
|
||||
const req = createMockRequest('POST', {
|
||||
messageId: 'message-456',
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { SIM_AGENT_API_URL } from '@/lib/copilot/constants'
|
||||
import { fetchGo } from '@/lib/copilot/request/go/fetch'
|
||||
import {
|
||||
authenticateCopilotRequestSessionOnly,
|
||||
createBadRequestResponse,
|
||||
@@ -40,13 +41,15 @@ export const POST = withRouteHandler(async (req: NextRequest) => {
|
||||
diffAccepted,
|
||||
}
|
||||
|
||||
const agentRes = await fetch(`${SIM_AGENT_API_URL}/api/stats`, {
|
||||
const agentRes = await fetchGo(`${SIM_AGENT_API_URL}/api/stats`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
...(env.COPILOT_API_KEY ? { 'x-api-key': env.COPILOT_API_KEY } : {}),
|
||||
},
|
||||
body: JSON.stringify(payload),
|
||||
spanName: 'sim → go /api/stats',
|
||||
operation: 'stats_ingest',
|
||||
})
|
||||
|
||||
// Prefer not to block clients; still relay status
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit'
|
||||
import { db } from '@sim/db'
|
||||
import { credentialSet, credentialSetInvitation, member, organization, user } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getEmailSubject, renderPollingGroupInvitationEmail } from '@/components/emails'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { hasCredentialSetsAccess } from '@/lib/billing'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit'
|
||||
import { db } from '@sim/db'
|
||||
import { credentialSet, credentialSetInvitation, member, organization, user } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
@@ -6,7 +7,6 @@ import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { getEmailSubject, renderPollingGroupInvitationEmail } from '@/components/emails'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { hasCredentialSetsAccess } from '@/lib/billing'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit'
|
||||
import { db } from '@sim/db'
|
||||
import { account, credentialSet, credentialSetMember, member, user } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { generateId } from '@sim/utils/id'
|
||||
import { and, eq, inArray } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { hasCredentialSetsAccess } from '@/lib/billing'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit'
|
||||
import { db } from '@sim/db'
|
||||
import { credentialSet, credentialSetMember, member } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { hasCredentialSetsAccess } from '@/lib/billing'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit'
|
||||
import { db } from '@sim/db'
|
||||
import {
|
||||
credentialSet,
|
||||
@@ -9,9 +10,9 @@ import { createLogger } from '@sim/logger'
|
||||
import { generateId } from '@sim/utils/id'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
import { normalizeEmail } from '@/lib/invitations/core'
|
||||
import { syncAllWebhooksForCredentialSet } from '@/lib/webhooks/utils.server'
|
||||
|
||||
const logger = createLogger('CredentialSetInviteToken')
|
||||
@@ -111,6 +112,21 @@ export const POST = withRouteHandler(
|
||||
return NextResponse.json({ error: 'Invitation has expired' }, { status: 410 })
|
||||
}
|
||||
|
||||
if (invitation.email) {
|
||||
const sessionEmail = session.user.email
|
||||
if (!sessionEmail || normalizeEmail(sessionEmail) !== normalizeEmail(invitation.email)) {
|
||||
logger.warn('Rejected credential set invitation accept due to email mismatch', {
|
||||
invitationId: invitation.id,
|
||||
credentialSetId: invitation.credentialSetId,
|
||||
userId: session.user.id,
|
||||
})
|
||||
return NextResponse.json(
|
||||
{ error: 'This invitation was sent to a different email address' },
|
||||
{ status: 403 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const existingMember = await db
|
||||
.select()
|
||||
.from(credentialSetMember)
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit'
|
||||
import { db } from '@sim/db'
|
||||
import { credentialSet, credentialSetMember, organization } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { generateId } from '@sim/utils/id'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
import { syncAllWebhooksForCredentialSet } from '@/lib/webhooks/utils.server'
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit'
|
||||
import { db } from '@sim/db'
|
||||
import { credentialSet, credentialSetMember, member, organization, user } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
@@ -5,7 +6,6 @@ import { generateId } from '@sim/utils/id'
|
||||
import { and, count, desc, eq } from 'drizzle-orm'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { hasCredentialSetsAccess } from '@/lib/billing'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit'
|
||||
import { db } from '@sim/db'
|
||||
import { credential, credentialMember, environment, workspaceEnvironment } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
@@ -5,7 +6,6 @@ import { generateId } from '@sim/utils/id'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { encryptSecret } from '@/lib/core/security/encryption'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit'
|
||||
import { db } from '@sim/db'
|
||||
import { account, credential, credentialMember, workspace } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
@@ -5,7 +6,6 @@ import { generateId } from '@sim/utils/id'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { encryptSecret } from '@/lib/core/security/encryption'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit'
|
||||
import { db } from '@sim/db'
|
||||
import { environment } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
@@ -5,7 +6,6 @@ import { generateId } from '@sim/utils/id'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
@@ -8,21 +8,61 @@ import {
|
||||
isUsingCloudStorage,
|
||||
type StorageContext,
|
||||
} from '@/lib/uploads'
|
||||
import {
|
||||
signUploadToken,
|
||||
type UploadTokenPayload,
|
||||
verifyUploadToken,
|
||||
} from '@/lib/uploads/core/upload-token'
|
||||
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
|
||||
|
||||
const logger = createLogger('MultipartUploadAPI')
|
||||
|
||||
const ALLOWED_UPLOAD_CONTEXTS = new Set<StorageContext>([
|
||||
'knowledge-base',
|
||||
'chat',
|
||||
'copilot',
|
||||
'mothership',
|
||||
'execution',
|
||||
'workspace',
|
||||
'profile-pictures',
|
||||
'og-images',
|
||||
'logs',
|
||||
'workspace-logos',
|
||||
])
|
||||
|
||||
interface InitiateMultipartRequest {
|
||||
fileName: string
|
||||
contentType: string
|
||||
fileSize: number
|
||||
workspaceId: string
|
||||
context?: StorageContext
|
||||
}
|
||||
|
||||
interface GetPartUrlsRequest {
|
||||
uploadId: string
|
||||
key: string
|
||||
interface TokenBoundRequest {
|
||||
uploadToken: string
|
||||
}
|
||||
|
||||
interface GetPartUrlsRequest extends TokenBoundRequest {
|
||||
partNumbers: number[]
|
||||
context?: StorageContext
|
||||
}
|
||||
|
||||
interface CompleteSingleRequest extends TokenBoundRequest {
|
||||
parts: unknown
|
||||
}
|
||||
|
||||
interface CompleteBatchRequest {
|
||||
uploads: Array<TokenBoundRequest & { parts: unknown }>
|
||||
}
|
||||
|
||||
const verifyTokenForUser = (token: string | undefined, userId: string) => {
|
||||
if (!token || typeof token !== 'string') {
|
||||
return null
|
||||
}
|
||||
const result = verifyUploadToken(token)
|
||||
if (!result.valid || result.payload.userId !== userId) {
|
||||
return null
|
||||
}
|
||||
return result.payload
|
||||
}
|
||||
|
||||
export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
@@ -31,6 +71,7 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
if (!session?.user?.id) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
const userId = session.user.id
|
||||
|
||||
const action = request.nextUrl.searchParams.get('action')
|
||||
|
||||
@@ -45,32 +86,34 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
|
||||
switch (action) {
|
||||
case 'initiate': {
|
||||
const data: InitiateMultipartRequest = await request.json()
|
||||
const { fileName, contentType, fileSize, context = 'knowledge-base' } = data
|
||||
const data = (await request.json()) as InitiateMultipartRequest
|
||||
const { fileName, contentType, fileSize, workspaceId, context = 'knowledge-base' } = data
|
||||
|
||||
if (!workspaceId || typeof workspaceId !== 'string') {
|
||||
return NextResponse.json({ error: 'workspaceId is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
if (!ALLOWED_UPLOAD_CONTEXTS.has(context)) {
|
||||
return NextResponse.json({ error: 'Invalid storage context' }, { status: 400 })
|
||||
}
|
||||
|
||||
const permission = await getUserEntityPermissions(userId, 'workspace', workspaceId)
|
||||
if (permission !== 'write' && permission !== 'admin') {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
|
||||
const config = getStorageConfig(context)
|
||||
|
||||
let uploadId: string
|
||||
let key: string
|
||||
|
||||
if (storageProvider === 's3') {
|
||||
const { initiateS3MultipartUpload } = await import('@/lib/uploads/providers/s3/client')
|
||||
|
||||
const result = await initiateS3MultipartUpload({
|
||||
fileName,
|
||||
contentType,
|
||||
fileSize,
|
||||
})
|
||||
|
||||
logger.info(
|
||||
`Initiated S3 multipart upload for ${fileName} (context: ${context}): ${result.uploadId}`
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
uploadId: result.uploadId,
|
||||
key: result.key,
|
||||
})
|
||||
}
|
||||
if (storageProvider === 'blob') {
|
||||
const result = await initiateS3MultipartUpload({ fileName, contentType, fileSize })
|
||||
uploadId = result.uploadId
|
||||
key = result.key
|
||||
} else if (storageProvider === 'blob') {
|
||||
const { initiateMultipartUpload } = await import('@/lib/uploads/providers/blob/client')
|
||||
|
||||
const result = await initiateMultipartUpload({
|
||||
fileName,
|
||||
contentType,
|
||||
@@ -82,46 +125,55 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
connectionString: config.connectionString,
|
||||
},
|
||||
})
|
||||
|
||||
logger.info(
|
||||
`Initiated Azure multipart upload for ${fileName} (context: ${context}): ${result.uploadId}`
|
||||
uploadId = result.uploadId
|
||||
key = result.key
|
||||
} else {
|
||||
return NextResponse.json(
|
||||
{ error: `Unsupported storage provider: ${storageProvider}` },
|
||||
{ status: 400 }
|
||||
)
|
||||
|
||||
return NextResponse.json({
|
||||
uploadId: result.uploadId,
|
||||
key: result.key,
|
||||
})
|
||||
}
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: `Unsupported storage provider: ${storageProvider}` },
|
||||
{ status: 400 }
|
||||
const uploadToken = signUploadToken({
|
||||
uploadId,
|
||||
key,
|
||||
userId,
|
||||
workspaceId,
|
||||
context,
|
||||
})
|
||||
|
||||
logger.info(
|
||||
`Initiated ${storageProvider} multipart upload for ${fileName} (context: ${context}, workspace: ${workspaceId}): ${uploadId}`
|
||||
)
|
||||
|
||||
return NextResponse.json({ uploadId, key, uploadToken })
|
||||
}
|
||||
|
||||
case 'get-part-urls': {
|
||||
const data: GetPartUrlsRequest = await request.json()
|
||||
const { uploadId, key, partNumbers, context = 'knowledge-base' } = data
|
||||
const data = (await request.json()) as GetPartUrlsRequest
|
||||
const { partNumbers } = data
|
||||
|
||||
const tokenPayload = verifyTokenForUser(data.uploadToken, userId)
|
||||
if (!tokenPayload) {
|
||||
return NextResponse.json({ error: 'Invalid or expired upload token' }, { status: 403 })
|
||||
}
|
||||
|
||||
const { uploadId, key, context } = tokenPayload
|
||||
const config = getStorageConfig(context)
|
||||
|
||||
if (storageProvider === 's3') {
|
||||
const { getS3MultipartPartUrls } = await import('@/lib/uploads/providers/s3/client')
|
||||
|
||||
const presignedUrls = await getS3MultipartPartUrls(key, uploadId, partNumbers)
|
||||
|
||||
return NextResponse.json({ presignedUrls })
|
||||
}
|
||||
if (storageProvider === 'blob') {
|
||||
const { getMultipartPartUrls } = await import('@/lib/uploads/providers/blob/client')
|
||||
|
||||
const presignedUrls = await getMultipartPartUrls(key, partNumbers, {
|
||||
containerName: config.containerName!,
|
||||
accountName: config.accountName!,
|
||||
accountKey: config.accountKey,
|
||||
connectionString: config.connectionString,
|
||||
})
|
||||
|
||||
return NextResponse.json({ presignedUrls })
|
||||
}
|
||||
|
||||
@@ -132,24 +184,32 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
}
|
||||
|
||||
case 'complete': {
|
||||
const data = await request.json()
|
||||
const context: StorageContext = data.context || 'knowledge-base'
|
||||
const data = (await request.json()) as CompleteSingleRequest | CompleteBatchRequest
|
||||
|
||||
const config = getStorageConfig(context)
|
||||
if ('uploads' in data && Array.isArray(data.uploads)) {
|
||||
const verified = data.uploads.map((upload) => {
|
||||
const payload = verifyTokenForUser(upload.uploadToken, userId)
|
||||
return payload ? { payload, parts: upload.parts } : null
|
||||
})
|
||||
|
||||
if (verified.some((entry) => entry === null)) {
|
||||
return NextResponse.json({ error: 'Invalid or expired upload token' }, { status: 403 })
|
||||
}
|
||||
|
||||
const verifiedEntries = verified.filter(
|
||||
(entry): entry is { payload: UploadTokenPayload; parts: unknown } => entry !== null
|
||||
)
|
||||
|
||||
if ('uploads' in data) {
|
||||
const results = await Promise.all(
|
||||
data.uploads.map(async (upload: any) => {
|
||||
const { uploadId, key } = upload
|
||||
verifiedEntries.map(async ({ payload, parts }) => {
|
||||
const { uploadId, key, context } = payload
|
||||
const config = getStorageConfig(context)
|
||||
|
||||
if (storageProvider === 's3') {
|
||||
const { completeS3MultipartUpload } = await import(
|
||||
'@/lib/uploads/providers/s3/client'
|
||||
)
|
||||
const parts = upload.parts // S3 format: { ETag, PartNumber }
|
||||
|
||||
const result = await completeS3MultipartUpload(key, uploadId, parts)
|
||||
|
||||
const result = await completeS3MultipartUpload(key, uploadId, parts as any)
|
||||
return {
|
||||
success: true,
|
||||
location: result.location,
|
||||
@@ -161,15 +221,12 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
const { completeMultipartUpload } = await import(
|
||||
'@/lib/uploads/providers/blob/client'
|
||||
)
|
||||
const parts = upload.parts // Azure format: { blockId, partNumber }
|
||||
|
||||
const result = await completeMultipartUpload(key, parts, {
|
||||
const result = await completeMultipartUpload(key, parts as any, {
|
||||
containerName: config.containerName!,
|
||||
accountName: config.accountName!,
|
||||
accountKey: config.accountKey,
|
||||
connectionString: config.connectionString,
|
||||
})
|
||||
|
||||
return {
|
||||
success: true,
|
||||
location: result.location,
|
||||
@@ -182,19 +239,23 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
})
|
||||
)
|
||||
|
||||
logger.info(`Completed ${data.uploads.length} multipart uploads (context: ${context})`)
|
||||
logger.info(`Completed ${verifiedEntries.length} multipart uploads`)
|
||||
return NextResponse.json({ results })
|
||||
}
|
||||
|
||||
const { uploadId, key, parts } = data
|
||||
const single = data as CompleteSingleRequest
|
||||
const tokenPayload = verifyTokenForUser(single.uploadToken, userId)
|
||||
if (!tokenPayload) {
|
||||
return NextResponse.json({ error: 'Invalid or expired upload token' }, { status: 403 })
|
||||
}
|
||||
|
||||
const { uploadId, key, context } = tokenPayload
|
||||
const config = getStorageConfig(context)
|
||||
|
||||
if (storageProvider === 's3') {
|
||||
const { completeS3MultipartUpload } = await import('@/lib/uploads/providers/s3/client')
|
||||
|
||||
const result = await completeS3MultipartUpload(key, uploadId, parts)
|
||||
|
||||
const result = await completeS3MultipartUpload(key, uploadId, single.parts as any)
|
||||
logger.info(`Completed S3 multipart upload for key ${key} (context: ${context})`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
location: result.location,
|
||||
@@ -204,16 +265,13 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
}
|
||||
if (storageProvider === 'blob') {
|
||||
const { completeMultipartUpload } = await import('@/lib/uploads/providers/blob/client')
|
||||
|
||||
const result = await completeMultipartUpload(key, parts, {
|
||||
const result = await completeMultipartUpload(key, single.parts as any, {
|
||||
containerName: config.containerName!,
|
||||
accountName: config.accountName!,
|
||||
accountKey: config.accountKey,
|
||||
connectionString: config.connectionString,
|
||||
})
|
||||
|
||||
logger.info(`Completed Azure multipart upload for key ${key} (context: ${context})`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
location: result.location,
|
||||
@@ -229,27 +287,27 @@ export const POST = withRouteHandler(async (request: NextRequest) => {
|
||||
}
|
||||
|
||||
case 'abort': {
|
||||
const data = await request.json()
|
||||
const { uploadId, key, context = 'knowledge-base' } = data
|
||||
const data = (await request.json()) as TokenBoundRequest
|
||||
const tokenPayload = verifyTokenForUser(data.uploadToken, userId)
|
||||
if (!tokenPayload) {
|
||||
return NextResponse.json({ error: 'Invalid or expired upload token' }, { status: 403 })
|
||||
}
|
||||
|
||||
const config = getStorageConfig(context as StorageContext)
|
||||
const { uploadId, key, context } = tokenPayload
|
||||
const config = getStorageConfig(context)
|
||||
|
||||
if (storageProvider === 's3') {
|
||||
const { abortS3MultipartUpload } = await import('@/lib/uploads/providers/s3/client')
|
||||
|
||||
await abortS3MultipartUpload(key, uploadId)
|
||||
|
||||
logger.info(`Aborted S3 multipart upload for key ${key} (context: ${context})`)
|
||||
} else if (storageProvider === 'blob') {
|
||||
const { abortMultipartUpload } = await import('@/lib/uploads/providers/blob/client')
|
||||
|
||||
await abortMultipartUpload(key, {
|
||||
containerName: config.containerName!,
|
||||
accountName: config.accountName!,
|
||||
accountKey: config.accountKey,
|
||||
connectionString: config.connectionString,
|
||||
})
|
||||
|
||||
logger.info(`Aborted Azure multipart upload for key ${key} (context: ${context})`)
|
||||
} else {
|
||||
return NextResponse.json(
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { createHash } from 'crypto'
|
||||
import { readFile } from 'fs/promises'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { sha256Hex } from '@sim/security/hash'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
|
||||
@@ -80,11 +80,7 @@ async function compileDocumentIfNeeded(
|
||||
}
|
||||
|
||||
const code = buffer.toString('utf-8')
|
||||
const cacheKey = createHash('sha256')
|
||||
.update(ext)
|
||||
.update(code)
|
||||
.update(workspaceId ?? '')
|
||||
.digest('hex')
|
||||
const cacheKey = sha256Hex(`${ext}${code}${workspaceId ?? ''}`)
|
||||
const cached = compiledDocCache.get(cacheKey)
|
||||
if (cached) {
|
||||
return { buffer: cached, contentType: format.contentType }
|
||||
|
||||
@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { sanitizeFileName } from '@/executor/constants'
|
||||
import '@/lib/uploads/core/setup.server'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@sim/audit'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { withRouteHandler } from '@/lib/core/utils/with-route-handler'
|
||||
import { captureServerEvent } from '@/lib/posthog/server'
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user