Compare commits

...

51 Commits
dev ... v0.6.22

Author SHA1 Message Date
Waleed
e8f7fe0989 v0.6.22: agentmail, rootly, landing fixes, analytics, credentials block 2026-04-03 01:14:36 -07:00
Waleed
ace87791d8 feat(analytics): add PostHog product analytics (#3910)
* feat(analytics): add PostHog product analytics

* fix(posthog): fix workspace group via URL params, type errors, and clean up comments

* fix(posthog): address PR review - fix pre-tx event, auth_method, paused executions, enterprise cancellation, settings double-fire

* chore(posthog): remove unused identifyServerPerson

* fix(posthog): isolate processQueuedResumes errors, simplify settings posthog deps

* fix(posthog): correctly classify SSO auth_method, fix phantom empty-string workspace groups

* fix(posthog): remove usePostHog from memo'd TemplateCard, fix copilot chat phantom workspace group

* fix(posthog): eliminate all remaining phantom empty-string workspace groups

* fix(posthog): fix cancel route phantom group, remove redundant workspaceId shadow in catch block

* fix(posthog): use ids.length for block_removed guard to handle container blocks with descendants

* chore(posthog): remove unused removedBlockTypes variable

* fix(posthog): remove phantom $set person properties from subscription events

* fix(posthog): add passedKnowledgeBaseName to knowledge_base_opened effect deps

* fix(posthog): capture currentWorkflowId synchronously before async import to avoid stale closure

* fix(posthog): add typed captureEvent wrapper for React components, deduplicate copilot_panel_opened

* feat(posthog): add task_created and task_message_sent events, remove copilot_panel_opened

* feat(posthog): track task_renamed, task_deleted, task_marked_read, task_marked_unread

* feat(analytics): expand posthog event coverage with source tracking and lifecycle events

* fix(analytics): flush posthog events on SIGTERM before ECS task termination

* fix(analytics): fix posthog in useCallback deps and fire block events for bulk operations
2026-04-03 01:00:35 -07:00
Waleed
74af452175 feat(blocks): add Credential block (#3907)
* feat(blocks): add Credential block

* fix(blocks): explicit workspaceId guard in credential handler, clarify hasOAuthSelection

* feat(credential): add list operation with type/provider filters

* feat(credential): restrict to OAuth only, remove env vars and service accounts

* docs(credential): update screenshots

* fix(credential): remove stale isServiceAccount dep from overlayContent memo

* fix(credential): filter to oauth-only in handleComboboxChange matchedCred lookup
2026-04-02 23:15:15 -07:00
Waleed
ec51f73596 feat(email): abandoned checkout email, 80% free tier warning, credits exhausted email (#3908)
* feat(email): send plain personal email on abandoned checkout

* feat(email): lower free tier warning to 80% and add credits exhausted email

* feat(email): use wordmark in email header instead of icon-only logo

* fix(email): restore accidentally deleted social icons in email footer

* fix(email): prevent double email for free users at 80%, fix subject line

* improvement(emails): extract shared plain email styles and proFeatures constant, fix double email on 100% usage

* fix(email): filter subscription-mode checkout, skip already-subscribed users, fix preview text

* fix(email): use notifications type for onboarding followup to respect unsubscribe preferences

* fix(email): use limit instead of currentUsage in credits exhausted email body

* fix(email): use notifications type for abandoned checkout, clarify crosses80 comment

* chore(email): rename _constants.ts to constants.ts

* fix(email): use isProPlan to catch org-level subscriptions in abandoned checkout guard

* fix(email): align onboarding followup delay to 5 days for email/password users
2026-04-02 19:31:29 -07:00
Theodore Li
6866da590c fix(tools) Directly query db for custom tool id (#3875)
* Directly query db for custom tool id

* Switch back to inline imports

* Fix lint

* Fix test

* Fix greptile comments

* Fix lint

* Make userId and workspaceId required

* Add back nullable userId and workspaceId fields

---------

Co-authored-by: Theodore Li <theo@sim.ai>
2026-04-02 22:13:37 -04:00
Waleed
b0c0ee29a8 feat(email): send onboarding followup email 3 days after signup (#3906)
* feat(email): send onboarding followup email 3 days after signup

* fix(email): add trigger guard, idempotency key, and shared task ID constant

* fix(email): increase onboarding followup delay from 3 to 5 days
2026-04-02 18:08:14 -07:00
Waleed
f0d1950477 v0.6.21: concurrency FF, blog theme 2026-04-02 13:08:59 -07:00
Waleed
0fdd8ffb55 v0.6.20: oauth default credential name, models pages, new models, rippling and rootly integrations 2026-04-02 11:44:24 -07:00
Waleed
d581009099 v0.6.19: vllm fixes, loading improvements, reactquery standardization, new gpt 5.4 models, fireworks provider support, launchdarkly, tailscale, extend integrations 2026-03-31 20:17:00 -07:00
Waleed
7d0fdefb22 v0.6.18: file operations block, profound integration, edge connection improvements, copy logs, knowledgebase robustness 2026-03-30 21:35:41 -07:00
Waleed
73e00f53e1 v0.6.17: trigger.dev CI, workers FF 2026-03-30 09:33:30 -07:00
Vikhyath Mondreti
1d7ae906bc v0.6.16: bullmq optionality 2026-03-30 00:12:21 -07:00
Waleed
560fa75155 v0.6.15: workers, security hardening, sidebar improvements, chat fixes, profound 2026-03-29 23:02:19 -07:00
Waleed
14089f7dbb v0.6.14: performance improvements, connectors UX, collapsed sidebar actions 2026-03-27 13:07:59 -07:00
Waleed
e615816dce v0.6.13: emcn standardization, granola and ketch integrations, security hardening, connectors improvements 2026-03-27 00:16:37 -07:00
Waleed
ca87d7ce29 v0.6.12: billing, blogs UI 2026-03-26 01:19:23 -07:00
Waleed
6bebbc5e29 v0.6.11: billing fixes, rippling, hubspot, UI improvements, demo modal 2026-03-25 22:54:56 -07:00
Waleed
7b572f1f61 v0.6.10: tour fix, connectors reliability improvements, tooltip gif fixes 2026-03-24 21:38:19 -07:00
Vikhyath Mondreti
ed9a71f0af v0.6.9: general ux improvements for tables, mothership 2026-03-24 17:03:24 -07:00
Siddharth Ganesan
c78c870fda v0.6.8: mothership tool loop
v0.6.8: mothership tool loop
2026-03-24 04:06:19 -07:00
Waleed
19442f19e2 v0.6.7: kb improvements, edge z index fix, captcha, new trust center, block classifications 2026-03-21 12:43:33 -07:00
Waleed
1731a4d7f0 v0.6.6: landing improvements, styling consistency, mothership table renaming 2026-03-19 23:58:30 -07:00
Waleed
9fcd02fd3b v0.6.5: email validation, integrations page, mothership and custom tool fixes 2026-03-19 16:08:30 -07:00
Waleed
ff7b5b528c v0.6.4: subflows, docusign, ashby new tools, box, workday, billing bug fixes 2026-03-18 23:12:36 -07:00
Waleed
30f2d1a0fc v0.6.3: hubspot integration, kb block improvements 2026-03-18 11:19:55 -07:00
Waleed
4bd0731871 v0.6.2: mothership stability, chat iframe embedding, KB upserts, new blog post 2026-03-18 03:29:39 -07:00
Waleed
4f3bc37fe4 v0.6.1: added better auth admin plugin 2026-03-17 15:16:16 -07:00
Waleed
84d6fdc423 v0.6: mothership, tables, connectors 2026-03-17 12:21:15 -07:00
Vikhyath Mondreti
4c12914d35 v0.5.113: jira, ashby, google ads, grain updates 2026-03-12 22:54:25 -07:00
Waleed
e9bdc57616 v0.5.112: trace spans improvements, fathom integration, jira fixes, canvas navigation updates 2026-03-12 13:30:20 -07:00
Vikhyath Mondreti
36612ae42a v0.5.111: non-polling webhook execs off trigger.dev, gmail subject headers, webhook trigger configs (#3530) 2026-03-11 17:47:28 -07:00
Waleed
1c2c2c65d4 v0.5.110: webhook execution speedups, SSRF patches 2026-03-11 15:00:24 -07:00
Waleed
ecd3536a72 v0.5.109: obsidian and evernote integrations, slack fixes, remove memory instrumentation 2026-03-09 10:40:37 -07:00
Vikhyath Mondreti
8c0a2e04b1 v0.5.108: workflow input params in agent tools, bun upgrade, dropdown selectors for 14 blocks 2026-03-06 21:02:25 -08:00
Waleed
6586c5ce40 v0.5.107: new reddit, slack tools 2026-03-05 22:48:20 -08:00
Vikhyath Mondreti
3ce947566d v0.5.106: condition block and legacy kbs fixes, GPT 5.4 2026-03-05 17:30:05 -08:00
Waleed
70c36cb7aa v0.5.105: slack remove reaction, nested subflow locks fix, servicenow pagination, memory improvements 2026-03-04 22:38:26 -08:00
Waleed
f1ec5fe824 v0.5.104: memory improvements, nested subflows, careers page redirect, brandfetch, google meet 2026-03-03 23:45:29 -08:00
Waleed
e07e3c34cc v0.5.103: memory util instrumentation, API docs, amplitude, google pagespeed insights, pagerduty 2026-03-01 23:27:02 -08:00
Waleed
0d2e6ff31d v0.5.102: new integrations, new tools, ci speedups, memory leak instrumentation 2026-02-28 12:48:10 -08:00
Waleed
4fd0989264 v0.5.101: circular dependency mitigation, confluence enhancements, google tasks and bigquery integrations, workflow lock 2026-02-26 15:04:53 -08:00
Waleed
67f8a687f6 v0.5.100: multiple credentials, 40% speedup, gong, attio, audit log improvements 2026-02-25 00:28:25 -08:00
Waleed
af592349d3 v0.5.99: local dev improvements, live workflow logs in terminal 2026-02-23 00:24:49 -08:00
Waleed
0d86ea01f0 v0.5.98: change detection improvements, rate limit and code execution fixes, removed retired models, hex integration 2026-02-21 18:07:40 -08:00
Waleed
115f04e989 v0.5.97: oidc discovery for copilot mcp 2026-02-21 02:06:25 -08:00
Waleed
34d92fae89 v0.5.96: sim oauth provider, slack ephemeral message tool and blockkit support 2026-02-20 18:22:20 -08:00
Waleed
67aa4bb332 v0.5.95: gemini 3.1 pro, cloudflare, dataverse, revenuecat, redis, upstash, algolia tools; isolated-vm robustness improvements, tables backend (#3271)
* feat(tools): advanced fields for youtube, vercel; added cloudflare and dataverse tools (#3257)

* refactor(vercel): mark optional fields as advanced mode

Move optional/power-user fields behind the advanced toggle:
- List Deployments: project filter, target, state
- Create Deployment: project ID override, redeploy from, target
- List Projects: search
- Create/Update Project: framework, build/output/install commands
- Env Vars: variable type
- Webhooks: project IDs filter
- Checks: path, details URL
- Team Members: role filter
- All operations: team ID scope

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* style(youtube): mark optional params as advanced mode

Hide pagination, sort order, and filter fields behind the advanced
toggle for a cleaner default UX across all YouTube operations.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* added advanced fields for vercel and youtube, added cloudflare and dataverse block

* added desc for dataverse

* add more tools

* ack comment

* more

* ops

---------

Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>

* feat(tables): added tables (#2867)

* updates

* required

* trashy table viewer

* updates

* updates

* filtering ui

* updates

* updates

* updates

* one input mode

* format

* fix lints

* improved errors

* updates

* updates

* changes

* doc strings

* breaking down file

* update comments with ai

* updates

* comments

* changes

* revert

* updates

* dedupe

* updates

* updates

* updates

* refactoring

* renames & refactors

* refactoring

* updates

* undo

* update db

* wand

* updates

* fix comments

* fixes

* simplify comments

* updates

* renames

* better comments

* validation

* updates

* updates

* updates

* fix sorting

* fix appearance

* updating prompt to make it user sort

* rm

* updates

* rename

* comments

* clean comments

* simplification

* updates

* updates

* refactor

* reduced type confusion

* undo

* rename

* undo changes

* undo

* simplify

* updates

* updates

* revert

* updates

* db updates

* type fix

* fix

* fix error handling

* updates

* docs

* docs

* updates

* rename

* dedupe

* revert

* uncook

* updates

* fix

* fix

* fix

* fix

* prepare merge

* readd migrations

* add back missed code

* migrate enrichment logic to general abstraction

* address bugbot concerns

* adhere to size limits for tables

* remove conflicting migration

* add back migrations

* fix tables auth

* fix permissive auth

* fix lint

* reran migrations

* migrate to use tanstack query for all server state

* update table-selector

* update names

* added tables to permission groups, updated subblock types

---------

Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
Co-authored-by: waleed <walif6@gmail.com>

* fix(snapshot): changed insert to upsert when concurrent identical child workflows are running (#3259)

* fix(snapshot): changed insert to upsert when concurrent identical child workflows are running

* fixed ci tests failing

* fix(workflows): disallow duplicate workflow names at the same folder level (#3260)

* feat(tools): added redis, upstash, algolia, and revenuecat (#3261)

* feat(tools): added redis, upstash, algolia, and revenuecat

* ack comment

* feat(models): add gemini-3.1-pro-preview and update gemini-3-pro thinking levels (#3263)

* fix(audit-log): lazily resolve actor name/email when missing (#3262)

* fix(blocks): move type coercions from tools.config.tool to tools.config.params (#3264)

* fix(blocks): move type coercions from tools.config.tool to tools.config.params

Number() coercions in tools.config.tool ran at serialization time before
variable resolution, destroying dynamic references like <block.result.count>
by converting them to NaN/null. Moved all coercions to tools.config.params
which runs at execution time after variables are resolved.

Fixed in 15 blocks: exa, arxiv, sentry, incidentio, wikipedia, ahrefs,
posthog, elasticsearch, dropbox, hunter, lemlist, spotify, youtube, grafana,
parallel. Also added mode: 'advanced' to optional exa fields.

Closes #3258

* fix(blocks): address PR review — move remaining param mutations from tool() to params()

- Moved field mappings from tool() to params() in grafana, posthog,
  lemlist, spotify, dropbox (same dynamic reference bug)
- Fixed parallel.ts excerpts/full_content boolean logic
- Fixed parallel.ts search_queries empty case (must set undefined)
- Fixed elasticsearch.ts timeout not included when already ends with 's'
- Restored dropbox.ts tool() switch for proper default fallback

* fix(blocks): restore field renames to tool() for serialization-time validation

Field renames (e.g. personalApiKey→apiKey) must be in tool() because
validateRequiredFieldsBeforeExecution calls selectToolId()→tool() then
checks renamed field names on params. Only type coercions (Number(),
boolean) stay in params() to avoid destroying dynamic variable references.

* improvement(resolver): resolved empty sentinel to not pass through unexecuted valid refs to text inputs (#3266)

* fix(blocks): add required constraint for serviceDeskId in JSM block (#3268)

* fix(blocks): add required constraint for serviceDeskId in JSM block

* fix(blocks): rename custom field values to request field values in JSM create request

* fix(trigger): add isolated-vm support to trigger.dev container builds (#3269)

Scheduled workflow executions running in trigger.dev containers were
failing to spawn isolated-vm workers because the native module wasn't
available in the container. This caused loop condition evaluation to
silently fail and exit after one iteration.

- Add isolated-vm to build.external and additionalPackages in trigger config
- Include isolated-vm-worker.cjs via additionalFiles for child process spawning
- Add fallback path resolution for worker file in trigger.dev environment

* fix(tables): hide tables from sidebar and block registry (#3270)

* fix(tables): hide tables from sidebar and block registry

* fix(trigger): add isolated-vm support to trigger.dev container builds (#3269)

Scheduled workflow executions running in trigger.dev containers were
failing to spawn isolated-vm workers because the native module wasn't
available in the container. This caused loop condition evaluation to
silently fail and exit after one iteration.

- Add isolated-vm to build.external and additionalPackages in trigger config
- Include isolated-vm-worker.cjs via additionalFiles for child process spawning
- Add fallback path resolution for worker file in trigger.dev environment

* lint

* fix(trigger): update node version to align with main app (#3272)

* fix(build): fix corrupted sticky disk cache on blacksmith (#3273)

---------

Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
Co-authored-by: Lakee Sivaraya <71339072+lakeesiv@users.noreply.github.com>
Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
Co-authored-by: Vikhyath Mondreti <vikhyathvikku@gmail.com>
2026-02-20 13:43:07 -08:00
Waleed
15ace5e63f v0.5.94: vercel integration, folder insertion, migrated tracking redirects to rewrites 2026-02-18 16:53:34 -08:00
Waleed
fdca73679d v0.5.93: NextJS config changes, MCP and Blocks whitelisting, copilot keyboard shortcuts, audit logs 2026-02-18 12:10:05 -08:00
Waleed
da46a387c9 v0.5.92: shortlinks, copilot scrolling stickiness, pagination 2026-02-17 15:13:21 -08:00
Waleed
b7e377ec4b v0.5.91: docs i18n, turborepo upgrade 2026-02-16 00:36:05 -08:00
114 changed files with 2590 additions and 262 deletions

View File

@@ -0,0 +1,150 @@
---
title: Credential
---
import { Callout } from 'fumadocs-ui/components/callout'
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
import { Image } from '@/components/ui/image'
import { FAQ } from '@/components/ui/faq'
The Credential block has two operations: **Select Credential** picks a single OAuth credential and outputs its ID reference for downstream blocks; **List Credentials** returns all OAuth credentials in the workspace (optionally filtered by provider) as an array for iteration.
<div className="flex justify-center">
<Image
src="/static/blocks/credential.png"
alt="Credential Block"
width={400}
height={300}
className="my-6"
/>
</div>
<Callout>
The Credential block outputs credential **ID references**, not secrets. Downstream blocks receive the ID and resolve the actual OAuth token securely during their own execution.
</Callout>
## Configuration Options
### Operation
| Value | Description |
|---|---|
| **Select Credential** | Pick one OAuth credential and output its reference — use this to wire a single credential into downstream blocks |
| **List Credentials** | Return all OAuth credentials in the workspace as an array — use this with a ForEach loop |
### Credential (Select operation)
Select an OAuth credential from your workspace. The dropdown shows all connected OAuth accounts (Google, GitHub, Slack, etc.).
In advanced mode, paste a credential ID directly. You can copy a credential ID from your workspace's Credentials settings page.
### Provider (List operation)
Filter the returned OAuth credentials by provider. Select one or more providers from the dropdown — only providers you have credentials for will appear. Leave empty to return all OAuth credentials.
| Example | Returns |
|---|---|
| Gmail | Gmail credentials only |
| Slack | Slack credentials only |
| Gmail + Slack | Gmail and Slack credentials |
## Outputs
<Tabs items={['Select Credential', 'List Credentials']}>
<Tab>
| Output | Type | Description |
|---|---|---|
| `credentialId` | `string` | The credential ID — pipe this into other blocks' credential fields |
| `displayName` | `string` | Human-readable name (e.g. "waleed@company.com") |
| `providerId` | `string` | OAuth provider ID (e.g. `google-email`, `slack`) |
</Tab>
<Tab>
| Output | Type | Description |
|---|---|---|
| `credentials` | `json` | Array of OAuth credential objects (see shape below) |
| `count` | `number` | Number of credentials returned |
Each object in the `credentials` array:
| Field | Type | Description |
|---|---|---|
| `credentialId` | `string` | The credential ID |
| `displayName` | `string` | Human-readable name |
| `providerId` | `string` | OAuth provider ID |
</Tab>
</Tabs>
## Example Use Cases
**Shared credential across multiple blocks** — Define once, use everywhere
```
Credential (Select, Google) → Gmail (Send) & Google Drive (Upload) & Google Calendar (Create)
```
**Multi-account workflows** — Route to different credentials based on logic
```
Agent (Determine account) → Condition → Credential A or Credential B → Slack (Post)
```
**Iterate over all Gmail accounts**
```
Credential (List, Provider: Gmail) → ForEach Loop → Gmail (Send) using <loop.currentItem.credentialId>
```
<div className="flex justify-center">
<Image
src="/static/blocks/credential-loop.png"
alt="Credential List wired into a ForEach Loop"
width={900}
height={400}
className="my-6"
/>
</div>
## How to wire a Credential block
### Select Credential
1. Drop a **Credential** block and select your OAuth credential from the picker
2. In the downstream block, switch to **advanced mode** on its credential field
3. Enter `<credentialBlockName.credentialId>` as the value
<Tabs items={['Gmail', 'Slack']}>
<Tab>
In the Gmail block's credential field (advanced mode):
```
<myCredential.credentialId>
```
</Tab>
<Tab>
In the Slack block's credential field (advanced mode):
```
<myCredential.credentialId>
```
</Tab>
</Tabs>
### List Credentials
1. Drop a **Credential** block, set Operation to **List Credentials**
2. Optionally select one or more **Providers** to narrow results (only your connected providers appear)
3. Wire `<credentialBlockName.credentials>` into a **ForEach Loop** as the items source
4. Inside the loop, reference `<loop.currentItem.credentialId>` in downstream blocks' credential fields
## Best Practices
- **Define once, reference many times**: When five blocks use the same Google account, use one Credential block and wire all five to `<credential.credentialId>` instead of selecting the account five times
- **Outputs are safe to log**: The `credentialId` output is a UUID reference, not a secret. It is safe to inspect in execution logs
- **Use for environment switching**: Pair with a Condition block to route to a production or staging OAuth credential based on a workflow variable
- **Advanced mode is required**: Downstream blocks must be in advanced mode on their credential field to accept a dynamic reference
- **Use List + ForEach for fan-out**: When you need to run the same action across all accounts of a provider, List Credentials feeds naturally into a ForEach loop
- **Narrow by provider**: Use the Provider multiselect to filter to specific services — only providers you have credentials for are shown
<FAQ items={[
{ question: "Does the Credential block expose my secret or token?", answer: "No. The block outputs a credential ID (a UUID), not the actual OAuth token. Downstream blocks receive the ID and resolve the token securely in their own execution context. Secrets never appear in workflow state, logs, or the canvas." },
{ question: "What credential types does it support?", answer: "OAuth connected accounts only (Google, GitHub, Slack, etc.). Environment variables and service accounts cannot be resolved by ID in downstream blocks, so they are not supported." },
{ question: "How is Select different from just copying a credential ID into advanced mode?", answer: "Functionally identical — both pass the same credential ID to the downstream block. The Credential block adds value when you need to use one credential in many blocks (change it once), or when you want to select between credentials dynamically using a Condition block." },
{ question: "Can I list all OAuth credentials in my workspace?", answer: "Yes. Set the Operation to 'List Credentials'. Optionally filter by provider using the Provider multiselect. Wire the credentials output into a ForEach loop to process each credential individually." },
{ question: "Can I use a Credential block output in a Function block?", answer: "Yes. Reference <credential.credentialId> in your Function block's code. Note that the function will receive the raw UUID string — if you need the resolved token, the downstream block must handle the resolution (as integration blocks do). The Function block does not automatically resolve credential IDs." },
{ question: "What happens if the credential is deleted?", answer: "The Select operation will throw an error at execution time: 'Credential not found'. The List operation will simply omit the deleted credential from the results. Update the Credential block to select a valid credential before re-running." },
]} />

View File

@@ -4,6 +4,7 @@
"agent",
"api",
"condition",
"credential",
"evaluator",
"function",
"guardrails",

Binary file not shown.

After

Width:  |  Height:  |  Size: 63 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 12 KiB

View File

@@ -1,16 +1,18 @@
'use client'
import { Suspense, useMemo, useRef, useState } from 'react'
import { Suspense, useEffect, useMemo, useRef, useState } from 'react'
import { Turnstile, type TurnstileInstance } from '@marsidev/react-turnstile'
import { createLogger } from '@sim/logger'
import { Eye, EyeOff, Loader2 } from 'lucide-react'
import Link from 'next/link'
import { useRouter, useSearchParams } from 'next/navigation'
import { usePostHog } from 'posthog-js/react'
import { Input, Label } from '@/components/emcn'
import { client, useSession } from '@/lib/auth/auth-client'
import { getEnv, isFalsy, isTruthy } from '@/lib/core/config/env'
import { cn } from '@/lib/core/utils/cn'
import { quickValidateEmail } from '@/lib/messaging/email/validation'
import { captureEvent } from '@/lib/posthog/client'
import { AUTH_SUBMIT_BTN } from '@/app/(auth)/components/auth-button-classes'
import { SocialLoginButtons } from '@/app/(auth)/components/social-login-buttons'
import { SSOLoginButton } from '@/app/(auth)/components/sso-login-button'
@@ -81,7 +83,12 @@ function SignupFormContent({
const router = useRouter()
const searchParams = useSearchParams()
const { refetch: refetchSession } = useSession()
const posthog = usePostHog()
const [isLoading, setIsLoading] = useState(false)
useEffect(() => {
captureEvent(posthog, 'signup_page_viewed', {})
}, [posthog])
const [showPassword, setShowPassword] = useState(false)
const [password, setPassword] = useState('')
const [passwordErrors, setPasswordErrors] = useState<string[]>([])

View File

@@ -0,0 +1,15 @@
'use client'
import { useEffect } from 'react'
import { usePostHog } from 'posthog-js/react'
import { captureEvent } from '@/lib/posthog/client'

/**
 * Renderless client component that reports a single `landing_page_viewed`
 * analytics event when mounted on the landing page.
 */
export function LandingAnalytics() {
  const posthogClient = usePostHog()

  useEffect(() => {
    // Fire-and-forget page-view event; the effect re-runs only if the
    // PostHog client instance itself changes.
    captureEvent(posthogClient, 'landing_page_viewed', {})
  }, [posthogClient])

  return null
}

View File

@@ -13,6 +13,7 @@ import {
Templates,
Testimonials,
} from '@/app/(home)/components'
import { LandingAnalytics } from '@/app/(home)/landing-analytics'
/**
* Landing page root component.
@@ -45,6 +46,7 @@ export default async function Landing() {
>
Skip to main content
</a>
<LandingAnalytics />
<StructuredData />
<header>
<Navbar blogPosts={blogPosts} />

View File

@@ -7,6 +7,7 @@ import { generateAgentCard, generateSkillsFromWorkflow } from '@/lib/a2a/agent-c
import type { AgentCapabilities, AgentSkill } from '@/lib/a2a/types'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { getRedisClient } from '@/lib/core/config/redis'
import { captureServerEvent } from '@/lib/posthog/server'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
@@ -180,6 +181,17 @@ export async function DELETE(request: NextRequest, { params }: { params: Promise
logger.info(`Deleted A2A agent: ${agentId}`)
captureServerEvent(
auth.userId,
'a2a_agent_deleted',
{
agent_id: agentId,
workflow_id: existingAgent.workflowId,
workspace_id: existingAgent.workspaceId,
},
{ groups: { workspace: existingAgent.workspaceId } }
)
return NextResponse.json({ success: true })
} catch (error) {
logger.error('Error deleting agent:', error)
@@ -251,6 +263,16 @@ export async function POST(request: NextRequest, { params }: { params: Promise<R
}
logger.info(`Published A2A agent: ${agentId}`)
captureServerEvent(
auth.userId,
'a2a_agent_published',
{
agent_id: agentId,
workflow_id: existingAgent.workflowId,
workspace_id: existingAgent.workspaceId,
},
{ groups: { workspace: existingAgent.workspaceId } }
)
return NextResponse.json({ success: true, isPublished: true })
}
@@ -273,6 +295,16 @@ export async function POST(request: NextRequest, { params }: { params: Promise<R
}
logger.info(`Unpublished A2A agent: ${agentId}`)
captureServerEvent(
auth.userId,
'a2a_agent_unpublished',
{
agent_id: agentId,
workflow_id: existingAgent.workflowId,
workspace_id: existingAgent.workspaceId,
},
{ groups: { workspace: existingAgent.workspaceId } }
)
return NextResponse.json({ success: true, isPublished: false })
}

View File

@@ -14,6 +14,7 @@ import { generateSkillsFromWorkflow } from '@/lib/a2a/agent-card'
import { A2A_DEFAULT_CAPABILITIES } from '@/lib/a2a/constants'
import { sanitizeAgentName } from '@/lib/a2a/utils'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { captureServerEvent } from '@/lib/posthog/server'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { hasValidStartBlockInState } from '@/lib/workflows/triggers/trigger-utils'
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
@@ -201,6 +202,16 @@ export async function POST(request: NextRequest) {
logger.info(`Created A2A agent ${agentId} for workflow ${workflowId}`)
captureServerEvent(
auth.userId,
'a2a_agent_created',
{ agent_id: agentId, workflow_id: workflowId, workspace_id: workspaceId },
{
groups: { workspace: workspaceId },
setOnce: { first_a2a_agent_created_at: new Date().toISOString() },
}
)
return NextResponse.json({ success: true, agent }, { status: 201 })
} catch (error) {
logger.error('Error creating agent:', error)

View File

@@ -17,6 +17,7 @@ import {
hasUsableSubscriptionStatus,
} from '@/lib/billing/subscriptions/utils'
import { isBillingEnabled } from '@/lib/core/config/feature-flags'
import { captureServerEvent } from '@/lib/posthog/server'
const logger = createLogger('SwitchPlan')
@@ -173,6 +174,13 @@ export async function POST(request: NextRequest) {
interval: targetInterval,
})
captureServerEvent(
userId,
'subscription_changed',
{ from_plan: sub.plan ?? 'unknown', to_plan: targetPlanName, interval: targetInterval },
{ set: { plan: targetPlanName } }
)
return NextResponse.json({ success: true, plan: targetPlanName, interval: targetInterval })
} catch (error) {
logger.error('Failed to switch subscription', {

View File

@@ -27,6 +27,7 @@ import {
createRequestTracker,
createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
import { captureServerEvent } from '@/lib/posthog/server'
import {
authorizeWorkflowByWorkspacePermission,
resolveWorkflowIdForUser,
@@ -188,6 +189,22 @@ export async function POST(req: NextRequest) {
.warn('Failed to resolve workspaceId from workflow')
}
captureServerEvent(
authenticatedUserId,
'copilot_chat_sent',
{
workflow_id: workflowId,
workspace_id: resolvedWorkspaceId ?? '',
has_file_attachments: Array.isArray(fileAttachments) && fileAttachments.length > 0,
has_contexts: Array.isArray(contexts) && contexts.length > 0,
mode,
},
{
groups: resolvedWorkspaceId ? { workspace: resolvedWorkspaceId } : undefined,
setOnce: { first_copilot_use_at: new Date().toISOString() },
}
)
const userMessageIdToUse = userMessageId || crypto.randomUUID()
const reqLogger = logger.withMetadata({
requestId: tracker.requestId,

View File

@@ -11,6 +11,7 @@ import {
createRequestTracker,
createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
import { captureServerEvent } from '@/lib/posthog/server'
const logger = createLogger('CopilotFeedbackAPI')
@@ -76,6 +77,12 @@ export async function POST(req: NextRequest) {
duration: tracker.getDuration(),
})
captureServerEvent(authenticatedUserId, 'copilot_feedback_submitted', {
is_positive: isPositiveFeedback,
has_text_feedback: !!feedback,
has_workflow_yaml: !!workflowYaml,
})
return NextResponse.json({
success: true,
feedbackId: feedbackRecord.feedbackId,

View File

@@ -11,6 +11,7 @@ import {
syncPersonalEnvCredentialsForUser,
syncWorkspaceEnvCredentials,
} from '@/lib/credentials/environment'
import { captureServerEvent } from '@/lib/posthog/server'
const logger = createLogger('CredentialByIdAPI')
@@ -236,6 +237,17 @@ export async function DELETE(
envKeys: Object.keys(current),
})
captureServerEvent(
session.user.id,
'credential_deleted',
{
credential_type: 'env_personal',
provider_id: access.credential.envKey,
workspace_id: access.credential.workspaceId,
},
{ groups: { workspace: access.credential.workspaceId } }
)
return NextResponse.json({ success: true }, { status: 200 })
}
@@ -278,10 +290,33 @@ export async function DELETE(
actingUserId: session.user.id,
})
captureServerEvent(
session.user.id,
'credential_deleted',
{
credential_type: 'env_workspace',
provider_id: access.credential.envKey,
workspace_id: access.credential.workspaceId,
},
{ groups: { workspace: access.credential.workspaceId } }
)
return NextResponse.json({ success: true }, { status: 200 })
}
await db.delete(credential).where(eq(credential.id, id))
captureServerEvent(
session.user.id,
'credential_deleted',
{
credential_type: access.credential.type as 'oauth' | 'service_account',
provider_id: access.credential.providerId ?? id,
workspace_id: access.credential.workspaceId,
},
{ groups: { workspace: access.credential.workspaceId } }
)
return NextResponse.json({ success: true }, { status: 200 })
} catch (error) {
logger.error('Failed to delete credential', error)

View File

@@ -10,6 +10,7 @@ import { generateRequestId } from '@/lib/core/utils/request'
import { getWorkspaceMemberUserIds } from '@/lib/credentials/environment'
import { syncWorkspaceOAuthCredentialsForUser } from '@/lib/credentials/oauth'
import { getServiceConfigByProviderId } from '@/lib/oauth'
import { captureServerEvent } from '@/lib/posthog/server'
import { checkWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
import { isValidEnvVarName } from '@/executor/constants'
@@ -600,6 +601,16 @@ export async function POST(request: NextRequest) {
.where(eq(credential.id, credentialId))
.limit(1)
captureServerEvent(
session.user.id,
'credential_connected',
{ credential_type: type, provider_id: resolvedProviderId ?? type, workspace_id: workspaceId },
{
groups: { workspace: workspaceId },
setOnce: { first_credential_connected_at: new Date().toISOString() },
}
)
return NextResponse.json({ credential: created }, { status: 201 })
} catch (error: any) {
if (error?.code === '23505') {

View File

@@ -16,6 +16,7 @@ import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { deleteDocumentStorageFiles } from '@/lib/knowledge/documents/service'
import { cleanupUnusedTagDefinitions } from '@/lib/knowledge/tags/service'
import { captureServerEvent } from '@/lib/posthog/server'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
@@ -351,6 +352,19 @@ export async function DELETE(request: NextRequest, { params }: RouteParams) {
`[${requestId}] Deleted connector ${connectorId}${deleteDocuments ? ` and ${docCount} documents` : `, kept ${docCount} documents`}`
)
const kbWorkspaceId = writeCheck.knowledgeBase.workspaceId ?? ''
captureServerEvent(
auth.userId,
'knowledge_base_connector_removed',
{
knowledge_base_id: knowledgeBaseId,
workspace_id: kbWorkspaceId,
connector_type: existingConnector[0].connectorType,
documents_deleted: deleteDocuments ? docCount : 0,
},
kbWorkspaceId ? { groups: { workspace: kbWorkspaceId } } : undefined
)
recordAudit({
workspaceId: writeCheck.knowledgeBase.workspaceId,
actorId: auth.userId,

View File

@@ -7,6 +7,7 @@ import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine'
import { captureServerEvent } from '@/lib/posthog/server'
import { checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'
const logger = createLogger('ConnectorManualSyncAPI')
@@ -55,6 +56,18 @@ export async function POST(request: NextRequest, { params }: RouteParams) {
logger.info(`[${requestId}] Manual sync triggered for connector ${connectorId}`)
const kbWorkspaceId = writeCheck.knowledgeBase.workspaceId ?? ''
captureServerEvent(
auth.userId,
'knowledge_base_connector_synced',
{
knowledge_base_id: knowledgeBaseId,
workspace_id: kbWorkspaceId,
connector_type: connectorRows[0].connectorType,
},
kbWorkspaceId ? { groups: { workspace: kbWorkspaceId } } : undefined
)
recordAudit({
workspaceId: writeCheck.knowledgeBase.workspaceId,
actorId: auth.userId,

View File

@@ -11,6 +11,7 @@ import { generateRequestId } from '@/lib/core/utils/request'
import { dispatchSync } from '@/lib/knowledge/connectors/sync-engine'
import { allocateTagSlots } from '@/lib/knowledge/constants'
import { createTagDefinition } from '@/lib/knowledge/tags/service'
import { captureServerEvent } from '@/lib/posthog/server'
import { getCredential } from '@/app/api/auth/oauth/utils'
import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'
import { CONNECTOR_REGISTRY } from '@/connectors/registry'
@@ -227,6 +228,22 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
logger.info(`[${requestId}] Created connector ${connectorId} for KB ${knowledgeBaseId}`)
const kbWorkspaceId = writeCheck.knowledgeBase.workspaceId ?? ''
captureServerEvent(
auth.userId,
'knowledge_base_connector_added',
{
knowledge_base_id: knowledgeBaseId,
workspace_id: kbWorkspaceId,
connector_type: connectorType,
sync_interval_minutes: syncIntervalMinutes,
},
{
groups: kbWorkspaceId ? { workspace: kbWorkspaceId } : undefined,
setOnce: { first_connector_added_at: new Date().toISOString() },
}
)
recordAudit({
workspaceId: writeCheck.knowledgeBase.workspaceId,
actorId: auth.userId,

View File

@@ -16,6 +16,7 @@ import {
type TagFilterCondition,
} from '@/lib/knowledge/documents/service'
import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types'
import { captureServerEvent } from '@/lib/posthog/server'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'
@@ -214,6 +215,8 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const kbWorkspaceId = accessCheck.knowledgeBase?.workspaceId
if (body.bulk === true) {
try {
const validatedData = BulkCreateDocumentsSchema.parse(body)
@@ -240,6 +243,21 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
// Silently fail
}
captureServerEvent(
userId,
'knowledge_base_document_uploaded',
{
knowledge_base_id: knowledgeBaseId,
workspace_id: kbWorkspaceId ?? '',
document_count: createdDocuments.length,
upload_type: 'bulk',
},
{
...(kbWorkspaceId ? { groups: { workspace: kbWorkspaceId } } : {}),
setOnce: { first_document_uploaded_at: new Date().toISOString() },
}
)
processDocumentsWithQueue(
createdDocuments,
knowledgeBaseId,
@@ -314,6 +332,21 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
// Silently fail
}
captureServerEvent(
userId,
'knowledge_base_document_uploaded',
{
knowledge_base_id: knowledgeBaseId,
workspace_id: kbWorkspaceId ?? '',
document_count: 1,
upload_type: 'single',
},
{
...(kbWorkspaceId ? { groups: { workspace: kbWorkspaceId } } : {}),
setOnce: { first_document_uploaded_at: new Date().toISOString() },
}
)
recordAudit({
workspaceId: accessCheck.knowledgeBase?.workspaceId ?? null,
actorId: userId,

View File

@@ -11,6 +11,7 @@ import {
KnowledgeBaseConflictError,
type KnowledgeBaseScope,
} from '@/lib/knowledge/service'
import { captureServerEvent } from '@/lib/posthog/server'
const logger = createLogger('KnowledgeBaseAPI')
@@ -115,6 +116,20 @@ export async function POST(req: NextRequest) {
// Telemetry should not fail the operation
}
captureServerEvent(
session.user.id,
'knowledge_base_created',
{
knowledge_base_id: newKnowledgeBase.id,
workspace_id: validatedData.workspaceId,
name: validatedData.name,
},
{
groups: { workspace: validatedData.workspaceId },
setOnce: { first_kb_created_at: new Date().toISOString() },
}
)
logger.info(
`[${requestId}] Knowledge base created: ${newKnowledgeBase.id} for user ${session.user.id}`
)

View File

@@ -18,6 +18,7 @@ import {
createMcpSuccessResponse,
generateMcpServerId,
} from '@/lib/mcp/utils'
import { captureServerEvent } from '@/lib/posthog/server'
const logger = createLogger('McpServersAPI')
@@ -180,6 +181,20 @@ export const POST = withMcpAuth('write')(
// Silently fail
}
const sourceParam = body.source as string | undefined
const source =
sourceParam === 'settings' || sourceParam === 'tool_input' ? sourceParam : undefined
captureServerEvent(
userId,
'mcp_server_connected',
{ workspace_id: workspaceId, server_name: body.name, transport: body.transport, source },
{
groups: { workspace: workspaceId },
setOnce: { first_mcp_connected_at: new Date().toISOString() },
}
)
recordAudit({
workspaceId,
actorId: userId,
@@ -214,6 +229,9 @@ export const DELETE = withMcpAuth('admin')(
try {
const { searchParams } = new URL(request.url)
const serverId = searchParams.get('serverId')
const sourceParam = searchParams.get('source')
const source =
sourceParam === 'settings' || sourceParam === 'tool_input' ? sourceParam : undefined
if (!serverId) {
return createMcpErrorResponse(
@@ -242,6 +260,13 @@ export const DELETE = withMcpAuth('admin')(
logger.info(`[${requestId}] Successfully deleted MCP server: ${serverId}`)
captureServerEvent(
userId,
'mcp_server_disconnected',
{ workspace_id: workspaceId, server_name: deletedServer.name, source },
{ groups: { workspace: workspaceId } }
)
recordAudit({
workspaceId,
actorId: userId,

View File

@@ -13,6 +13,7 @@ import {
createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
import { taskPubSub } from '@/lib/copilot/task-events'
import { captureServerEvent } from '@/lib/posthog/server'
const logger = createLogger('MothershipChatAPI')
@@ -142,12 +143,41 @@ export async function PATCH(
return NextResponse.json({ success: false, error: 'Chat not found' }, { status: 404 })
}
if (title !== undefined && updatedChat.workspaceId) {
taskPubSub?.publishStatusChanged({
workspaceId: updatedChat.workspaceId,
chatId,
type: 'renamed',
})
if (updatedChat.workspaceId) {
if (title !== undefined) {
taskPubSub?.publishStatusChanged({
workspaceId: updatedChat.workspaceId,
chatId,
type: 'renamed',
})
captureServerEvent(
userId,
'task_renamed',
{ workspace_id: updatedChat.workspaceId },
{
groups: { workspace: updatedChat.workspaceId },
}
)
}
if (isUnread === false) {
captureServerEvent(
userId,
'task_marked_read',
{ workspace_id: updatedChat.workspaceId },
{
groups: { workspace: updatedChat.workspaceId },
}
)
} else if (isUnread === true) {
captureServerEvent(
userId,
'task_marked_unread',
{ workspace_id: updatedChat.workspaceId },
{
groups: { workspace: updatedChat.workspaceId },
}
)
}
}
return NextResponse.json({ success: true })
@@ -203,6 +233,14 @@ export async function DELETE(
chatId,
type: 'deleted',
})
captureServerEvent(
userId,
'task_deleted',
{ workspace_id: deletedChat.workspaceId },
{
groups: { workspace: deletedChat.workspaceId },
}
)
}
return NextResponse.json({ success: true })

View File

@@ -11,6 +11,7 @@ import {
createUnauthorizedResponse,
} from '@/lib/copilot/request-helpers'
import { taskPubSub } from '@/lib/copilot/task-events'
import { captureServerEvent } from '@/lib/posthog/server'
import { assertActiveWorkspaceAccess } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('MothershipChatsAPI')
@@ -95,6 +96,15 @@ export async function POST(request: NextRequest) {
taskPubSub?.publishStatusChanged({ workspaceId, chatId: chat.id, type: 'created' })
captureServerEvent(
userId,
'task_created',
{ workspace_id: workspaceId },
{
groups: { workspace: workspaceId },
}
)
return NextResponse.json({ success: true, id: chat.id })
} catch (error) {
if (error instanceof z.ZodError) {

View File

@@ -4,6 +4,7 @@ import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import { deleteSkill, listSkills, upsertSkills } from '@/lib/workflows/skills/operations'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
@@ -23,6 +24,7 @@ const SkillSchema = z.object({
})
),
workspaceId: z.string().optional(),
source: z.enum(['settings', 'tool_input']).optional(),
})
/** GET - Fetch all skills for a workspace */
@@ -75,7 +77,7 @@ export async function POST(req: NextRequest) {
const body = await req.json()
try {
const { skills, workspaceId } = SkillSchema.parse(body)
const { skills, workspaceId, source } = SkillSchema.parse(body)
if (!workspaceId) {
logger.warn(`[${requestId}] Missing workspaceId in request body`)
@@ -107,6 +109,12 @@ export async function POST(req: NextRequest) {
resourceName: skill.name,
description: `Created/updated skill "${skill.name}"`,
})
captureServerEvent(
userId,
'skill_created',
{ skill_id: skill.id, skill_name: skill.name, workspace_id: workspaceId, source },
{ groups: { workspace: workspaceId } }
)
}
return NextResponse.json({ success: true, data: resultSkills })
@@ -137,6 +145,9 @@ export async function DELETE(request: NextRequest) {
const searchParams = request.nextUrl.searchParams
const skillId = searchParams.get('id')
const workspaceId = searchParams.get('workspaceId')
const sourceParam = searchParams.get('source')
const source =
sourceParam === 'settings' || sourceParam === 'tool_input' ? sourceParam : undefined
try {
const authResult = await checkSessionOrInternalAuth(request, { requireWorkflowId: false })
@@ -180,6 +191,13 @@ export async function DELETE(request: NextRequest) {
description: `Deleted skill`,
})
captureServerEvent(
userId,
'skill_deleted',
{ skill_id: skillId, workspace_id: workspaceId, source },
{ groups: { workspace: workspaceId } }
)
logger.info(`[${requestId}] Deleted skill: ${skillId}`)
return NextResponse.json({ success: true })
} catch (error) {

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import {
deleteTable,
NAME_PATTERN,
@@ -183,6 +184,13 @@ export async function DELETE(request: NextRequest, { params }: TableRouteParams)
await deleteTable(tableId, requestId)
captureServerEvent(
authResult.userId,
'table_deleted',
{ table_id: tableId, workspace_id: table.workspaceId },
{ groups: { workspace: table.workspaceId } }
)
return NextResponse.json({
success: true,
data: {

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import {
createTable,
getWorkspaceTableLimits,
@@ -141,6 +142,20 @@ export async function POST(request: NextRequest) {
requestId
)
captureServerEvent(
authResult.userId,
'table_created',
{
table_id: table.id,
workspace_id: params.workspaceId,
column_count: params.schema.columns.length,
},
{
groups: { workspace: params.workspaceId },
setOnce: { first_table_created_at: new Date().toISOString() },
}
)
return NextResponse.json({
success: true,
data: {

View File

@@ -7,6 +7,7 @@ import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import { upsertCustomTools } from '@/lib/workflows/custom-tools/operations'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
@@ -34,6 +35,7 @@ const CustomToolSchema = z.object({
})
),
workspaceId: z.string().optional(),
source: z.enum(['settings', 'tool_input']).optional(),
})
// GET - Fetch all custom tools for the workspace
@@ -135,7 +137,7 @@ export async function POST(req: NextRequest) {
try {
// Validate the request body
const { tools, workspaceId } = CustomToolSchema.parse(body)
const { tools, workspaceId, source } = CustomToolSchema.parse(body)
if (!workspaceId) {
logger.warn(`[${requestId}] Missing workspaceId in request body`)
@@ -168,6 +170,16 @@ export async function POST(req: NextRequest) {
})
for (const tool of resultTools) {
captureServerEvent(
userId,
'custom_tool_saved',
{ tool_id: tool.id, workspace_id: workspaceId, tool_name: tool.title, source },
{
groups: { workspace: workspaceId },
setOnce: { first_custom_tool_saved_at: new Date().toISOString() },
}
)
recordAudit({
workspaceId,
actorId: userId,
@@ -205,6 +217,9 @@ export async function DELETE(request: NextRequest) {
const searchParams = request.nextUrl.searchParams
const toolId = searchParams.get('id')
const workspaceId = searchParams.get('workspaceId')
const sourceParam = searchParams.get('source')
const source =
sourceParam === 'settings' || sourceParam === 'tool_input' ? sourceParam : undefined
if (!toolId) {
logger.warn(`[${requestId}] Missing tool ID for deletion`)
@@ -278,6 +293,14 @@ export async function DELETE(request: NextRequest) {
// Delete the tool
await db.delete(customTools).where(eq(customTools.id, toolId))
const toolWorkspaceId = tool.workspaceId ?? workspaceId ?? ''
captureServerEvent(
userId,
'custom_tool_deleted',
{ tool_id: toolId, workspace_id: toolWorkspaceId, source },
toolWorkspaceId ? { groups: { workspace: toolWorkspaceId } } : undefined
)
recordAudit({
workspaceId: tool.workspaceId || undefined,
actorId: userId,

View File

@@ -8,6 +8,7 @@ import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { validateInteger } from '@/lib/core/security/input-validation'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import { cleanupExternalWebhook } from '@/lib/webhooks/provider-subscriptions'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
@@ -274,6 +275,19 @@ export async function DELETE(
request,
})
const wsId = webhookData.workflow.workspaceId || undefined
captureServerEvent(
userId,
'webhook_trigger_deleted',
{
webhook_id: id,
workflow_id: webhookData.workflow.id,
provider: foundWebhook.provider || 'generic',
workspace_id: wsId ?? '',
},
wsId ? { groups: { workspace: wsId } } : undefined
)
return NextResponse.json({ success: true }, { status: 200 })
} catch (error: any) {
logger.error(`[${requestId}] Error deleting webhook`, {

View File

@@ -9,6 +9,7 @@ import { getSession } from '@/lib/auth'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
import { getProviderIdFromServiceId } from '@/lib/oauth'
import { captureServerEvent } from '@/lib/posthog/server'
import { resolveEnvVarsInObject } from '@/lib/webhooks/env-resolver'
import {
cleanupExternalWebhook,
@@ -763,6 +764,19 @@ export async function POST(request: NextRequest) {
metadata: { provider, workflowId },
request,
})
const wsId = workflowRecord.workspaceId || undefined
captureServerEvent(
userId,
'webhook_trigger_created',
{
webhook_id: savedWebhook.id,
workflow_id: workflowId,
provider: provider || 'generic',
workspace_id: wsId ?? '',
},
wsId ? { groups: { workspace: wsId } } : undefined
)
}
const status = targetWebhookId ? 200 : 201

View File

@@ -3,6 +3,7 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import { performFullDeploy, performFullUndeploy } from '@/lib/workflows/orchestration'
import { validateWorkflowPermissions } from '@/lib/workflows/utils'
import {
@@ -96,6 +97,16 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
logger.info(`[${requestId}] Workflow deployed successfully: ${id}`)
captureServerEvent(
actorUserId,
'workflow_deployed',
{ workflow_id: id, workspace_id: workflowData!.workspaceId ?? '' },
{
groups: workflowData!.workspaceId ? { workspace: workflowData!.workspaceId } : undefined,
setOnce: { first_workflow_deployed_at: new Date().toISOString() },
}
)
const responseApiKeyInfo = workflowData!.workspaceId
? 'Workspace API keys'
: 'Personal API keys'
@@ -118,7 +129,11 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
const { id } = await params
try {
const { error, session } = await validateWorkflowPermissions(id, requestId, 'admin')
const {
error,
session,
workflow: workflowData,
} = await validateWorkflowPermissions(id, requestId, 'admin')
if (error) {
return createErrorResponse(error.message, error.status)
}
@@ -148,6 +163,14 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
logger.info(`[${requestId}] Updated isPublicApi for workflow ${id} to ${isPublicApi}`)
const wsId = workflowData?.workspaceId
captureServerEvent(
session!.user.id,
'workflow_public_api_toggled',
{ workflow_id: id, workspace_id: wsId ?? '', is_public: isPublicApi },
wsId ? { groups: { workspace: wsId } } : undefined
)
return createSuccessResponse({ isPublicApi })
} catch (error: unknown) {
const message = error instanceof Error ? error.message : 'Failed to update deployment settings'
@@ -164,7 +187,11 @@ export async function DELETE(
const { id } = await params
try {
const { error, session } = await validateWorkflowPermissions(id, requestId, 'admin')
const {
error,
session,
workflow: workflowData,
} = await validateWorkflowPermissions(id, requestId, 'admin')
if (error) {
return createErrorResponse(error.message, error.status)
}
@@ -179,6 +206,14 @@ export async function DELETE(
return createErrorResponse(result.error || 'Failed to undeploy workflow', 500)
}
const wsId = workflowData?.workspaceId
captureServerEvent(
session!.user.id,
'workflow_undeployed',
{ workflow_id: id, workspace_id: wsId ?? '' },
wsId ? { groups: { workspace: wsId } } : undefined
)
return createSuccessResponse({
isDeployed: false,
deployedAt: null,

View File

@@ -5,6 +5,7 @@ import type { NextRequest } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { env } from '@/lib/core/config/env'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils'
import { validateWorkflowPermissions } from '@/lib/workflows/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
@@ -104,6 +105,19 @@ export async function POST(
logger.error('Error sending workflow reverted event to socket server', e)
}
captureServerEvent(
session!.user.id,
'workflow_deployment_reverted',
{
workflow_id: id,
workspace_id: workflowRecord?.workspaceId ?? '',
version,
},
workflowRecord?.workspaceId
? { groups: { workspace: workflowRecord.workspaceId } }
: undefined
)
recordAudit({
workspaceId: workflowRecord?.workspaceId ?? null,
actorId: session!.user.id,

View File

@@ -4,6 +4,7 @@ import { and, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { z } from 'zod'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import { performActivateVersion } from '@/lib/workflows/orchestration'
import { validateWorkflowPermissions } from '@/lib/workflows/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
@@ -174,6 +175,14 @@ export async function PATCH(
}
}
const wsId = (workflowData as { workspaceId?: string } | null)?.workspaceId
captureServerEvent(
actorUserId,
'deployment_version_activated',
{ workflow_id: id, workspace_id: wsId ?? '', version: versionNum },
wsId ? { groups: { workspace: wsId } } : undefined
)
return createSuccessResponse({
success: true,
deployedAt: activateResult.deployedAt,

View File

@@ -5,6 +5,7 @@ import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import { duplicateWorkflow } from '@/lib/workflows/persistence/duplicate'
const logger = createLogger('WorkflowDuplicateAPI')
@@ -60,6 +61,17 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
// Telemetry should not fail the operation
}
captureServerEvent(
userId,
'workflow_duplicated',
{
source_workflow_id: sourceWorkflowId,
new_workflow_id: result.id,
workspace_id: workspaceId ?? '',
},
workspaceId ? { groups: { workspace: workspaceId } } : undefined
)
const elapsed = Date.now() - startTime
logger.info(
`[${requestId}] Successfully duplicated workflow ${sourceWorkflowId} to ${result.id} in ${elapsed}ms`

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { markExecutionCancelled } from '@/lib/execution/cancellation'
import { abortManualExecution } from '@/lib/execution/manual-cancellation'
import { captureServerEvent } from '@/lib/posthog/server'
import { authorizeWorkflowByWorkspacePermission } from '@/lib/workflows/utils'
const logger = createLogger('CancelExecutionAPI')
@@ -60,6 +61,16 @@ export async function POST(
})
}
if (cancellation.durablyRecorded || locallyAborted) {
const workspaceId = workflowAuthorization.workflow?.workspaceId
captureServerEvent(
auth.userId,
'workflow_execution_cancelled',
{ workflow_id: workflowId, workspace_id: workspaceId ?? '' },
workspaceId ? { groups: { workspace: workspaceId } } : undefined
)
}
return NextResponse.json({
success: cancellation.durablyRecorded || locallyAborted,
executionId,

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import { restoreWorkflow } from '@/lib/workflows/lifecycle'
import { getWorkflowById } from '@/lib/workflows/utils'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
@@ -58,6 +59,13 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
request,
})
captureServerEvent(
auth.userId,
'workflow_restored',
{ workflow_id: workflowId, workspace_id: workflowData.workspaceId ?? '' },
workflowData.workspaceId ? { groups: { workspace: workflowData.workspaceId } } : undefined
)
return NextResponse.json({ success: true })
} catch (error) {
logger.error(`[${requestId}] Error restoring workflow ${workflowId}`, error)

View File

@@ -6,6 +6,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuthType, checkHybridAuth, checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import { performDeleteWorkflow } from '@/lib/workflows/orchestration'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { authorizeWorkflowByWorkspacePermission, getWorkflowById } from '@/lib/workflows/utils'
@@ -225,6 +226,13 @@ export async function DELETE(
return NextResponse.json({ error: result.error }, { status })
}
captureServerEvent(
userId,
'workflow_deleted',
{ workflow_id: workflowId, workspace_id: workflowData.workspaceId ?? '' },
workflowData.workspaceId ? { groups: { workspace: workflowData.workspaceId } } : undefined
)
const elapsed = Date.now() - startTime
logger.info(`[${requestId}] Successfully archived workflow ${workflowId} in ${elapsed}ms`)

View File

@@ -7,6 +7,7 @@ import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import { getNextWorkflowColor } from '@/lib/workflows/colors'
import { buildDefaultWorkflowArtifacts } from '@/lib/workflows/defaults'
import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils'
@@ -274,6 +275,16 @@ export async function POST(req: NextRequest) {
logger.info(`[${requestId}] Successfully created workflow ${workflowId} with default blocks`)
captureServerEvent(
userId,
'workflow_created',
{ workflow_id: workflowId, workspace_id: workspaceId ?? '', name },
{
groups: workspaceId ? { workspace: workspaceId } : undefined,
setOnce: { first_workflow_created_at: new Date().toISOString() },
}
)
recordAudit({
workspaceId,
actorId: userId,

View File

@@ -7,6 +7,7 @@ import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('WorkspaceApiKeyAPI')
@@ -145,6 +146,13 @@ export async function DELETE(
const deletedKey = deletedRows[0]
captureServerEvent(
userId,
'api_key_revoked',
{ workspace_id: workspaceId, key_name: deletedKey.name },
{ groups: { workspace: workspaceId } }
)
recordAudit({
workspaceId,
actorId: userId,

View File

@@ -10,12 +10,14 @@ import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import { getUserEntityPermissions, getWorkspaceById } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('WorkspaceApiKeysAPI')
const CreateKeySchema = z.object({
name: z.string().trim().min(1, 'Name is required'),
source: z.enum(['settings', 'deploy_modal']).optional(),
})
const DeleteKeysSchema = z.object({
@@ -101,7 +103,7 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
}
const body = await request.json()
const { name } = CreateKeySchema.parse(body)
const { name, source } = CreateKeySchema.parse(body)
const existingKey = await db
.select()
@@ -158,6 +160,16 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
// Telemetry should not fail the operation
}
captureServerEvent(
userId,
'api_key_created',
{ workspace_id: workspaceId, key_name: name, source },
{
groups: { workspace: workspaceId },
setOnce: { first_api_key_created_at: new Date().toISOString() },
}
)
logger.info(`[${requestId}] Created workspace API key: ${name} in workspace ${workspaceId}`)
recordAudit({

View File

@@ -9,6 +9,7 @@ import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { decryptSecret, encryptSecret } from '@/lib/core/security/encryption'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import { getUserEntityPermissions, getWorkspaceById } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('WorkspaceBYOKKeysAPI')
@@ -201,6 +202,16 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
logger.info(`[${requestId}] Created BYOK key for ${providerId} in workspace ${workspaceId}`)
captureServerEvent(
userId,
'byok_key_added',
{ workspace_id: workspaceId, provider_id: providerId },
{
groups: { workspace: workspaceId },
setOnce: { first_byok_key_added_at: new Date().toISOString() },
}
)
recordAudit({
workspaceId,
actorId: userId,
@@ -272,6 +283,13 @@ export async function DELETE(
logger.info(`[${requestId}] Deleted BYOK key for ${providerId} from workspace ${workspaceId}`)
captureServerEvent(
userId,
'byok_key_removed',
{ workspace_id: workspaceId, provider_id: providerId },
{ groups: { workspace: workspaceId } }
)
recordAudit({
workspaceId,
actorId: userId,

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { captureServerEvent } from '@/lib/posthog/server'
import {
FileConflictError,
listWorkspaceFiles,
@@ -116,6 +117,13 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
logger.info(`[${requestId}] Uploaded workspace file: ${fileName}`)
captureServerEvent(
session.user.id,
'file_uploaded',
{ workspace_id: workspaceId, file_type: rawFile.type || 'application/octet-stream' },
{ groups: { workspace: workspaceId } }
)
recordAudit({
workspaceId,
actorId: session.user.id,

View File

@@ -7,6 +7,7 @@ import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { encryptSecret } from '@/lib/core/security/encryption'
import { captureServerEvent } from '@/lib/posthog/server'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import { MAX_EMAIL_RECIPIENTS, MAX_WORKFLOW_IDS } from '../constants'
@@ -342,6 +343,17 @@ export async function DELETE(request: NextRequest, { params }: RouteParams) {
request,
})
captureServerEvent(
session.user.id,
'notification_channel_deleted',
{
notification_id: notificationId,
notification_type: deletedSubscription.notificationType,
workspace_id: workspaceId,
},
{ groups: { workspace: workspaceId } }
)
return NextResponse.json({ success: true })
} catch (error) {
logger.error('Error deleting notification', { error })

View File

@@ -8,6 +8,7 @@ import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { encryptSecret } from '@/lib/core/security/encryption'
import { captureServerEvent } from '@/lib/posthog/server'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
import { MAX_EMAIL_RECIPIENTS, MAX_NOTIFICATIONS_PER_TYPE, MAX_WORKFLOW_IDS } from './constants'
@@ -256,6 +257,17 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
type: data.notificationType,
})
captureServerEvent(
session.user.id,
'notification_channel_created',
{
workspace_id: workspaceId,
notification_type: data.notificationType,
alert_rule: data.alertConfig?.rule ?? null,
},
{ groups: { workspace: workspaceId } }
)
recordAudit({
workspaceId,
actorId: session.user.id,

View File

@@ -8,6 +8,7 @@ import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { syncWorkspaceEnvCredentials } from '@/lib/credentials/environment'
import { captureServerEvent } from '@/lib/posthog/server'
import {
getUsersWithPermissions,
hasWorkspaceAdminAccess,
@@ -188,6 +189,13 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<
const updatedUsers = await getUsersWithPermissions(workspaceId)
for (const update of body.updates) {
captureServerEvent(
session.user.id,
'workspace_member_role_changed',
{ workspace_id: workspaceId, new_role: update.permissions },
{ groups: { workspace: workspaceId } }
)
recordAudit({
workspaceId,
actorId: session.user.id,

View File

@@ -5,6 +5,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { captureServerEvent } from '@/lib/posthog/server'
import { archiveWorkspace } from '@/lib/workspaces/lifecycle'
const logger = createLogger('WorkspaceByIdAPI')
@@ -292,6 +293,13 @@ export async function DELETE(
request,
})
captureServerEvent(
session.user.id,
'workspace_deleted',
{ workspace_id: workspaceId, workflow_count: workflowIds.length },
{ groups: { workspace: workspaceId } }
)
return NextResponse.json({ success: true })
} catch (error) {
logger.error(`Error deleting workspace ${workspaceId}:`, error)

View File

@@ -19,6 +19,7 @@ import { PlatformEvents } from '@/lib/core/telemetry'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { sendEmail } from '@/lib/messaging/email/mailer'
import { getFromEmailAddress } from '@/lib/messaging/email/utils'
import { captureServerEvent } from '@/lib/posthog/server'
import { getWorkspaceById } from '@/lib/workspaces/permissions/utils'
import {
InvitationsNotAllowedError,
@@ -214,6 +215,16 @@ export async function POST(req: NextRequest) {
// Telemetry should not fail the operation
}
captureServerEvent(
session.user.id,
'workspace_member_invited',
{ workspace_id: workspaceId, invitee_role: permission },
{
groups: { workspace: workspaceId },
setOnce: { first_invitation_sent_at: new Date().toISOString() },
}
)
await sendInvitationEmail({
to: email,
inviterName: session.user.name || session.user.email || 'A user',

View File

@@ -7,6 +7,7 @@ import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { revokeWorkspaceCredentialMemberships } from '@/lib/credentials/access'
import { captureServerEvent } from '@/lib/posthog/server'
import { hasWorkspaceAdminAccess } from '@/lib/workspaces/permissions/utils'
const logger = createLogger('WorkspaceMemberAPI')
@@ -105,6 +106,13 @@ export async function DELETE(req: NextRequest, { params }: { params: Promise<{ i
await revokeWorkspaceCredentialMemberships(workspaceId, userId)
captureServerEvent(
session.user.id,
'workspace_member_removed',
{ workspace_id: workspaceId, is_self_removal: isSelf },
{ groups: { workspace: workspaceId } }
)
recordAudit({
workspaceId,
actorId: session.user.id,

View File

@@ -7,6 +7,7 @@ import { z } from 'zod'
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
import { getSession } from '@/lib/auth'
import { PlatformEvents } from '@/lib/core/telemetry'
import { captureServerEvent } from '@/lib/posthog/server'
import { buildDefaultWorkflowArtifacts } from '@/lib/workflows/defaults'
import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils'
import { getRandomWorkspaceColor } from '@/lib/workspaces/colors'
@@ -96,6 +97,16 @@ export async function POST(req: Request) {
const newWorkspace = await createWorkspace(session.user.id, name, skipDefaultWorkflow, color)
captureServerEvent(
session.user.id,
'workspace_created',
{ workspace_id: newWorkspace.id, name: newWorkspace.name },
{
groups: { workspace: newWorkspace.id },
setOnce: { first_workspace_created_at: new Date().toISOString() },
}
)
recordAudit({
workspaceId: newWorkspace.id,
actorId: session.user.id,

View File

@@ -26,6 +26,7 @@ export function NavTour() {
steps: navTourSteps,
triggerEvent: START_NAV_TOUR_EVENT,
tourName: 'Navigation tour',
tourType: 'nav',
disabled: isWorkflowPage,
})

View File

@@ -2,7 +2,9 @@
import { useCallback, useEffect, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { usePostHog } from 'posthog-js/react'
import { ACTIONS, type CallBackProps, EVENTS, STATUS, type Step } from 'react-joyride'
import { captureEvent } from '@/lib/posthog/client'
const logger = createLogger('useTour')
@@ -16,6 +18,8 @@ interface UseTourOptions {
triggerEvent?: string
/** Identifier for logging */
tourName?: string
/** Analytics tour type for PostHog events */
tourType?: 'nav' | 'workflow'
/** When true, stops a running tour (e.g. navigating away from the relevant page) */
disabled?: boolean
}
@@ -45,8 +49,10 @@ export function useTour({
steps,
triggerEvent,
tourName = 'tour',
tourType,
disabled = false,
}: UseTourOptions): UseTourReturn {
const posthog = usePostHog()
const [run, setRun] = useState(false)
const [stepIndex, setStepIndex] = useState(0)
const [tourKey, setTourKey] = useState(0)
@@ -152,6 +158,9 @@ export function useTour({
setRun(true)
logger.info(`${tourName} triggered via event`)
scheduleReveal()
if (tourType) {
captureEvent(posthog, 'tour_started', { tour_type: tourType })
}
}, 50)
}
@@ -181,6 +190,13 @@ export function useTour({
if (status === STATUS.FINISHED || status === STATUS.SKIPPED) {
stopTour()
logger.info(`${tourName} ended`, { status })
if (tourType) {
if (status === STATUS.FINISHED) {
captureEvent(posthog, 'tour_completed', { tour_type: tourType })
} else {
captureEvent(posthog, 'tour_skipped', { tour_type: tourType, step_index: index })
}
}
return
}
@@ -188,6 +204,9 @@ export function useTour({
if (action === ACTIONS.CLOSE) {
stopTour()
logger.info(`${tourName} closed by user`)
if (tourType) {
captureEvent(posthog, 'tour_skipped', { tour_type: tourType, step_index: index })
}
return
}
@@ -203,7 +222,7 @@ export function useTour({
transitionToStep(nextIndex)
}
},
[stopTour, transitionToStep, steps, tourName]
[stopTour, transitionToStep, steps, tourName, tourType, posthog]
)
return {

View File

@@ -26,6 +26,7 @@ export function WorkflowTour() {
steps: workflowTourSteps,
triggerEvent: START_WORKFLOW_TOUR_EVENT,
tourName: 'Workflow tour',
tourType: 'workflow',
})
const tourState = useMemo<TourState>(

View File

@@ -353,7 +353,17 @@ const TemplateCard = memo(function TemplateCard({ template, onSelect }: Template
return (
<button
type='button'
onClick={() => onSelect(template.prompt)}
onClick={() => {
import('@/lib/posthog/client')
.then(({ captureClientEvent }) => {
captureClientEvent('template_used', {
template_title: template.title,
template_modules: template.modules.join(' '),
})
})
.catch(() => {})
onSelect(template.prompt)
}}
aria-label={`Select template: ${template.title}`}
className='group flex cursor-pointer flex-col text-left'
>

View File

@@ -3,6 +3,7 @@
import { useCallback, useEffect, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useParams, useRouter } from 'next/navigation'
import { usePostHog } from 'posthog-js/react'
import { PanelLeft } from '@/components/emcn/icons'
import { useSession } from '@/lib/auth/auth-client'
import {
@@ -10,6 +11,7 @@ import {
type LandingWorkflowSeed,
LandingWorkflowSeedStorage,
} from '@/lib/core/utils/browser-storage'
import { captureEvent } from '@/lib/posthog/client'
import { persistImportedWorkflow } from '@/lib/workflows/operations/import-export'
import { useChatHistory, useMarkTaskRead } from '@/hooks/queries/tasks'
import type { ChatContext } from '@/stores/panel'
@@ -27,6 +29,8 @@ export function Home({ chatId }: HomeProps = {}) {
const { workspaceId } = useParams<{ workspaceId: string }>()
const router = useRouter()
const { data: session } = useSession()
const posthog = usePostHog()
const posthogRef = useRef(posthog)
const [initialPrompt, setInitialPrompt] = useState('')
const hasCheckedLandingStorageRef = useRef(false)
const initialViewInputRef = useRef<HTMLDivElement>(null)
@@ -199,11 +203,21 @@ export function Home({ chatId }: HomeProps = {}) {
return () => cancelAnimationFrame(id)
}, [resources])
useEffect(() => {
posthogRef.current = posthog
}, [posthog])
const handleSubmit = useCallback(
(text: string, fileAttachments?: FileAttachmentForApi[], contexts?: ChatContext[]) => {
const trimmed = text.trim()
if (!trimmed && !(fileAttachments && fileAttachments.length > 0)) return
captureEvent(posthogRef.current, 'task_message_sent', {
has_attachments: !!(fileAttachments && fileAttachments.length > 0),
has_contexts: !!(contexts && contexts.length > 0),
is_new_task: !chatId,
})
if (initialViewInputRef.current) {
setIsInputEntering(true)
}

View File

@@ -5,6 +5,7 @@ import { createLogger } from '@sim/logger'
import { format } from 'date-fns'
import { AlertCircle, Loader2, Pencil, Plus, Tag, X } from 'lucide-react'
import { useParams, useRouter } from 'next/navigation'
import { usePostHog } from 'posthog-js/react'
import {
Badge,
Button,
@@ -28,6 +29,7 @@ import { ALL_TAG_SLOTS, type AllTagSlot, getFieldTypeForSlot } from '@/lib/knowl
import type { DocumentSortField, SortOrder } from '@/lib/knowledge/documents/types'
import { type FilterFieldType, getOperatorsForFieldType } from '@/lib/knowledge/filters/types'
import type { DocumentData } from '@/lib/knowledge/types'
import { captureEvent } from '@/lib/posthog/client'
import { formatFileSize } from '@/lib/uploads/utils/file-utils'
import type {
BreadcrumbItem,
@@ -190,6 +192,15 @@ export function KnowledgeBase({
}: KnowledgeBaseProps) {
const params = useParams()
const workspaceId = propWorkspaceId || (params.workspaceId as string)
const posthog = usePostHog()
useEffect(() => {
captureEvent(posthog, 'knowledge_base_opened', {
knowledge_base_id: id,
knowledge_base_name: passedKnowledgeBaseName ?? 'Unknown',
})
}, [id, passedKnowledgeBaseName, posthog])
useOAuthReturnForKBConnectors(id)
const { removeKnowledgeBase } = useKnowledgeBasesList(workspaceId, { enabled: false })
const userPermissions = useUserPermissionsContext()

View File

@@ -2,6 +2,7 @@
import { useEffect } from 'react'
import { useParams } from 'next/navigation'
import { usePostHog } from 'posthog-js/react'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
/**
@@ -11,6 +12,12 @@ export function WorkspaceScopeSync() {
const { workspaceId } = useParams<{ workspaceId: string }>()
const hydrationWorkspaceId = useWorkflowRegistry((state) => state.hydration.workspaceId)
const switchToWorkspace = useWorkflowRegistry((state) => state.switchToWorkspace)
const posthog = usePostHog()
useEffect(() => {
if (!workspaceId) return
posthog?.group('workspace', workspaceId)
}, [posthog, workspaceId])
useEffect(() => {
if (!workspaceId || hydrationWorkspaceId === workspaceId) {

View File

@@ -1,9 +1,12 @@
'use client'
import { useEffect } from 'react'
import dynamic from 'next/dynamic'
import { useSearchParams } from 'next/navigation'
import { usePostHog } from 'posthog-js/react'
import { Skeleton } from '@/components/emcn'
import { useSession } from '@/lib/auth/auth-client'
import { captureEvent } from '@/lib/posthog/client'
import { AdminSkeleton } from '@/app/workspace/[workspaceId]/settings/components/admin/admin-skeleton'
import { ApiKeysSkeleton } from '@/app/workspace/[workspaceId]/settings/components/api-keys/api-key-skeleton'
import { BYOKSkeleton } from '@/app/workspace/[workspaceId]/settings/components/byok/byok-skeleton'
@@ -160,6 +163,7 @@ export function SettingsPage({ section }: SettingsPageProps) {
const searchParams = useSearchParams()
const mcpServerId = searchParams.get('mcpServerId')
const { data: session, isPending: sessionLoading } = useSession()
const posthog = usePostHog()
const isAdminRole = session?.user?.role === 'admin'
const effectiveSection =
@@ -174,6 +178,11 @@ export function SettingsPage({ section }: SettingsPageProps) {
const label =
allNavigationItems.find((item) => item.id === effectiveSection)?.label ?? effectiveSection
useEffect(() => {
if (sessionLoading) return
captureEvent(posthog, 'settings_tab_viewed', { section: effectiveSection })
}, [effectiveSection, sessionLoading, posthog])
return (
<div>
<h2 className='mb-7 font-medium text-[22px] text-[var(--text-primary)]'>{label}</h2>

View File

@@ -26,6 +26,7 @@ interface CreateApiKeyModalProps {
allowPersonalApiKeys?: boolean
canManageWorkspaceKeys?: boolean
defaultKeyType?: 'personal' | 'workspace'
source?: 'settings' | 'deploy_modal'
onKeyCreated?: (key: ApiKey) => void
}
@@ -41,6 +42,7 @@ export function CreateApiKeyModal({
allowPersonalApiKeys = true,
canManageWorkspaceKeys = false,
defaultKeyType = 'personal',
source = 'settings',
onKeyCreated,
}: CreateApiKeyModalProps) {
const [keyName, setKeyName] = useState('')
@@ -74,6 +76,7 @@ export function CreateApiKeyModal({
workspaceId,
name: trimmedName,
keyType,
source,
})
setNewKey(data.key)

View File

@@ -3,6 +3,7 @@
import React, { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { GripVertical } from 'lucide-react'
import { useParams, useRouter } from 'next/navigation'
import { usePostHog } from 'posthog-js/react'
import {
Button,
Checkbox,
@@ -39,6 +40,7 @@ import {
TypeText,
} from '@/components/emcn/icons'
import { cn } from '@/lib/core/utils/cn'
import { captureEvent } from '@/lib/posthog/client'
import type { ColumnDefinition, Filter, SortDirection, TableRow as TableRowType } from '@/lib/table'
import type { ColumnOption, SortConfig } from '@/app/workspace/[workspaceId]/components'
import { ResourceHeader, ResourceOptionsBar } from '@/app/workspace/[workspaceId]/components'
@@ -177,6 +179,12 @@ export function Table({
const router = useRouter()
const workspaceId = propWorkspaceId || (params.workspaceId as string)
const tableId = propTableId || (params.tableId as string)
const posthog = usePostHog()
useEffect(() => {
if (!tableId || !workspaceId) return
captureEvent(posthog, 'table_opened', { table_id: tableId, workspace_id: workspaceId })
}, [tableId, workspaceId, posthog])
const [queryOptions, setQueryOptions] = useState<QueryOptions>({
filter: null,

View File

@@ -915,6 +915,7 @@ export function DeployModal({
allowPersonalApiKeys={allowPersonalApiKeys}
canManageWorkspaceKeys={canManageWorkspaceKeys}
defaultKeyType={defaultKeyType}
source='deploy_modal'
/>
{workflowId && (

View File

@@ -59,14 +59,10 @@ interface ComboBoxProps {
/** Configuration for the sub-block */
config: SubBlockConfig
/** Async function to fetch options dynamically */
fetchOptions?: (
blockId: string,
subBlockId: string
) => Promise<Array<{ label: string; id: string }>>
fetchOptions?: (blockId: string) => Promise<Array<{ label: string; id: string }>>
/** Async function to fetch a single option's label by ID (for hydration) */
fetchOptionById?: (
blockId: string,
subBlockId: string,
optionId: string
) => Promise<{ label: string; id: string } | null>
/** Field dependencies that trigger option refetch when changed */
@@ -135,7 +131,7 @@ export const ComboBox = memo(function ComboBox({
setIsLoadingOptions(true)
setFetchError(null)
try {
const options = await fetchOptions(blockId, subBlockId)
const options = await fetchOptions(blockId)
setFetchedOptions(options)
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Failed to fetch options'
@@ -144,7 +140,7 @@ export const ComboBox = memo(function ComboBox({
} finally {
setIsLoadingOptions(false)
}
}, [fetchOptions, blockId, subBlockId, isPreview, disabled])
}, [fetchOptions, blockId, isPreview, disabled])
// Determine the active value based on mode (preview vs. controlled vs. store)
const value = isPreview ? previewValue : propValue !== undefined ? propValue : storeValue
@@ -363,7 +359,7 @@ export const ComboBox = memo(function ComboBox({
let isActive = true
// Fetch the hydrated option
fetchOptionById(blockId, subBlockId, valueToHydrate)
fetchOptionById(blockId, valueToHydrate)
.then((option) => {
if (isActive) setHydratedOption(option)
})
@@ -378,7 +374,6 @@ export const ComboBox = memo(function ComboBox({
fetchOptionById,
value,
blockId,
subBlockId,
isPreview,
disabled,
fetchedOptions,

View File

@@ -1,7 +1,7 @@
'use client'
import { createElement, useCallback, useMemo, useState } from 'react'
import { ExternalLink, Users } from 'lucide-react'
import { ExternalLink, KeyRound, Users } from 'lucide-react'
import { useParams } from 'next/navigation'
import { Button, Combobox } from '@/components/emcn/components'
import { getSubscriptionAccessState } from '@/lib/billing/client'
@@ -22,7 +22,7 @@ import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/c
import type { SubBlockConfig } from '@/blocks/types'
import { CREDENTIAL_SET } from '@/executor/constants'
import { useCredentialSets } from '@/hooks/queries/credential-sets'
import { useWorkspaceCredential } from '@/hooks/queries/credentials'
import { useWorkspaceCredential, useWorkspaceCredentials } from '@/hooks/queries/credentials'
import { useOAuthCredentials } from '@/hooks/queries/oauth/oauth-credentials'
import { useOrganizations } from '@/hooks/queries/organization'
import { useSubscriptionData } from '@/hooks/queries/subscription'
@@ -60,6 +60,7 @@ export function CredentialSelector({
const requiredScopes = subBlock.requiredScopes || []
const label = subBlock.placeholder || 'Select credential'
const serviceId = subBlock.serviceId || ''
const isAllCredentials = !serviceId
const supportsCredentialSets = subBlock.supportsCredentialSets || false
const { data: organizationsData } = useOrganizations()
@@ -101,14 +102,22 @@ export function CredentialSelector({
const {
data: rawCredentials = [],
isFetching: credentialsLoading,
isFetching: oauthCredentialsLoading,
refetch: refetchCredentials,
} = useOAuthCredentials(effectiveProviderId, {
enabled: Boolean(effectiveProviderId),
enabled: !isAllCredentials && Boolean(effectiveProviderId),
workspaceId,
workflowId: activeWorkflowId || undefined,
})
const {
data: allWorkspaceCredentials = [],
isFetching: allCredentialsLoading,
refetch: refetchAllCredentials,
} = useWorkspaceCredentials({ workspaceId, enabled: isAllCredentials })
const credentialsLoading = isAllCredentials ? allCredentialsLoading : oauthCredentialsLoading
const credentials = useMemo(
() =>
isTriggerMode
@@ -122,9 +131,17 @@ export function CredentialSelector({
[credentials, selectedId]
)
const selectedAllCredential = useMemo(
() =>
isAllCredentials ? (allWorkspaceCredentials.find((c) => c.id === selectedId) ?? null) : null,
[isAllCredentials, allWorkspaceCredentials, selectedId]
)
const isServiceAccount = useMemo(
() => selectedCredential?.type === 'service_account',
[selectedCredential]
() =>
selectedCredential?.type === 'service_account' ||
selectedAllCredential?.type === 'service_account',
[selectedCredential, selectedAllCredential]
)
const selectedCredentialSet = useMemo(
@@ -134,37 +151,45 @@ export function CredentialSelector({
const { data: inaccessibleCredential } = useWorkspaceCredential(
selectedId || undefined,
Boolean(selectedId) && !selectedCredential && !credentialsLoading && Boolean(workspaceId)
Boolean(selectedId) &&
!selectedCredential &&
!selectedAllCredential &&
!credentialsLoading &&
Boolean(workspaceId)
)
const inaccessibleCredentialName = inaccessibleCredential?.displayName ?? null
const resolvedLabel = useMemo(() => {
if (selectedCredentialSet) return selectedCredentialSet.name
if (selectedAllCredential) return selectedAllCredential.displayName
if (selectedCredential) return selectedCredential.name
if (inaccessibleCredentialName) return inaccessibleCredentialName
return ''
}, [selectedCredentialSet, selectedCredential, inaccessibleCredentialName])
}, [selectedCredentialSet, selectedAllCredential, selectedCredential, inaccessibleCredentialName])
const displayValue = isEditing ? editingValue : resolvedLabel
useCredentialRefreshTriggers(refetchCredentials, effectiveProviderId, workspaceId)
const refetch = useCallback(
() => (isAllCredentials ? refetchAllCredentials() : refetchCredentials()),
[isAllCredentials, refetchAllCredentials, refetchCredentials]
)
useCredentialRefreshTriggers(refetch, effectiveProviderId, workspaceId)
const handleOpenChange = useCallback(
(isOpen: boolean) => {
if (isOpen) {
void refetchCredentials()
}
if (isOpen) void refetch()
},
[refetchCredentials]
[refetch]
)
const hasSelection = Boolean(selectedCredential)
const missingRequiredScopes = hasSelection
const hasOAuthSelection = Boolean(selectedCredential)
const missingRequiredScopes = hasOAuthSelection
? getMissingRequiredScopes(selectedCredential!, requiredScopes || [])
: []
const needsUpdate =
hasSelection &&
hasOAuthSelection &&
!isServiceAccount &&
missingRequiredScopes.length > 0 &&
!effectiveDisabled &&
@@ -218,6 +243,12 @@ export function CredentialSelector({
}, [])
const { comboboxOptions, comboboxGroups } = useMemo(() => {
if (isAllCredentials) {
const oauthCredentials = allWorkspaceCredentials.filter((c) => c.type === 'oauth')
const options = oauthCredentials.map((cred) => ({ label: cred.displayName, value: cred.id }))
return { comboboxOptions: options, comboboxGroups: undefined }
}
const pollingProviderId = getPollingProviderFromOAuth(effectiveProviderId)
// Handle both old ('gmail') and new ('google-email') provider IDs for backwards compatibility
const matchesProvider = (csProviderId: string | null) => {
@@ -281,6 +312,8 @@ export function CredentialSelector({
return { comboboxOptions: options, comboboxGroups: undefined }
}, [
isAllCredentials,
allWorkspaceCredentials,
credentials,
provider,
effectiveProviderId,
@@ -306,6 +339,17 @@ export function CredentialSelector({
)
}
if (isAllCredentials && selectedAllCredential) {
return (
<div className='flex w-full items-center truncate'>
<div className='mr-2 flex-shrink-0 opacity-90'>
<KeyRound className='h-3 w-3' />
</div>
<span className='truncate'>{displayValue}</span>
</div>
)
}
return (
<div className='flex w-full items-center truncate'>
<div className='mr-2 flex-shrink-0 opacity-90'>
@@ -320,7 +364,8 @@ export function CredentialSelector({
selectedCredentialProvider,
isCredentialSetSelected,
selectedCredentialSet,
isServiceAccount,
isAllCredentials,
selectedAllCredential,
])
const handleComboboxChange = useCallback(
@@ -339,7 +384,9 @@ export function CredentialSelector({
}
}
const matchedCred = credentials.find((c) => c.id === value)
const matchedCred = (
isAllCredentials ? allWorkspaceCredentials.filter((c) => c.type === 'oauth') : credentials
).find((c) => c.id === value)
if (matchedCred) {
handleSelect(value)
return
@@ -348,7 +395,15 @@ export function CredentialSelector({
setIsEditing(true)
setEditingValue(value)
},
[credentials, credentialSets, handleAddCredential, handleSelect, handleCredentialSetSelect]
[
isAllCredentials,
allWorkspaceCredentials,
credentials,
credentialSets,
handleAddCredential,
handleSelect,
handleCredentialSetSelect,
]
)
return (

View File

@@ -52,14 +52,10 @@ interface DropdownProps {
/** Enable multi-select mode */
multiSelect?: boolean
/** Async function to fetch options dynamically */
fetchOptions?: (
blockId: string,
subBlockId: string
) => Promise<Array<{ label: string; id: string }>>
fetchOptions?: (blockId: string) => Promise<Array<{ label: string; id: string }>>
/** Async function to fetch a single option's label by ID (for hydration) */
fetchOptionById?: (
blockId: string,
subBlockId: string,
optionId: string
) => Promise<{ label: string; id: string } | null>
/** Field dependencies that trigger option refetch when changed */
@@ -160,7 +156,7 @@ export const Dropdown = memo(function Dropdown({
setIsLoadingOptions(true)
setFetchError(null)
try {
const options = await fetchOptions(blockId, subBlockId)
const options = await fetchOptions(blockId)
setFetchedOptions(options)
} catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Failed to fetch options'
@@ -169,7 +165,7 @@ export const Dropdown = memo(function Dropdown({
} finally {
setIsLoadingOptions(false)
}
}, [fetchOptions, blockId, subBlockId, isPreview, disabled])
}, [fetchOptions, blockId, isPreview, disabled])
/**
* Handles combobox open state changes to trigger option fetching
@@ -430,7 +426,7 @@ export const Dropdown = memo(function Dropdown({
let isActive = true
// Fetch the hydrated option
fetchOptionById(blockId, subBlockId, valueToHydrate)
fetchOptionById(blockId, valueToHydrate)
.then((option) => {
if (isActive) setHydratedOption(option)
})
@@ -446,7 +442,6 @@ export const Dropdown = memo(function Dropdown({
singleValue,
multiSelect,
blockId,
subBlockId,
isPreview,
disabled,
fetchedOptions,

View File

@@ -14,9 +14,11 @@ import {
Unlock,
} from 'lucide-react'
import { useParams } from 'next/navigation'
import { usePostHog } from 'posthog-js/react'
import { useShallow } from 'zustand/react/shallow'
import { useStoreWithEqualityFn } from 'zustand/traditional'
import { Button, Tooltip } from '@/components/emcn'
import { captureEvent } from '@/lib/posthog/client'
import {
buildCanonicalIndex,
evaluateSubBlockCondition,
@@ -106,6 +108,7 @@ export function Editor() {
const params = useParams()
const workspaceId = params.workspaceId as string
const posthog = usePostHog()
const subBlocksRef = useRef<HTMLDivElement>(null)
@@ -298,7 +301,11 @@ export function Editor() {
const handleOpenDocs = useCallback(() => {
const docsLink = isSubflow ? subflowConfig?.docsLink : blockConfig?.docsLink
window.open(docsLink || 'https://docs.sim.ai/quick-reference', '_blank', 'noopener,noreferrer')
}, [isSubflow, subflowConfig?.docsLink, blockConfig?.docsLink])
captureEvent(posthog, 'docs_opened', {
source: 'editor_button',
block_type: currentBlock?.type,
})
}, [isSubflow, subflowConfig?.docsLink, blockConfig?.docsLink, posthog, currentBlock?.type])
const childWorkflowId = isWorkflowBlock ? blockSubBlockValues?.workflowId : null

View File

@@ -12,7 +12,9 @@ import {
} from 'react'
import clsx from 'clsx'
import { Search } from 'lucide-react'
import { usePostHog } from 'posthog-js/react'
import { Button } from '@/components/emcn'
import { captureEvent } from '@/lib/posthog/client'
import {
getBlocksForSidebar,
getTriggersForSidebar,
@@ -348,6 +350,7 @@ export const Toolbar = memo(
triggersHeaderRef,
})
const posthog = usePostHog()
const { filterBlocks } = usePermissionConfig()
const sandboxAllowedBlocks = useSandboxBlockConstraints()
@@ -541,8 +544,12 @@ export const Toolbar = memo(
const handleViewDocumentation = useCallback(() => {
if (activeItemInfo?.docsLink) {
window.open(activeItemInfo.docsLink, '_blank', 'noopener,noreferrer')
captureEvent(posthog, 'docs_opened', {
source: 'toolbar_context_menu',
block_type: activeItemInfo.type,
})
}
}, [activeItemInfo])
}, [activeItemInfo, posthog])
/**
* Handle clicks outside the context menu to close it

View File

@@ -6,6 +6,7 @@ import { Compass, MoreHorizontal } from 'lucide-react'
import Image from 'next/image'
import Link from 'next/link'
import { useParams, usePathname, useRouter } from 'next/navigation'
import { usePostHog } from 'posthog-js/react'
import {
Blimp,
Button,
@@ -39,6 +40,7 @@ import { useSession } from '@/lib/auth/auth-client'
import { cn } from '@/lib/core/utils/cn'
import { isMacPlatform } from '@/lib/core/utils/platform'
import { buildFolderTree } from '@/lib/folders/tree'
import { captureEvent } from '@/lib/posthog/client'
import {
START_NAV_TOUR_EVENT,
START_WORKFLOW_TOUR_EVENT,
@@ -315,6 +317,7 @@ export const Sidebar = memo(function Sidebar() {
const fileInputRef = useRef<HTMLInputElement>(null)
const scrollContainerRef = useRef<HTMLDivElement>(null)
const posthog = usePostHog()
const { data: sessionData, isPending: sessionLoading } = useSession()
const { canEdit } = useUserPermissionsContext()
const { config: permissionConfig, filterBlocks } = usePermissionConfig()
@@ -1092,10 +1095,10 @@ export const Sidebar = memo(function Sidebar() {
const handleOpenHelpFromMenu = useCallback(() => setIsHelpModalOpen(true), [])
const handleOpenDocs = useCallback(
() => window.open('https://docs.sim.ai', '_blank', 'noopener,noreferrer'),
[]
)
const handleOpenDocs = useCallback(() => {
window.open('https://docs.sim.ai', '_blank', 'noopener,noreferrer')
captureEvent(posthog, 'docs_opened', { source: 'help_menu' })
}, [posthog])
const handleTaskRenameBlur = useCallback(
() => void taskFlyoutRename.saveRename(),

View File

@@ -0,0 +1,65 @@
import { db } from '@sim/db'
import { user } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { task } from '@trigger.dev/sdk'
import { eq } from 'drizzle-orm'
import { getEmailSubject, renderOnboardingFollowupEmail } from '@/components/emails'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { checkEnterprisePlan } from '@/lib/billing/subscriptions/utils'
import { sendEmail } from '@/lib/messaging/email/mailer'
import { getPersonalEmailFrom } from '@/lib/messaging/email/utils'
import { LIFECYCLE_EMAIL_TASK_ID, type LifecycleEmailType } from '@/lib/messaging/lifecycle'
const logger = createLogger('LifecycleEmail')
interface LifecycleEmailParams {
userId: string
type: LifecycleEmailType
}
/**
 * Renders and delivers one lifecycle email to a user.
 *
 * Silently skips (with a log entry) when the user is missing or has no email
 * address, when the user is on an enterprise plan, or when the email type is
 * not recognized.
 */
async function sendLifecycleEmail({ userId, type }: LifecycleEmailParams): Promise<void> {
  const [recipient] = await db.select().from(user).where(eq(user.id, userId)).limit(1)
  if (!recipient?.email) {
    logger.warn('[lifecycle-email] User not found or has no email', { userId, type })
    return
  }

  // Enterprise customers are handled directly — never send them lifecycle emails.
  const subscription = await getHighestPrioritySubscription(userId)
  if (checkEnterprisePlan(subscription)) {
    logger.info('[lifecycle-email] Skipping lifecycle email for enterprise user', { userId, type })
    return
  }

  const { from, replyTo } = getPersonalEmailFrom()

  let html: string
  if (type === 'onboarding-followup') {
    html = await renderOnboardingFollowupEmail(recipient.name || undefined)
  } else {
    logger.warn('[lifecycle-email] Unknown lifecycle email type', { type })
    return
  }

  await sendEmail({
    to: recipient.email,
    subject: getEmailSubject(type),
    html,
    from,
    replyTo,
    emailType: 'notifications',
  })

  logger.info('[lifecycle-email] Sent lifecycle email', { userId, type })
}
/**
 * Trigger.dev task wrapper so lifecycle emails can be scheduled with a delay
 * (e.g. the onboarding follow-up a few days after signup).
 */
export const lifecycleEmailTask = task({
  id: LIFECYCLE_EMAIL_TASK_ID,
  // maxAttempts: 2 — one automatic retry on failure.
  retry: { maxAttempts: 2 },
  run: async (params: LifecycleEmailParams) => {
    await sendLifecycleEmail(params)
  },
})

View File

@@ -0,0 +1,151 @@
import { CredentialIcon } from '@/components/icons'
import { getServiceConfigByProviderId } from '@/lib/oauth/utils'
import { getQueryClient } from '@/app/_shell/providers/get-query-client'
import type { BlockConfig } from '@/blocks/types'
import { fetchWorkspaceCredentialList, workspaceCredentialKeys } from '@/hooks/queries/credentials'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
interface CredentialBlockOutput {
success: boolean
output: {
credentialId: string
displayName: string
providerId: string
credentials: Array<{
credentialId: string
displayName: string
providerId: string
}>
count: number
}
}
/**
 * Credential block configuration.
 *
 * Lets a workflow either pin one OAuth credential ("select") or enumerate all
 * OAuth credentials in the workspace ("list"). Outputs carry only credential
 * IDs and metadata — never secret values.
 */
export const CredentialBlock: BlockConfig<CredentialBlockOutput> = {
  type: 'credential',
  name: 'Credential',
  description: 'Select or list OAuth credentials',
  longDescription:
    'Select an OAuth credential once and pipe its ID into any downstream block that requires authentication, or list all OAuth credentials in the workspace for iteration. No secrets are ever exposed — only credential IDs and metadata.',
  bestPractices: `
  - Use "Select Credential" to define an OAuth credential once and reference <CredentialBlock.credentialId> in multiple downstream blocks instead of repeating credential IDs.
  - Use "List Credentials" with a ForEach loop to iterate over all OAuth accounts (e.g. all Gmail accounts).
  - Use the Provider filter to narrow results to specific services (e.g. Gmail, Slack).
  - The outputs are credential ID references, not secret values — they are safe to log and inspect.
  - To switch credentials across environments, replace the single Credential block rather than updating every downstream block.
  `,
  docsLink: 'https://docs.sim.ai/blocks/credential',
  bgColor: '#6366F1',
  icon: CredentialIcon,
  category: 'blocks',
  subBlocks: [
    {
      // Which mode the block runs in; drives visibility of the fields below.
      id: 'operation',
      title: 'Operation',
      type: 'dropdown',
      options: [
        { label: 'Select Credential', id: 'select' },
        { label: 'List Credentials', id: 'list' },
      ],
      value: () => 'select',
    },
    {
      // Multi-select provider filter for the "list" operation. Options are
      // built from the workspace's existing OAuth credentials.
      id: 'providerFilter',
      title: 'Provider',
      type: 'dropdown',
      multiSelect: true,
      options: [],
      condition: { field: 'operation', value: 'list' },
      fetchOptions: async () => {
        const workspaceId = useWorkflowRegistry.getState().hydration.workspaceId
        if (!workspaceId) return []
        // Served from the shared query cache; refetched at most once a minute.
        const credentials = await getQueryClient().fetchQuery({
          queryKey: workspaceCredentialKeys.list(workspaceId),
          queryFn: () => fetchWorkspaceCredentialList(workspaceId),
          staleTime: 60 * 1000,
        })
        // De-duplicate by providerId so each service appears once.
        const seen = new Set<string>()
        const options: Array<{ label: string; id: string }> = []
        for (const cred of credentials) {
          if (cred.type === 'oauth' && cred.providerId && !seen.has(cred.providerId)) {
            seen.add(cred.providerId)
            const serviceConfig = getServiceConfigByProviderId(cred.providerId)
            options.push({ label: serviceConfig?.name ?? cred.providerId, id: cred.providerId })
          }
        }
        return options.sort((a, b) => a.label.localeCompare(b.label))
      },
      // Hydrates a stored provider ID into a display label before the full
      // option list has loaded. Falls back to the raw ID when unknown.
      fetchOptionById: async (_blockId: string, optionId: string) => {
        const serviceConfig = getServiceConfigByProviderId(optionId)
        const label = serviceConfig?.name ?? optionId
        return { label, id: optionId }
      },
    },
    {
      // Basic-mode credential picker for the "select" operation.
      id: 'credential',
      title: 'Credential',
      type: 'oauth-input',
      required: { field: 'operation', value: 'select' },
      mode: 'basic',
      placeholder: 'Select a credential',
      canonicalParamId: 'credentialId',
      condition: { field: 'operation', value: 'select' },
    },
    {
      // Advanced-mode free-text alternative; maps to the same canonical param.
      id: 'manualCredential',
      title: 'Credential ID',
      type: 'short-input',
      required: { field: 'operation', value: 'select' },
      mode: 'advanced',
      placeholder: 'Enter credential ID',
      canonicalParamId: 'credentialId',
      condition: { field: 'operation', value: 'select' },
    },
  ],
  tools: {
    // No external tools — execution is handled by CredentialBlockHandler.
    access: [],
  },
  inputs: {
    operation: { type: 'string', description: "'select' or 'list'" },
    credentialId: {
      type: 'string',
      description: 'The OAuth credential ID to resolve (select operation)',
    },
    providerFilter: {
      type: 'json',
      description:
        'Array of OAuth provider IDs to filter by (e.g. ["google-email", "slack"]). Leave empty to return all OAuth credentials.',
    },
  },
  outputs: {
    // "select" outputs: one resolved credential's metadata.
    credentialId: {
      type: 'string',
      description: "Credential ID — pipe into other blocks' credential fields",
      condition: { field: 'operation', value: 'select' },
    },
    displayName: {
      type: 'string',
      description: 'Human-readable name of the credential',
      condition: { field: 'operation', value: 'select' },
    },
    providerId: {
      type: 'string',
      description: 'OAuth provider ID (e.g. google-email, slack)',
      condition: { field: 'operation', value: 'select' },
    },
    // "list" outputs: every matching credential plus a count.
    credentials: {
      type: 'json',
      description:
        'Array of OAuth credential objects, each with credentialId, displayName, and providerId',
      condition: { field: 'operation', value: 'list' },
    },
    count: {
      type: 'number',
      description: 'Number of credentials returned',
      condition: { field: 'operation', value: 'list' },
    },
  },
}

View File

@@ -26,6 +26,7 @@ import { ClerkBlock } from '@/blocks/blocks/clerk'
import { CloudflareBlock } from '@/blocks/blocks/cloudflare'
import { ConditionBlock } from '@/blocks/blocks/condition'
import { ConfluenceBlock, ConfluenceV2Block } from '@/blocks/blocks/confluence'
import { CredentialBlock } from '@/blocks/blocks/credential'
import { CursorBlock, CursorV2Block } from '@/blocks/blocks/cursor'
import { DatabricksBlock } from '@/blocks/blocks/databricks'
import { DatadogBlock } from '@/blocks/blocks/datadog'
@@ -243,6 +244,7 @@ export const registry: Record<string, BlockConfig> = {
clay: ClayBlock,
clerk: ClerkBlock,
condition: ConditionBlock,
credential: CredentialBlock,
confluence: ConfluenceBlock,
confluence_v2: ConfluenceV2Block,
cursor: CursorBlock,

View File

@@ -421,15 +421,11 @@ export interface SubBlockConfig {
triggerId?: string
// Dropdown/Combobox: Function to fetch options dynamically
// Works with both 'dropdown' (select-only) and 'combobox' (editable with expression support)
fetchOptions?: (
blockId: string,
subBlockId: string
) => Promise<Array<{ label: string; id: string }>>
fetchOptions?: (blockId: string) => Promise<Array<{ label: string; id: string }>>
// Dropdown/Combobox: Function to fetch a single option's label by ID (for hydration)
// Called when component mounts with a stored value to display the correct label before options load
fetchOptionById?: (
blockId: string,
subBlockId: string,
optionId: string
) => Promise<{ label: string; id: string } | null>
}

View File

@@ -267,3 +267,24 @@ export const baseStyles = {
margin: '8px 0',
},
}
/**
 * Styles for plain personal emails (no branding, no EmailLayout).
 * Deliberately minimal — system font stack and a plain white background so
 * the message reads like a hand-written note rather than a template.
 */
export const plainEmailStyles = {
  body: {
    fontFamily: '-apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif',
    backgroundColor: '#ffffff',
    margin: '0',
    padding: '0',
  },
  // Narrow centered column, similar to a personal mail client's compose width.
  container: {
    maxWidth: '560px',
    margin: '40px auto',
    padding: '0 24px',
  },
  p: {
    fontSize: '15px',
    lineHeight: '1.6',
    color: '#1a1a1a',
    margin: '0 0 16px',
  },
} as const

View File

@@ -1 +1 @@
export { baseStyles, colors, spacing, typography } from './base'
export { baseStyles, colors, plainEmailStyles, spacing, typography } from './base'

View File

@@ -1,3 +1,4 @@
export { OnboardingFollowupEmail } from './onboarding-followup-email'
export { OTPVerificationEmail } from './otp-verification-email'
export { ResetPasswordEmail } from './reset-password-email'
export { WelcomeEmail } from './welcome-email'

View File

@@ -0,0 +1,38 @@
import { Body, Head, Html, Preview, Text } from '@react-email/components'
import { plainEmailStyles as styles } from '@/components/emails/_styles'
interface OnboardingFollowupEmailProps {
userName?: string
}
/**
 * Plain, unbranded onboarding follow-up email asking for expectation-vs-reality
 * feedback. Uses plainEmailStyles (no EmailLayout) so it reads like a personal
 * note from the founder.
 *
 * @param userName - optional first name for the greeting; falls back to a
 *   generic "Hey," when absent.
 */
export function OnboardingFollowupEmail({ userName }: OnboardingFollowupEmailProps) {
  return (
    <Html>
      <Head />
      <Preview>Quick question</Preview>
      <Body style={styles.body}>
        <div style={styles.container}>
          <Text style={styles.p}>{userName ? `Hey ${userName},` : 'Hey,'}</Text>
          <Text style={styles.p}>
            It&apos;s been a few days since you signed up. I hope you&apos;re enjoying Sim!
          </Text>
          {/* Fix: "know what did you expect" was an ungrammatical embedded
              question; a colon correctly introduces the direct question. */}
          <Text style={styles.p}>
            I&apos;d love to know: what did you expect when you signed up vs. what did you get?
          </Text>
          <Text style={styles.p}>
            A reply with your thoughts would really help us improve the product for everyone.
          </Text>
          <Text style={styles.p}>
            Thanks,
            <br />
            Emir
            <br />
            Founder, Sim
          </Text>
        </div>
      </Body>
    </Html>
  )
}

export default OnboardingFollowupEmail

View File

@@ -0,0 +1,33 @@
import { Body, Head, Html, Preview, Text } from '@react-email/components'
import { plainEmailStyles as styles } from '@/components/emails/_styles'
interface AbandonedCheckoutEmailProps {
userName?: string
}
/**
 * Plain, unbranded email sent when an upgrade checkout is abandoned.
 * Uses plainEmailStyles (no EmailLayout) so it reads like a personal note.
 *
 * @param userName - optional first name for the greeting.
 */
export function AbandonedCheckoutEmail({ userName }: AbandonedCheckoutEmailProps) {
  const greeting = userName ? `Hi ${userName},` : 'Hi,'

  return (
    <Html>
      <Head />
      <Preview>Did you run into an issue with your upgrade?</Preview>
      <Body style={styles.body}>
        <div style={styles.container}>
          <Text style={styles.p}>{greeting}</Text>
          <Text style={styles.p}>
            I saw that you tried to upgrade your Sim plan but didn&apos;t end up completing it.
          </Text>
          <Text style={styles.p}>
            Did you run into an issue, or did you have a question? Here to help.
          </Text>
          <Text style={styles.p}>
            Emir
            <br />
            Founder, Sim
          </Text>
        </div>
      </Body>
    </Html>
  )
}

export default AbandonedCheckoutEmail

View File

@@ -0,0 +1,7 @@
/**
 * Pro plan features shown in billing upgrade emails.
 * Rendered as a two-column table: bold `label` plus muted `desc`.
 * NOTE(review): keep in sync with the actual Pro plan limits — TODO confirm
 * against the billing plan configuration.
 */
export const proFeatures = [
  { label: '6,000 credits/month', desc: 'included' },
  { label: '+50 daily refresh', desc: 'credits per day' },
  { label: '150 runs/min', desc: 'sync executions' },
  { label: '50GB storage', desc: 'for files & assets' },
] as const

View File

@@ -0,0 +1,102 @@
import { Link, Section, Text } from '@react-email/components'
import { baseStyles, colors, typography } from '@/components/emails/_styles'
import { proFeatures } from '@/components/emails/billing/constants'
import { EmailLayout } from '@/components/emails/components'
import { dollarsToCredits } from '@/lib/billing/credits/conversion'
import { getBrandConfig } from '@/ee/whitelabeling'
interface CreditsExhaustedEmailProps {
userName?: string
limit: number
upgradeLink: string
}
/**
 * Branded, one-time notification sent when a free-tier user has used every
 * free credit. Shows what Pro includes and links to the upgrade flow.
 *
 * @param userName - optional first name for the greeting.
 * @param limit - free-tier limit in dollars; converted to credits for display.
 * @param upgradeLink - URL for the "Upgrade to Pro" call to action.
 */
export function CreditsExhaustedEmail({
  userName,
  limit,
  upgradeLink,
}: CreditsExhaustedEmailProps) {
  const brand = getBrandConfig()
  // Compute the displayed credit total once; it appears in both preview and body.
  const creditTotal = dollarsToCredits(limit).toLocaleString()

  const calloutStyle = {
    backgroundColor: '#f8faf9',
    border: `1px solid ${colors.brandTertiary}20`,
    borderRadius: '8px',
    padding: '16px 20px',
    margin: '16px 0',
  }
  const calloutHeadingStyle = {
    fontSize: '14px',
    fontWeight: 600,
    color: colors.brandTertiary,
    fontFamily: typography.fontFamily,
    margin: '0 0 12px 0',
    textTransform: 'uppercase' as const,
    letterSpacing: '0.5px',
  }
  const featureLabelStyle = {
    padding: '6px 0',
    fontSize: '15px',
    fontWeight: 600,
    color: colors.textPrimary,
    fontFamily: typography.fontFamily,
    width: '45%',
  }
  const featureDescStyle = {
    padding: '6px 0',
    fontSize: '14px',
    color: colors.textMuted,
    fontFamily: typography.fontFamily,
  }

  return (
    <EmailLayout
      preview={`You've used all ${creditTotal} of your free ${brand.name} credits`}
      showUnsubscribe={true}
    >
      <Text style={{ ...baseStyles.paragraph, marginTop: 0 }}>
        {userName ? `Hi ${userName},` : 'Hi,'}
      </Text>
      <Text style={baseStyles.paragraph}>
        You&apos;ve used all <strong>{creditTotal}</strong> of your free credits on {brand.name}.
        Your workflows are paused until you upgrade.
      </Text>
      <Section style={calloutStyle}>
        <Text style={calloutHeadingStyle}>Pro includes</Text>
        <table style={{ width: '100%', borderCollapse: 'collapse' }}>
          <tbody>
            {proFeatures.map((feature, i) => (
              <tr key={i}>
                <td style={featureLabelStyle}>{feature.label}</td>
                <td style={featureDescStyle}>{feature.desc}</td>
              </tr>
            ))}
          </tbody>
        </table>
      </Section>
      <Link href={upgradeLink} style={{ textDecoration: 'none' }}>
        <Text style={baseStyles.button}>Upgrade to Pro</Text>
      </Link>
      <div style={baseStyles.divider} />
      <Text style={{ ...baseStyles.footerText, textAlign: 'left' }}>
        One-time notification when free credits are exhausted.
      </Text>
    </EmailLayout>
  )
}

export default CreditsExhaustedEmail

View File

@@ -1,5 +1,6 @@
import { Link, Section, Text } from '@react-email/components'
import { baseStyles, colors, typography } from '@/components/emails/_styles'
import { proFeatures } from '@/components/emails/billing/constants'
import { EmailLayout } from '@/components/emails/components'
import { dollarsToCredits } from '@/lib/billing/credits/conversion'
import { getBrandConfig } from '@/ee/whitelabeling'
@@ -12,13 +13,6 @@ interface FreeTierUpgradeEmailProps {
upgradeLink: string
}
const proFeatures = [
{ label: '6,000 credits/month', desc: 'included' },
{ label: '+50 daily refresh', desc: 'credits per day' },
{ label: '150 runs/min', desc: 'sync executions' },
{ label: '50GB storage', desc: 'for files & assets' },
]
export function FreeTierUpgradeEmail({
userName,
percentUsed,
@@ -105,7 +99,7 @@ export function FreeTierUpgradeEmail({
<div style={baseStyles.divider} />
<Text style={{ ...baseStyles.footerText, textAlign: 'left' }}>
One-time notification at 90% usage.
One-time notification at 80% usage.
</Text>
</EmailLayout>
)

View File

@@ -1,4 +1,6 @@
export { AbandonedCheckoutEmail } from './abandoned-checkout-email'
export { CreditPurchaseEmail } from './credit-purchase-email'
export { CreditsExhaustedEmail } from './credits-exhausted-email'
export { EnterpriseSubscriptionEmail } from './enterprise-subscription-email'
export { FreeTierUpgradeEmail } from './free-tier-upgrade-email'
export { PaymentFailedEmail } from './payment-failed-email'

View File

@@ -41,8 +41,9 @@ export function EmailLayout({
{/* Header with logo */}
<Section style={baseStyles.header}>
<Img
src={brand.logoUrl || `${baseUrl}/brand/color/email/type.png`}
width='70'
src={brand.logoUrl || `${baseUrl}/brand/color/email/wordmark.png`}
width='107'
height='33'
alt={brand.name}
style={{ display: 'block' }}
/>

View File

@@ -1,7 +1,14 @@
import { render } from '@react-email/components'
import { OTPVerificationEmail, ResetPasswordEmail, WelcomeEmail } from '@/components/emails/auth'
import {
OnboardingFollowupEmail,
OTPVerificationEmail,
ResetPasswordEmail,
WelcomeEmail,
} from '@/components/emails/auth'
import {
AbandonedCheckoutEmail,
CreditPurchaseEmail,
CreditsExhaustedEmail,
EnterpriseSubscriptionEmail,
FreeTierUpgradeEmail,
PaymentFailedEmail,
@@ -159,6 +166,22 @@ export async function renderWelcomeEmail(userName?: string): Promise<string> {
return await render(WelcomeEmail({ userName }))
}
export async function renderOnboardingFollowupEmail(userName?: string): Promise<string> {
return await render(OnboardingFollowupEmail({ userName }))
}
export async function renderAbandonedCheckoutEmail(userName?: string): Promise<string> {
return await render(AbandonedCheckoutEmail({ userName }))
}
export async function renderCreditsExhaustedEmail(params: {
userName?: string
limit: number
upgradeLink: string
}): Promise<string> {
return await render(CreditsExhaustedEmail(params))
}
export async function renderCreditPurchaseEmail(params: {
userName?: string
amount: number

View File

@@ -16,6 +16,9 @@ export type EmailSubjectType =
| 'plan-welcome-pro'
| 'plan-welcome-team'
| 'credit-purchase'
| 'abandoned-checkout'
| 'free-tier-exhausted'
| 'onboarding-followup'
| 'welcome'
/**
@@ -48,13 +51,19 @@ export function getEmailSubject(type: EmailSubjectType): string {
case 'usage-threshold':
return `You're nearing your monthly budget on ${brandName}`
case 'free-tier-upgrade':
return `You're at 90% of your free credits on ${brandName}`
return `You're at 80% of your free credits on ${brandName}`
case 'plan-welcome-pro':
return `Your Pro plan is now active on ${brandName}`
case 'plan-welcome-team':
return `Your Team plan is now active on ${brandName}`
case 'credit-purchase':
return `Credits added to your ${brandName} account`
case 'abandoned-checkout':
return `Quick question`
case 'free-tier-exhausted':
return `You've run out of free credits on ${brandName}`
case 'onboarding-followup':
return `Quick question about ${brandName}`
case 'welcome':
return `Welcome to ${brandName}`
default:

View File

@@ -124,6 +124,29 @@ export function ConditionalIcon(props: SVGProps<SVGSVGElement>) {
)
}
export function CredentialIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg {...props} viewBox='0 0 24 24' fill='none' xmlns='http://www.w3.org/2000/svg'>
<circle cx='8' cy='15' r='4' stroke='currentColor' strokeWidth='1.75' />
<path d='M11.83 13.17L20 5' stroke='currentColor' strokeWidth='1.75' strokeLinecap='round' />
<path
d='M18 7l2 2'
stroke='currentColor'
strokeWidth='1.75'
strokeLinecap='round'
strokeLinejoin='round'
/>
<path
d='M15 10l2 2'
stroke='currentColor'
strokeWidth='1.75'
strokeLinecap='round'
strokeLinejoin='round'
/>
</svg>
)
}
export function NoteIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg

View File

@@ -35,6 +35,8 @@ export enum BlockType {
WORKFLOW = 'workflow',
WORKFLOW_INPUT = 'workflow_input',
CREDENTIAL = 'credential',
WAIT = 'wait',
NOTE = 'note',

View File

@@ -21,6 +21,7 @@ vi.mock('@/lib/core/config/feature-flags', () => ({
isEmailVerificationEnabled: false,
isBillingEnabled: false,
isOrganizationsEnabled: false,
isAccessControlEnabled: false,
}))
vi.mock('@/providers/utils', () => ({
@@ -110,6 +111,12 @@ vi.mock('@sim/db/schema', () => ({
},
}))
const mockGetCustomToolById = vi.fn()
vi.mock('@/lib/workflows/custom-tools/operations', () => ({
getCustomToolById: (...args: unknown[]) => mockGetCustomToolById(...args),
}))
setupGlobalFetchMock()
const mockGetAllBlocks = getAllBlocks as Mock
@@ -1957,49 +1964,22 @@ describe('AgentBlockHandler', () => {
const staleInlineCode = 'return { title, content };'
const dbCode = 'return { title, content, format };'
function mockFetchForCustomTool(toolId: string) {
mockFetch.mockImplementation((url: string) => {
if (typeof url === 'string' && url.includes('/api/tools/custom')) {
function mockDBForCustomTool(toolId: string) {
mockGetCustomToolById.mockImplementation(({ toolId: id }: { toolId: string }) => {
if (id === toolId) {
return Promise.resolve({
ok: true,
headers: { get: () => null },
json: () =>
Promise.resolve({
data: [
{
id: toolId,
title: 'formatReport',
schema: dbSchema,
code: dbCode,
},
],
}),
id: toolId,
title: 'formatReport',
schema: dbSchema,
code: dbCode,
})
}
return Promise.resolve({
ok: true,
headers: { get: () => null },
json: () => Promise.resolve({}),
})
return Promise.resolve(null)
})
}
function mockFetchFailure() {
mockFetch.mockImplementation((url: string) => {
if (typeof url === 'string' && url.includes('/api/tools/custom')) {
return Promise.resolve({
ok: false,
status: 500,
headers: { get: () => null },
json: () => Promise.resolve({}),
})
}
return Promise.resolve({
ok: true,
headers: { get: () => null },
json: () => Promise.resolve({}),
})
})
function mockDBFailure() {
mockGetCustomToolById.mockRejectedValue(new Error('DB connection failed'))
}
beforeEach(() => {
@@ -2008,11 +1988,13 @@ describe('AgentBlockHandler', () => {
writable: true,
configurable: true,
})
mockGetCustomToolById.mockReset()
mockContext.userId = 'test-user'
})
it('should always fetch latest schema from DB when customToolId is present', async () => {
const toolId = 'custom-tool-123'
mockFetchForCustomTool(toolId)
mockDBForCustomTool(toolId)
const inputs = {
model: 'gpt-4o',
@@ -2046,7 +2028,7 @@ describe('AgentBlockHandler', () => {
it('should fetch from DB when customToolId has no inline schema', async () => {
const toolId = 'custom-tool-123'
mockFetchForCustomTool(toolId)
mockDBForCustomTool(toolId)
const inputs = {
model: 'gpt-4o',
@@ -2075,7 +2057,7 @@ describe('AgentBlockHandler', () => {
})
it('should fall back to inline schema when DB fetch fails and inline exists', async () => {
mockFetchFailure()
mockDBFailure()
const inputs = {
model: 'gpt-4o',
@@ -2107,7 +2089,7 @@ describe('AgentBlockHandler', () => {
})
it('should return null when DB fetch fails and no inline schema exists', async () => {
mockFetchFailure()
mockDBFailure()
const inputs = {
model: 'gpt-4o',
@@ -2135,7 +2117,7 @@ describe('AgentBlockHandler', () => {
it('should use DB schema when customToolId resolves', async () => {
const toolId = 'custom-tool-123'
mockFetchForCustomTool(toolId)
mockDBForCustomTool(toolId)
const inputs = {
model: 'gpt-4o',
@@ -2185,10 +2167,7 @@ describe('AgentBlockHandler', () => {
await handler.execute(mockContext, mockBlock, inputs)
const customToolFetches = mockFetch.mock.calls.filter(
(call: any[]) => typeof call[0] === 'string' && call[0].includes('/api/tools/custom')
)
expect(customToolFetches.length).toBe(0)
expect(mockGetCustomToolById).not.toHaveBeenCalled()
expect(mockExecuteProviderRequest).toHaveBeenCalled()
const providerCall = mockExecuteProviderRequest.mock.calls[0]

View File

@@ -3,6 +3,7 @@ import { mcpServers } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, inArray, isNull } from 'drizzle-orm'
import { createMcpToolId } from '@/lib/mcp/utils'
import { getCustomToolById } from '@/lib/workflows/custom-tools/operations'
import { getAllBlocks } from '@/blocks'
import type { BlockOutput } from '@/blocks/types'
import {
@@ -277,39 +278,18 @@ export class AgentBlockHandler implements BlockHandler {
ctx: ExecutionContext,
customToolId: string
): Promise<{ schema: any; title: string } | null> {
if (!ctx.userId) {
logger.error('Cannot fetch custom tool without userId:', { customToolId })
return null
}
try {
const headers = await buildAuthHeaders(ctx.userId)
const params: Record<string, string> = {}
if (ctx.workspaceId) {
params.workspaceId = ctx.workspaceId
}
if (ctx.workflowId) {
params.workflowId = ctx.workflowId
}
if (ctx.userId) {
params.userId = ctx.userId
}
const url = buildAPIUrl('/api/tools/custom', params)
const response = await fetch(url.toString(), {
method: 'GET',
headers,
const tool = await getCustomToolById({
toolId: customToolId,
userId: ctx.userId,
workspaceId: ctx.workspaceId,
})
if (!response.ok) {
await response.text().catch(() => {})
logger.error(`Failed to fetch custom tools: ${response.status}`)
return null
}
const data = await response.json()
if (!data.data || !Array.isArray(data.data)) {
logger.error('Invalid custom tools API response')
return null
}
const tool = data.data.find((t: any) => t.id === customToolId)
if (!tool) {
logger.warn(`Custom tool not found by ID: ${customToolId}`)
return null

View File

@@ -0,0 +1,111 @@
import { db } from '@sim/db'
import { credential } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, asc, eq, inArray } from 'drizzle-orm'
import type { BlockOutput } from '@/blocks/types'
import { BlockType } from '@/executor/constants'
import type { BlockHandler, ExecutionContext } from '@/executor/types'
import type { SerializedBlock } from '@/serializer/types'
const logger = createLogger('CredentialBlockHandler')
/**
 * Executes the Credential block.
 *
 * "select" resolves a single OAuth credential ID to its metadata; "list"
 * returns every OAuth credential in the workspace, optionally filtered by
 * provider. Only credential IDs and display metadata are returned — never
 * secret values — so outputs are safe to log.
 */
export class CredentialBlockHandler implements BlockHandler {
  canHandle(block: SerializedBlock): boolean {
    return block.metadata?.id === BlockType.CREDENTIAL
  }

  async execute(
    ctx: ExecutionContext,
    _block: SerializedBlock,
    inputs: Record<string, unknown>
  ): Promise<BlockOutput> {
    // Credentials are workspace-scoped; resolution is impossible without one.
    if (!ctx.workspaceId) {
      throw new Error('workspaceId is required for credential resolution')
    }

    const operation = typeof inputs.operation === 'string' ? inputs.operation : 'select'
    if (operation === 'list') {
      return this.listCredentials(ctx.workspaceId, inputs)
    }
    return this.selectCredential(ctx.workspaceId, inputs)
  }

  /**
   * Resolves one credential by ID, verifying it belongs to the workspace and
   * is an OAuth credential.
   *
   * @throws when no credential ID was provided or it cannot be found.
   */
  private async selectCredential(
    workspaceId: string,
    inputs: Record<string, unknown>
  ): Promise<BlockOutput> {
    const credentialId = typeof inputs.credentialId === 'string' ? inputs.credentialId.trim() : ''
    if (!credentialId) {
      throw new Error('No credential selected')
    }

    const record = await db.query.credential.findFirst({
      where: and(
        eq(credential.id, credentialId),
        eq(credential.workspaceId, workspaceId),
        eq(credential.type, 'oauth')
      ),
      columns: {
        id: true,
        displayName: true,
        providerId: true,
      },
    })

    if (!record) {
      throw new Error(`Credential not found: ${credentialId}`)
    }

    logger.info('Credential block resolved', { credentialId: record.id })

    return {
      credentialId: record.id,
      displayName: record.displayName,
      providerId: record.providerId ?? '',
    }
  }

  /**
   * Lists the workspace's OAuth credentials, optionally filtered by provider,
   * ordered by display name.
   */
  private async listCredentials(
    workspaceId: string,
    inputs: Record<string, unknown>
  ): Promise<BlockOutput> {
    const providerFilter = this.parseProviderFilter(inputs.providerFilter)

    const conditions = [eq(credential.workspaceId, workspaceId), eq(credential.type, 'oauth')]
    if (providerFilter.length > 0) {
      conditions.push(inArray(credential.providerId, providerFilter))
    }

    const records = await db.query.credential.findMany({
      where: and(...conditions),
      columns: {
        id: true,
        displayName: true,
        providerId: true,
      },
      orderBy: [asc(credential.displayName)],
    })

    const credentials = records.map((r) => ({
      credentialId: r.id,
      displayName: r.displayName,
      providerId: r.providerId ?? '',
    }))

    logger.info('Credential block listed credentials', {
      count: credentials.length,
      providerFilter: providerFilter.length > 0 ? providerFilter : undefined,
    })

    return {
      credentials,
      count: credentials.length,
    }
  }

  /**
   * Normalizes the providerFilter input to a clean string[].
   *
   * The subblock is declared `type: 'json'`, so in advanced/manual mode the
   * value may arrive as a JSON-encoded array string (or a bare provider ID)
   * rather than an actual array; previously such values were silently ignored.
   * Non-string array entries are dropped so only valid provider IDs reach the
   * SQL `inArray` clause.
   */
  private parseProviderFilter(raw: unknown): string[] {
    let value: unknown = raw
    if (typeof value === 'string') {
      const trimmed = value.trim()
      if (!trimmed) return []
      try {
        value = JSON.parse(trimmed)
      } catch {
        // Not JSON — treat a bare provider ID as a single-element filter.
        return [trimmed]
      }
    }
    if (!Array.isArray(value)) return []
    return value.filter((v): v is string => typeof v === 'string' && v.length > 0)
  }
}

View File

@@ -8,6 +8,7 @@
import { AgentBlockHandler } from '@/executor/handlers/agent/agent-handler'
import { ApiBlockHandler } from '@/executor/handlers/api/api-handler'
import { ConditionBlockHandler } from '@/executor/handlers/condition/condition-handler'
import { CredentialBlockHandler } from '@/executor/handlers/credential/credential-handler'
import { EvaluatorBlockHandler } from '@/executor/handlers/evaluator/evaluator-handler'
import { FunctionBlockHandler } from '@/executor/handlers/function/function-handler'
import { GenericBlockHandler } from '@/executor/handlers/generic/generic-handler'
@@ -42,6 +43,7 @@ export function createBlockHandlers(): BlockHandler[] {
new WorkflowBlockHandler(),
new WaitBlockHandler(),
new EvaluatorBlockHandler(),
new CredentialBlockHandler(),
new GenericBlockHandler(),
]
}

View File

@@ -89,6 +89,7 @@ interface CreateApiKeyParams {
workspaceId: string
name: string
keyType: 'personal' | 'workspace'
source?: 'settings' | 'deploy_modal'
}
/**
@@ -98,16 +99,19 @@ export function useCreateApiKey() {
const queryClient = useQueryClient()
return useMutation({
mutationFn: async ({ workspaceId, name, keyType }: CreateApiKeyParams) => {
mutationFn: async ({ workspaceId, name, keyType, source }: CreateApiKeyParams) => {
const url =
keyType === 'workspace'
? `/api/workspaces/${workspaceId}/api-keys`
: '/api/users/me/api-keys'
const body: Record<string, unknown> = { name: name.trim() }
if (keyType === 'workspace' && source) body.source = source
const response = await fetch(url, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ name: name.trim() }),
body: JSON.stringify(body),
})
if (!response.ok) {

View File

@@ -73,7 +73,7 @@ export const workspaceCredentialKeys = {
* Fetch workspace credential list from API.
* Used by the prefetch function for hover-based cache warming.
*/
async function fetchWorkspaceCredentialList(
export async function fetchWorkspaceCredentialList(
workspaceId: string,
signal?: AbortSignal
): Promise<WorkspaceCredential[]> {

View File

@@ -2,6 +2,7 @@ import { useQuery } from '@tanstack/react-query'
import type { Credential } from '@/lib/oauth'
import { CREDENTIAL_SET } from '@/executor/constants'
import { useCredentialSetDetail } from '@/hooks/queries/credential-sets'
import { useWorkspaceCredential } from '@/hooks/queries/credentials'
import { fetchJson } from '@/hooks/selectors/helpers'
interface CredentialListResponse {
@@ -163,17 +164,26 @@ export function useCredentialName(
shouldFetchDetail
)
// Fallback for credential blocks that have no serviceId/providerId — look up by ID directly
const { data: workspaceCredential, isFetching: workspaceCredentialLoading } =
useWorkspaceCredential(!providerId && !isCredentialSet ? credentialId : undefined)
const detailCredential = foreignCredentials[0]
const hasForeignMeta = foreignCredentials.length > 0
const displayName =
credentialSetData?.name ?? selectedCredential?.name ?? detailCredential?.name ?? null
credentialSetData?.name ??
selectedCredential?.name ??
detailCredential?.name ??
workspaceCredential?.displayName ??
null
return {
displayName,
isLoading:
credentialsLoading ||
foreignLoading ||
workspaceCredentialLoading ||
(isCredentialSet && credentialSetLoading && !credentialSetData),
hasForeignMeta,
}

View File

@@ -147,6 +147,14 @@ async function initializeOpenTelemetry() {
} catch (err) {
logger.error('Error shutting down OpenTelemetry SDK', err)
}
try {
const { getPostHogClient } = await import('@/lib/posthog/server')
await getPostHogClient()?.shutdown()
logger.info('PostHog client shut down successfully')
} catch (err) {
logger.error('Error shutting down PostHog client', err)
}
}
process.on('SIGTERM', shutdownHandler)

View File

@@ -20,7 +20,7 @@ import {
organization,
} from 'better-auth/plugins'
import { emailHarmony } from 'better-auth-harmony'
import { and, eq, inArray, sql } from 'drizzle-orm'
import { and, count, eq, inArray, sql } from 'drizzle-orm'
import { headers } from 'next/headers'
import Stripe from 'stripe'
import {
@@ -47,6 +47,7 @@ import { isOrgPlan, isTeam } from '@/lib/billing/plan-helpers'
import { getPlans, resolvePlanFromStripeSubscription } from '@/lib/billing/plans'
import { hasPaidSubscriptionStatus } from '@/lib/billing/subscriptions/utils'
import { syncSeatsFromStripeQuantity } from '@/lib/billing/validation/seat-management'
import { handleAbandonedCheckout } from '@/lib/billing/webhooks/checkout'
import { handleChargeDispute, handleDisputeClosed } from '@/lib/billing/webhooks/disputes'
import { handleManualEnterpriseSubscription } from '@/lib/billing/webhooks/enterprise'
import {
@@ -75,6 +76,8 @@ import { processCredentialDraft } from '@/lib/credentials/draft-processor'
import { sendEmail } from '@/lib/messaging/email/mailer'
import { getFromEmailAddress, getPersonalEmailFrom } from '@/lib/messaging/email/utils'
import { quickValidateEmail } from '@/lib/messaging/email/validation'
import { scheduleLifecycleEmail } from '@/lib/messaging/lifecycle'
import { captureServerEvent } from '@/lib/posthog/server'
import { syncAllWebhooksForCredentialSet } from '@/lib/webhooks/utils.server'
import { SSO_TRUSTED_PROVIDERS } from '@/ee/sso/constants'
import { createAnonymousSession, ensureAnonymousUserExists } from './anonymous'
@@ -221,6 +224,19 @@ export const auth = betterAuth({
error,
})
}
try {
await scheduleLifecycleEmail({
userId: user.id,
type: 'onboarding-followup',
delayDays: 5,
})
} catch (error) {
logger.error(
'[databaseHooks.user.create.after] Failed to schedule onboarding followup email',
{ userId: user.id, error }
)
}
}
},
},
@@ -355,6 +371,40 @@ export const auth = betterAuth({
})
}
try {
const [{ value: accountCount }] = await db
.select({ value: count() })
.from(schema.account)
.where(eq(schema.account.userId, account.userId))
if (accountCount === 1) {
const { providerId } = account
const authMethod =
providerId === 'credential'
? 'email'
: SSO_TRUSTED_PROVIDERS.includes(providerId)
? 'sso'
: 'oauth'
captureServerEvent(
account.userId,
'user_created',
{
auth_method: authMethod,
...(providerId !== 'credential' ? { provider: providerId } : {}),
},
{ setOnce: { signup_at: new Date().toISOString() } }
)
}
} catch (error) {
logger.error(
'[databaseHooks.account.create.after] Failed to capture user_created event',
{
userId: account.userId,
error,
}
)
}
if (account.providerId === 'salesforce') {
const updates: {
accessTokenExpiresAt?: Date
@@ -596,6 +646,19 @@ export const auth = betterAuth({
error,
})
}
try {
await scheduleLifecycleEmail({
userId: user.id,
type: 'onboarding-followup',
delayDays: 5,
})
} catch (error) {
logger.error(
'[emailVerification.onEmailVerification] Failed to schedule onboarding followup email',
{ userId: user.id, error }
)
}
}
},
},
@@ -2954,6 +3017,10 @@ export const auth = betterAuth({
await handleManualEnterpriseSubscription(event)
break
}
case 'checkout.session.expired': {
await handleAbandonedCheckout(event)
break
}
case 'charge.dispute.created': {
await handleChargeDispute(event)
break

View File

@@ -4,6 +4,7 @@ import { createLogger } from '@sim/logger'
import { eq, inArray } from 'drizzle-orm'
import {
getEmailSubject,
renderCreditsExhaustedEmail,
renderFreeTierUpgradeEmail,
renderUsageThresholdEmail,
} from '@/components/emails'
@@ -714,16 +715,16 @@ export async function maybeSendUsageThresholdEmail(params: {
const baseUrl = getBaseUrl()
const isFreeUser = params.planName === 'Free'
// Check for 80% threshold (all users)
// Check for 80% threshold crossing — used for paid users (budget warning) and free users (upgrade nudge)
const crosses80 = params.percentBefore < 80 && params.percentAfter >= 80
// Check for 90% threshold (free users only)
const crosses90 = params.percentBefore < 90 && params.percentAfter >= 90
// Check for 100% threshold (free users only — credits exhausted)
const crosses100 = params.percentBefore < 100 && params.percentAfter >= 100
// Skip if no thresholds crossed
if (!crosses80 && !crosses90) return
if (!crosses80 && !crosses100) return
// For 80% threshold email (all users)
if (crosses80) {
// For 80% threshold email (paid users only)
if (crosses80 && !isFreeUser) {
const ctaLink = `${baseUrl}/workspace?billing=usage`
const sendTo = async (email: string, name?: string) => {
const prefs = await getEmailPreferences(email)
@@ -777,8 +778,8 @@ export async function maybeSendUsageThresholdEmail(params: {
}
}
// For 90% threshold email (free users only)
if (crosses90 && isFreeUser) {
// For 80% threshold email (free users only — skip if they also crossed 100% in same call)
if (crosses80 && isFreeUser && !crosses100) {
const upgradeLink = `${baseUrl}/workspace?billing=upgrade`
const sendFreeTierEmail = async (email: string, name?: string) => {
const prefs = await getEmailPreferences(email)
@@ -818,6 +819,44 @@ export async function maybeSendUsageThresholdEmail(params: {
await sendFreeTierEmail(params.userEmail, params.userName)
}
}
// For 100% threshold email (free users only — credits exhausted)
if (crosses100 && isFreeUser) {
const upgradeLink = `${baseUrl}/workspace?billing=upgrade`
const sendExhaustedEmail = async (email: string, name?: string) => {
const prefs = await getEmailPreferences(email)
if (prefs?.unsubscribeAll || prefs?.unsubscribeNotifications) return
const html = await renderCreditsExhaustedEmail({
userName: name,
limit: params.limit,
upgradeLink,
})
await sendEmail({
to: email,
subject: getEmailSubject('free-tier-exhausted'),
html,
emailType: 'notifications',
})
logger.info('Free tier credits exhausted email sent', {
email,
currentUsage: params.currentUsageAfter,
limit: params.limit,
})
}
if (params.scope === 'user' && params.userId && params.userEmail) {
const rows = await db
.select({ enabled: settings.billingUsageNotificationsEnabled })
.from(settings)
.where(eq(settings.userId, params.userId))
.limit(1)
if (rows.length > 0 && rows[0].enabled === false) return
await sendExhaustedEmail(params.userEmail, params.userName)
}
}
} catch (error) {
logger.error('Failed to send usage threshold email', {
scope: params.scope,

View File

@@ -0,0 +1,58 @@
import { db } from '@sim/db'
import { user } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import type Stripe from 'stripe'
import { getEmailSubject, renderAbandonedCheckoutEmail } from '@/components/emails'
import { isProPlan } from '@/lib/billing/core/subscription'
import { sendEmail } from '@/lib/messaging/email/mailer'
import { getPersonalEmailFrom } from '@/lib/messaging/email/utils'
const logger = createLogger('CheckoutWebhooks')
/**
 * Handles checkout.session.expired — fires when a user starts an upgrade but doesn't complete it.
 * Sends a plain personal email to check in and offer help.
 * Only fires for subscription-mode sessions to avoid misfires on credit purchase or setup sessions.
 * Skips users who have already completed a subscription (session may expire after a successful upgrade).
 */
export async function handleAbandonedCheckout(event: Stripe.Event): Promise<void> {
  const session = event.data.object as Stripe.Checkout.Session

  // Credit purchases and setup sessions also expire — only subscription checkouts matter here.
  if (session.mode !== 'subscription') return

  const customerId =
    typeof session.customer === 'string' ? session.customer : session.customer?.id
  if (!customerId) {
    logger.warn('No customer ID on expired session', { sessionId: session.id })
    return
  }

  const matches = await db
    .select({ id: user.id, email: user.email, name: user.name })
    .from(user)
    .where(eq(user.stripeCustomerId, customerId))
    .limit(1)
  const userData = matches[0]
  if (!userData?.email) {
    logger.warn('No user found for Stripe customer', { customerId, sessionId: session.id })
    return
  }

  // Skip if the user already has a paid plan (direct or via org) — covers the session
  // expiring after a successful upgrade through a different checkout.
  if (await isProPlan(userData.id)) return

  const { from, replyTo } = getPersonalEmailFrom()
  const html = await renderAbandonedCheckoutEmail(userData.name || undefined)
  await sendEmail({
    to: userData.email,
    subject: getEmailSubject('abandoned-checkout'),
    html,
    from,
    replyTo,
    emailType: 'notifications',
  })
  logger.info('Sent abandoned checkout email', { userId: userData.id, sessionId: session.id })
}

View File

@@ -13,6 +13,7 @@ import {
getBilledOverageForSubscription,
resetUsageForSubscription,
} from '@/lib/billing/webhooks/invoices'
import { captureServerEvent } from '@/lib/posthog/server'
const logger = createLogger('StripeSubscriptionWebhooks')
@@ -155,6 +156,14 @@ export async function handleSubscriptionCreated(subscriptionData: {
otherActiveSubscriptionsCount: otherActiveSubscriptions.length,
})
}
if (wasFreePreviously && isPaidPlan) {
captureServerEvent(subscriptionData.referenceId, 'subscription_created', {
plan: subscriptionData.plan ?? 'unknown',
status: subscriptionData.status,
reference_id: subscriptionData.referenceId,
})
}
} catch (error) {
logger.error('Failed to handle subscription creation usage reset', {
subscriptionId: subscriptionData.id,
@@ -205,6 +214,12 @@ export async function handleSubscriptionDeleted(subscription: {
organizationDeleted,
membersSynced,
})
captureServerEvent(subscription.referenceId, 'subscription_cancelled', {
plan: subscription.plan ?? 'unknown',
reference_id: subscription.referenceId,
})
return
}
@@ -337,6 +352,11 @@ export async function handleSubscriptionDeleted(subscription: {
organizationDeleted,
membersSynced,
})
captureServerEvent(subscription.referenceId, 'subscription_cancelled', {
plan: subscription.plan ?? 'unknown',
reference_id: subscription.referenceId,
})
} catch (error) {
logger.error('Failed to handle subscription deletion', {
subscriptionId: subscription.id,

View File

@@ -0,0 +1,48 @@
import { createLogger } from '@sim/logger'
import { tasks } from '@trigger.dev/sdk'
import { env } from '@/lib/core/config/env'
import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
const logger = createLogger('LifecycleEmail')
export const LIFECYCLE_EMAIL_TASK_ID = 'lifecycle-email' as const
/** Supported lifecycle email types. Add new types here as the sequence grows. */
export type LifecycleEmailType = 'onboarding-followup'
interface ScheduleLifecycleEmailOptions {
userId: string
type: LifecycleEmailType
delayDays: number
}
/**
 * Schedules a lifecycle email to be sent after a delay.
 * Uses Trigger.dev's built-in delay scheduling — no polling or cron needed.
 *
 * No-ops (with an info log) when Trigger.dev is not configured for this deployment.
 */
export async function scheduleLifecycleEmail({
  userId,
  type,
  delayDays,
}: ScheduleLifecycleEmailOptions): Promise<void> {
  const triggerConfigured = isTriggerDevEnabled && Boolean(env.TRIGGER_SECRET_KEY)
  if (!triggerConfigured) {
    logger.info('[lifecycle] Trigger.dev not configured, skipping lifecycle email', {
      userId,
      type,
    })
    return
  }

  const millisPerDay = 24 * 60 * 60 * 1000
  const sendAt = new Date(Date.now() + delayDays * millisPerDay)

  await tasks.trigger(
    LIFECYCLE_EMAIL_TASK_ID,
    { userId, type },
    {
      delay: sendAt,
      // One scheduled send per (type, user) — repeat calls are deduped by Trigger.dev.
      idempotencyKey: `lifecycle-${type}-${userId}`,
    }
  )
  logger.info('[lifecycle] Scheduled lifecycle email', { userId, type, delayDays })
}

View File

@@ -0,0 +1,46 @@
import type { PostHog } from 'posthog-js'
import type { PostHogEventMap, PostHogEventName } from '@/lib/posthog/events'
/**
 * Capture a client-side PostHog event from a non-React context (e.g. Zustand stores).
 *
 * Uses the same dynamic `import('posthog-js')` pattern as `session-provider.tsx`.
 * Fully fire-and-forget — never throws, never blocks.
 *
 * React components should use {@link captureEvent} with the `posthog` instance from `usePostHog()`.
 *
 * @param event - Typed event name from {@link PostHogEventMap}.
 * @param properties - Strongly-typed property bag for this event.
 */
export function captureClientEvent<E extends PostHogEventName>(
  event: E,
  properties: PostHogEventMap[E]
): void {
  void import('posthog-js')
    .then(({ default: posthog }) => {
      try {
        // Guard against a partially-initialized module shape before calling into it.
        if (typeof posthog.capture === 'function') {
          posthog.capture(event, properties)
        }
      } catch {
        // Swallow — analytics must never break the caller.
      }
    })
    .catch(() => {
      // Module failed to load (e.g. blocked or offline) — ignore.
    })
}
/**
 * Typed wrapper for `posthog.capture` in React components.
 *
 * Enforces event names and property shapes from {@link PostHogEventMap} at compile time,
 * matching the type safety provided by `captureServerEvent` on the server side.
 * Safe to call with a missing instance (e.g. before the provider mounts) — it no-ops.
 *
 * @param posthog - PostHog instance from `usePostHog()`.
 * @param event - Typed event name from {@link PostHogEventMap}.
 * @param properties - Strongly-typed property bag for this event.
 */
export function captureEvent<E extends PostHogEventName>(
  posthog: PostHog | null | undefined,
  event: E,
  properties: PostHogEventMap[E]
): void {
  if (!posthog) return
  posthog.capture(event, properties as Record<string, unknown>)
}

View File

@@ -0,0 +1,394 @@
/**
 * PostHog product analytics event catalog.
 *
 * Type-only module — zero runtime overhead. All event names and property shapes
 * are defined here as a single source of truth for compile-time safety at every
 * capture call site.
 */
export interface PostHogEventMap {
  // --- Auth & acquisition ---
  user_created: {
    auth_method: 'email' | 'oauth' | 'sso'
    // Present only for non-credential signups (oauth/sso provider id).
    provider?: string
  }
  landing_page_viewed: Record<string, never>
  signup_page_viewed: Record<string, never>

  // --- Billing lifecycle ---
  subscription_created: {
    plan: string
    status: string
    reference_id: string
  }
  subscription_cancelled: {
    plan: string
    reference_id: string
  }
  subscription_changed: {
    from_plan: string
    to_plan: string
    interval: string
  }

  // --- Workspace & membership ---
  workspace_created: {
    workspace_id: string
    name: string
  }
  workspace_member_invited: {
    workspace_id: string
    invitee_role: string
  }
  workspace_member_removed: {
    workspace_id: string
    is_self_removal: boolean
  }
  workspace_member_role_changed: {
    workspace_id: string
    new_role: string
  }

  // --- Workflow lifecycle & execution ---
  workflow_created: {
    workflow_id: string
    workspace_id: string
    name: string
  }
  workflow_deployed: {
    workflow_id: string
    workspace_id: string
  }
  /** `block_types` is a comma-separated deduped list of block types that ran. */
  workflow_executed: {
    workflow_id: string
    workspace_id: string
    trigger_type: string
    success: boolean
    block_count: number
    block_types: string
    duration_ms: number
  }
  workflow_execution_failed: {
    workflow_id: string
    workspace_id: string
    trigger_type: string
    error_message: string
  }
  workflow_duplicated: {
    source_workflow_id: string
    new_workflow_id: string
    workspace_id: string
  }
  workflow_deleted: {
    workflow_id: string
    workspace_id: string
  }
  workflow_deployment_reverted: {
    workflow_id: string
    workspace_id: string
    version: string
  }
  workflow_execution_cancelled: {
    workflow_id: string
    workspace_id: string
  }
  workflow_undeployed: {
    workflow_id: string
    workspace_id: string
  }
  workflow_restored: {
    workflow_id: string
    workspace_id: string
  }
  workflow_public_api_toggled: {
    workflow_id: string
    workspace_id: string
    is_public: boolean
  }
  deployment_version_activated: {
    workflow_id: string
    workspace_id: string
    // NOTE: numeric here, while workflow_deployment_reverted.version is a string.
    version: number
  }

  // --- Webhook triggers ---
  webhook_trigger_created: {
    webhook_id: string
    workflow_id: string
    workspace_id: string
    provider: string
  }
  webhook_trigger_deleted: {
    webhook_id: string
    workflow_id: string
    workspace_id: string
    provider: string
  }

  // --- Skills ---
  skill_created: {
    skill_id: string
    skill_name: string
    workspace_id: string
    source?: 'settings' | 'tool_input'
  }
  skill_deleted: {
    skill_id: string
    workspace_id: string
    source?: 'settings' | 'tool_input'
  }
  workspace_deleted: {
    workspace_id: string
    workflow_count: number
  }
  notification_channel_deleted: {
    notification_id: string
    workspace_id: string
    notification_type: string
  }

  // --- A2A agents ---
  a2a_agent_deleted: {
    agent_id: string
    workflow_id: string
    workspace_id: string
  }
  a2a_agent_published: {
    agent_id: string
    workflow_id: string
    workspace_id: string
  }
  a2a_agent_unpublished: {
    agent_id: string
    workflow_id: string
    workspace_id: string
  }
  a2a_agent_created: {
    agent_id: string
    workflow_id: string
    workspace_id: string
  }

  // --- Editor blocks ---
  block_added: {
    block_type: string
    workflow_id: string
  }
  block_removed: {
    block_type: string
    workflow_id: string
  }

  // --- Knowledge bases ---
  knowledge_base_created: {
    knowledge_base_id: string
    workspace_id: string
    name: string
  }
  knowledge_base_document_uploaded: {
    knowledge_base_id: string
    workspace_id: string
    document_count: number
    upload_type: 'single' | 'bulk'
  }
  knowledge_base_connector_added: {
    knowledge_base_id: string
    workspace_id: string
    connector_type: string
    sync_interval_minutes: number
  }
  knowledge_base_connector_removed: {
    knowledge_base_id: string
    workspace_id: string
    connector_type: string
    documents_deleted: number
  }
  knowledge_base_connector_synced: {
    knowledge_base_id: string
    workspace_id: string
    connector_type: string
  }
  knowledge_base_opened: {
    knowledge_base_id: string
    knowledge_base_name: string
  }
  file_uploaded: {
    workspace_id: string
    file_type: string
  }

  // --- API keys, MCP servers, credentials ---
  api_key_created: {
    workspace_id: string
    key_name: string
    source?: 'settings' | 'deploy_modal'
  }
  api_key_revoked: {
    workspace_id: string
    key_name: string
  }
  mcp_server_connected: {
    workspace_id: string
    server_name: string
    transport: string
    source?: 'settings' | 'tool_input'
  }
  mcp_server_disconnected: {
    workspace_id: string
    server_name: string
    source?: 'settings' | 'tool_input'
  }
  credential_connected: {
    credential_type: 'oauth' | 'env_workspace' | 'env_personal' | 'service_account'
    provider_id: string
    workspace_id: string
  }
  credential_deleted: {
    credential_type: 'oauth' | 'env_workspace' | 'env_personal' | 'service_account'
    provider_id: string
    workspace_id: string
  }

  // --- Copilot & templates ---
  copilot_chat_sent: {
    workflow_id: string
    workspace_id: string
    has_file_attachments: boolean
    has_contexts: boolean
    mode: string
  }
  copilot_feedback_submitted: {
    is_positive: boolean
    has_text_feedback: boolean
    has_workflow_yaml: boolean
  }
  /** `template_modules` is a space-separated list of module tags, e.g. `"agent tables knowledge-base"`. */
  template_used: {
    template_title: string
    template_modules: string
  }
  settings_tab_viewed: {
    section: string
  }

  // --- Tables ---
  table_opened: {
    table_id: string
    workspace_id: string
  }
  table_created: {
    table_id: string
    workspace_id: string
    column_count: number
  }
  table_deleted: {
    table_id: string
    workspace_id: string
  }

  // --- Custom tools & BYOK ---
  custom_tool_saved: {
    tool_id: string
    workspace_id: string
    tool_name: string
    source?: 'settings' | 'tool_input'
  }
  custom_tool_deleted: {
    tool_id: string
    workspace_id: string
    source?: 'settings' | 'tool_input'
  }
  byok_key_added: {
    workspace_id: string
    provider_id: string
  }
  byok_key_removed: {
    workspace_id: string
    provider_id: string
  }
  notification_channel_created: {
    workspace_id: string
    notification_type: 'webhook' | 'email' | 'slack'
    alert_rule: string | null
  }

  // --- Tasks ---
  task_created: {
    workspace_id: string
  }
  task_renamed: {
    workspace_id: string
  }
  task_deleted: {
    workspace_id: string
  }
  task_marked_read: {
    workspace_id: string
  }
  task_marked_unread: {
    workspace_id: string
  }
  task_message_sent: {
    has_attachments: boolean
    has_contexts: boolean
    is_new_task: boolean
  }

  // --- Onboarding tours & docs ---
  tour_started: {
    tour_type: 'nav' | 'workflow'
  }
  tour_completed: {
    tour_type: 'nav' | 'workflow'
  }
  tour_skipped: {
    tour_type: 'nav' | 'workflow'
    step_index: number
  }
  docs_opened: {
    source: 'help_menu' | 'editor_button' | 'toolbar_context_menu'
    block_type?: string
  }
}
// Union of every event name — the key set of the catalog above.
export type PostHogEventName = keyof PostHogEventMap

View File

@@ -0,0 +1,86 @@
import { createLogger } from '@sim/logger'
import type { PostHogEventMap, PostHogEventName } from '@/lib/posthog/events'
const logger = createLogger('PostHogServer')

// Lazily-created singleton client; stays null until the first capture attempt.
let _client: import('posthog-node').PostHog | null = null
// Sticky off-switch: once the env says analytics is disabled, never re-check.
let _disabled = false

/** Public accessor for the shared server-side PostHog client (null when analytics is disabled). */
export function getPostHogClient(): import('posthog-node').PostHog | null {
  return getClient()
}

function getClient(): import('posthog-node').PostHog | null {
  if (_disabled) return null
  if (_client) return _client

  const key = process.env.NEXT_PUBLIC_POSTHOG_KEY
  const enabled = process.env.NEXT_PUBLIC_POSTHOG_ENABLED
  const analyticsOff = !key || !enabled || enabled === 'false' || enabled === '0'
  if (analyticsOff) {
    _disabled = true
    return null
  }

  // Loaded lazily so the dependency is only paid for when analytics is on.
  // eslint-disable-next-line @typescript-eslint/no-require-imports
  const { PostHog } = require('posthog-node') as typeof import('posthog-node')
  _client = new PostHog(key, {
    host: 'https://us.i.posthog.com',
    flushAt: 20,
    flushInterval: 10_000,
  })
  return _client
}
type PersonProperties = Record<string, string | number | boolean>

interface CaptureOptions {
  /**
   * Associate this event with workspace-level group analytics.
   * Pass `{ workspace: workspaceId }`.
   */
  groups?: Record<string, string>
  /**
   * Person properties to update on every capture (`$set`).
   * Use for mutable state like `plan`, `total_workflows`.
   */
  set?: PersonProperties
  /**
   * Person properties to set only once (`$set_once`).
   * Use for immutable milestones like `first_execution_at`.
   */
  setOnce?: PersonProperties
}

/**
 * Capture a server-side PostHog event. Fire-and-forget — never throws.
 *
 * @param distinctId - The user (or workspace/org) ID to associate the event with.
 * @param event - Typed event name from {@link PostHogEventMap}.
 * @param properties - Strongly-typed property bag for this event.
 * @param options - Optional groups, $set, and $set_once person properties.
 */
export function captureServerEvent<E extends PostHogEventName>(
  distinctId: string,
  event: E,
  properties: PostHogEventMap[E],
  options?: CaptureOptions
): void {
  try {
    const client = getClient()
    if (!client) return

    // Build the payload incrementally instead of spreading conditionals inline.
    const payload: Record<string, unknown> = { ...properties }
    if (options?.groups) payload.$groups = options.groups
    if (options?.set) payload.$set = options.set
    if (options?.setOnce) payload.$set_once = options.setOnce

    client.capture({ distinctId, event, properties: payload })
  } catch (error) {
    logger.warn('Failed to capture PostHog server event', { event, error })
  }
}

View File

@@ -158,6 +158,32 @@ export async function getCustomToolById(params: {
return legacyTool[0] || null
}
/**
 * Resolves a custom tool by either its id or its title.
 *
 * Workspace-scoped tools take precedence; when no workspace match exists (or no
 * workspaceId was given), falls back to the user's legacy workspace-less tools.
 * Returns the matching row, or null when nothing matches.
 */
export async function getCustomToolByIdOrTitle(params: {
  identifier: string
  userId: string
  workspaceId?: string
}) {
  const { identifier, userId, workspaceId } = params
  const matchesIdentifier = or(eq(customTools.id, identifier), eq(customTools.title, identifier))

  if (workspaceId) {
    const [workspaceTool] = await db
      .select()
      .from(customTools)
      .where(and(eq(customTools.workspaceId, workspaceId), matchesIdentifier))
      .limit(1)
    if (workspaceTool) return workspaceTool
  }

  const [legacyTool] = await db
    .select()
    .from(customTools)
    .where(and(isNull(customTools.workspaceId), eq(customTools.userId, userId), matchesIdentifier))
    .limit(1)
  return legacyTool || null
}
export async function deleteCustomTool(params: {
toolId: string
userId: string

View File

@@ -1,6 +1,7 @@
import { createLogger } from '@sim/logger'
import { v4 as uuidv4 } from 'uuid'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { captureServerEvent } from '@/lib/posthog/server'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { PauseResumeManager } from '@/lib/workflows/executor/human-in-the-loop-manager'
import { ExecutionSnapshot } from '@/executor/execution/snapshot'
@@ -79,6 +80,8 @@ export async function executeWorkflow(
streamConfig?.selectedOutputs || []
)
const executionStartMs = Date.now()
const result = await executeWorkflowCore({
snapshot,
callbacks: {
@@ -97,6 +100,33 @@ export async function executeWorkflow(
runFromBlock: streamConfig?.runFromBlock,
})
const blockTypes = [
...new Set(
(result.logs ?? [])
.map((log) => log.blockType)
.filter((t): t is string => typeof t === 'string')
),
]
if (result.status !== 'paused') {
captureServerEvent(
actorUserId,
'workflow_executed',
{
workflow_id: workflowId,
workspace_id: workspaceId,
trigger_type: triggerType,
success: result.success,
block_count: result.logs?.length ?? 0,
block_types: blockTypes.join(','),
duration_ms: Date.now() - executionStartMs,
},
{
groups: { workspace: workspaceId },
setOnce: { first_execution_at: new Date().toISOString() },
}
)
}
if (result.status === 'paused') {
if (!result.snapshotSeed) {
logger.error(`[${requestId}] Missing snapshot seed for paused execution`, {
@@ -123,7 +153,14 @@ export async function executeWorkflow(
}
}
} else {
await PauseResumeManager.processQueuedResumes(executionId)
try {
await PauseResumeManager.processQueuedResumes(executionId)
} catch (resumeError) {
logger.error(`[${requestId}] Failed to process queued resumes`, {
executionId,
error: resumeError instanceof Error ? resumeError.message : String(resumeError),
})
}
}
if (streamConfig?.skipLoggingComplete) {
@@ -139,6 +176,19 @@ export async function executeWorkflow(
return result
} catch (error: unknown) {
logger.error(`[${requestId}] Workflow execution failed:`, error)
captureServerEvent(
actorUserId,
'workflow_execution_failed',
{
workflow_id: workflow.id,
workspace_id: workspaceId,
trigger_type: streamConfig?.workflowTriggerType || 'api',
error_message: error instanceof Error ? error.message : String(error),
},
{ groups: { workspace: workspaceId } }
)
throw error
}
}

Some files were not shown because too many files have changed in this diff Show More