v0.6.30: slack trigger enhancements, connectors performance improvements, secrets performance, polling refactors, drag resources in mothership

This commit is contained in:
Waleed
2026-04-08 01:00:43 -07:00
committed by GitHub
137 changed files with 6051 additions and 2897 deletions

View File

@@ -4687,6 +4687,33 @@ export function CloudFormationIcon(props: SVGProps<SVGSVGElement>) {
)
}
/**
 * Amazon Athena service icon.
 *
 * Renders the Athena glyph (group/path ids indicate it comes from the AWS
 * Architecture icon set, "Amazon-Athena_Icon_64_Squid") inside an 80x80
 * viewBox. The transform scales the 64px artwork by 1.25 about the viewBox
 * center so it fills the frame. Fill uses `currentColor`, so the icon
 * inherits its color from the surrounding text/CSS.
 *
 * @param props - Standard SVG props spread onto the root `<svg>` element
 *   (className, width/height, aria attributes, etc.).
 */
export function AthenaIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg
      {...props}
      viewBox='0 0 80 80'
      version='1.1'
      xmlns='http://www.w3.org/2000/svg'
      xmlnsXlink='http://www.w3.org/1999/xlink'
    >
      <g
        id='Icon-Architecture/64/Arch_Amazon-Athena_64'
        stroke='none'
        strokeWidth='1'
        fill='none'
        fillRule='evenodd'
        transform='translate(40, 40) scale(1.25) translate(-40, -40)'
      >
        {/* Single compound path containing the full Athena glyph outline. */}
        <path
          d='M38.29505,27.2267312 C42.787319,27.2267312 45.2478437,28.2331825 45.6964751,28.7379193 C45.2478437,29.2426562 42.787319,30.2491074 38.29505,30.2491074 C33.8027811,30.2491074 31.3422564,29.2426562 30.893625,28.7379193 C31.3422564,28.2331825 33.8027811,27.2267312 38.29505,27.2267312 L38.29505,27.2267312 Z M37.7838882,35.2823712 C37.6191254,35.1977447 37.5029973,35.0294991 37.5029973,34.8300223 C37.5029973,34.5499487 37.7292981,34.3212556 38.0062188,34.3212556 C38.0866151,34.3212556 38.1600636,34.3444272 38.2285494,34.3796882 L37.7838882,35.2823712 Z M43.5674612,43.5908834 C43.4930201,43.6513309 43.322302,43.7681961 42.9709403,43.9092403 C42.6582879,44.0341652 42.2880677,44.1470006 41.8682202,44.2457316 C40.7525971,44.5076708 39.3808968,44.6517374 38.0052262,44.6517374 C34.9968155,44.6517374 32.9005556,44.0019265 32.4489466,43.5989431 L31.1159556,31.150783 C33.1596104,31.9869737 36.1700063,32.2640249 38.29505,32.2640249 C40.3843621,32.2640249 43.3292498,31.9950334 45.3719121,31.1910813 L44.5748967,36.6656121 C43.0731726,36.0994203 41.1992434,35.2773339 39.4235763,34.4129344 C39.2429327,33.786295 38.6801584,33.3248789 38.0062188,33.3248789 C37.1883598,33.3248789 36.5233532,34.0008837 36.5233532,34.8300223 C36.5233532,35.6611757 37.1883598,36.3361731 38.0062188,36.3361731 C38.1997655,36.3361731 38.3843793,36.2958747 38.5531123,36.2273675 C41.0344805,37.4524373 42.8835961,38.2382552 44.2751474,38.7228428 L43.5674612,43.5908834 Z M28.8718062,28.8467249 L30.4787403,43.8498003 C30.5918907,46.6344162 37.6995217,46.6666549 38.0052262,46.6666549 C39.5268012,46.6666549 41.0573091,46.5034466 42.3148665,46.2092686 C42.8299985,46.0883736 43.2964958,45.9453144 43.7004625,45.7831136 C44.8736534,45.3116229 45.4890327,44.6688642 45.5317122,43.8739793 L46.2006891,39.2759376 C46.6562683,39.3696313 47.0284735,39.4109371 47.3252452,39.4109371 C48.2592321,39.4109371 48.5053839,39.0281028 48.6751094,38.7641486 C48.853768,38.48609 48.9053804,38.1445615 48.8220064,37.8010181 
C48.6314374,37.0111704 47.5168068,35.971473 46.7723963,35.3539008 L47.7133311,28.8850083 L47.7043982,28.8840008 C47.7083684,28.8346354 47.7242492,28.7882923 47.7242492,28.7379193 C47.7242492,25.9543109 41.7967568,25.2118138 38.29505,25.2118138 C34.7933433,25.2118138 28.8658509,25.9543109 28.8658509,28.7379193 C28.8658509,28.7751953 28.8787541,28.8084414 28.8807391,28.8457174 L28.8718062,28.8467249 Z M37.8355007,20.0596698 C46.4865427,20.0596698 53.5246954,27.2035597 53.5246954,35.98457 C53.5246954,44.7655803 46.4865427,51.9094701 37.8355007,51.9094701 C29.1834661,51.9094701 22.1453133,44.7655803 22.1453133,35.98457 C22.1453133,27.2035597 29.1834661,20.0596698 37.8355007,20.0596698 L37.8355007,20.0596698 Z M12.9850945,41.8348828 L12.9850945,43.8498003 L21.91802,43.8498003 L21.91802,43.7309201 C24.7735785,49.7494786 30.8261318,53.9243876 37.8355007,53.9243876 C47.5803298,53.9243876 55.50979,45.8768072 55.50979,35.98457 C55.50979,26.0923327 47.5803298,18.0447524 37.8355007,18.0447524 C30.253432,18.0447524 23.7909567,22.9248825 21.2857674,29.7453781 L12.9850945,29.7453781 L12.9850945,31.7602955 L20.6763434,31.7602955 C20.3666686,33.0568949 20.1850325,34.4018523 20.1701443,35.7901304 L11,35.7901304 L11,37.8050479 L20.2515331,37.8050479 C20.3914823,39.2044081 20.7061198,40.548358 21.1448257,41.8348828 L12.9850945,41.8348828 Z M67.0799136,66.035049 C65.8789314,67.2560889 63.7965672,67.2631412 62.5965775,66.046131 L51.9326496,55.220987 C53.6487638,53.9223727 55.1802643,52.3900279 56.4934043,50.6763406 L67.0918241,61.4853653 C67.688345,62.0918555 68.0168782,62.8998374 68.014902,63.7591997 C68.0139005,64.6205769 67.6823898,65.4275513 67.0799136,66.035049 L67.0799136,66.035049 Z M68.4972711,60.0628336 L57.6616325,49.0100039 C60.0635969,45.2562127 61.4650736,40.7851108 61.4650736,35.98457 C61.4650736,22.7586518 50.8646687,12 37.8355007,12 C28.4728022,12 19.9825528,17.6196048 16.2039254,26.316996 L18.0202869,27.1290077 C21.4812992,19.1630316 29.2588997,14.0149175 
37.8355007,14.0149175 C49.7708816,14.0149175 59.4799791,23.8698788 59.4799791,35.98457 C59.4799791,48.0982537 49.7708816,57.9542225 37.8355007,57.9542225 C29.8623684,57.9542225 22.5572205,53.5244265 18.7686675,46.3936336 L17.0217843,47.3507194 C21.1557437,55.1343455 29.1318536,59.9691399 37.8355007,59.9691399 C42.3912926,59.9691399 46.6483279,58.6503765 50.2602074,56.3735197 L61.1941082,67.4716851 C62.1648195,68.4569797 63.4561235,69 64.8278238,69 C66.2074645,69 67.5067089,68.4529499 68.4813903,67.462618 C69.4580568,66.4773233 69.9980025,65.1635972 70,63.7622221 C70.0029653,62.3628619 69.4679823,61.0491357 68.4972711,60.0628336 L68.4972711,60.0628336 Z'
          id='Amazon-Athena_Icon_64_Squid'
          fill='currentColor'
        />
      </g>
    </svg>
  )
}
export function CloudWatchIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg

View File

@@ -16,6 +16,7 @@ import {
ArxivIcon,
AsanaIcon,
AshbyIcon,
AthenaIcon,
AttioIcon,
AzureIcon,
BoxCompanyIcon,
@@ -205,6 +206,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
arxiv: ArxivIcon,
asana: AsanaIcon,
ashby: AshbyIcon,
athena: AthenaIcon,
attio: AttioIcon,
box: BoxCompanyIcon,
brandfetch: BrandfetchIcon,

View File

@@ -0,0 +1,238 @@
---
title: Athena
description: Run SQL queries on data in Amazon S3 using AWS Athena
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
<BlockInfoCard
type="athena"
color="linear-gradient(45deg, #4D27A8 0%, #A166FF 100%)"
/>
{/* MANUAL-CONTENT-START:intro */}
[Amazon Athena](https://aws.amazon.com/athena/) is an interactive query service from AWS that makes it easy to analyze data directly in Amazon S3 using standard SQL. Athena is serverless, so there is no infrastructure to manage, and you pay only for the queries you run.
With Athena, you can:
- **Query data in S3**: Run SQL queries directly against data stored in Amazon S3 without loading it into a database
- **Support multiple formats**: Query CSV, JSON, Parquet, ORC, Avro, and other common data formats
- **Integrate with AWS Glue**: Use the AWS Glue Data Catalog to manage table metadata and schemas
- **Scale automatically**: Handle queries of any size without provisioning servers or clusters
- **Save and reuse queries**: Create named queries for frequently used SQL statements
In Sim, the Athena integration enables your agents to run SQL queries against data in S3, check query execution status, retrieve results, and manage saved queries — all within your agent workflows. Supported operations include:
- **Start Query**: Execute SQL queries against your S3 data
- **Get Query Execution**: Check the status and details of a running or completed query
- **Get Query Results**: Retrieve the results of a completed query
- **Stop Query**: Cancel a running query execution
- **List Query Executions**: View recent query execution IDs
- **Create Named Query**: Save a query for reuse
- **Get Named Query**: Retrieve details of a saved query
- **List Named Queries**: View all saved query IDs
This integration empowers Sim agents to automate data analysis tasks using AWS Athena, enabling workflows that query, monitor, and manage large-scale data in S3 without manual effort or infrastructure management.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate AWS Athena into workflows. Execute SQL queries against data in S3, check query status, retrieve results, manage named queries, and list executions. Requires AWS access key and secret access key.
## Tools
### `athena_start_query`
Start an SQL query execution in AWS Athena
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `awsRegion` | string | Yes | AWS region \(e.g., us-east-1\) |
| `awsAccessKeyId` | string | Yes | AWS access key ID |
| `awsSecretAccessKey` | string | Yes | AWS secret access key |
| `queryString` | string | Yes | SQL query string to execute |
| `database` | string | No | Database name within the catalog |
| `catalog` | string | No | Data catalog name \(default: AwsDataCatalog\) |
| `outputLocation` | string | No | S3 output location for query results \(e.g., s3://bucket/path/\) |
| `workGroup` | string | No | Workgroup to execute the query in \(default: primary\) |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `queryExecutionId` | string | Unique ID of the started query execution |
### `athena_get_query_execution`
Get the status and details of an Athena query execution
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `awsRegion` | string | Yes | AWS region \(e.g., us-east-1\) |
| `awsAccessKeyId` | string | Yes | AWS access key ID |
| `awsSecretAccessKey` | string | Yes | AWS secret access key |
| `queryExecutionId` | string | Yes | Query execution ID to check |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `queryExecutionId` | string | Query execution ID |
| `query` | string | SQL query string |
| `state` | string | Query state \(QUEUED, RUNNING, SUCCEEDED, FAILED, CANCELLED\) |
| `stateChangeReason` | string | Reason for state change \(e.g., error message\) |
| `statementType` | string | Statement type \(DDL, DML, UTILITY\) |
| `database` | string | Database name |
| `catalog` | string | Data catalog name |
| `workGroup` | string | Workgroup name |
| `submissionDateTime` | number | Query submission time \(Unix epoch ms\) |
| `completionDateTime` | number | Query completion time \(Unix epoch ms\) |
| `dataScannedInBytes` | number | Amount of data scanned in bytes |
| `engineExecutionTimeInMillis` | number | Engine execution time in milliseconds |
| `queryPlanningTimeInMillis` | number | Query planning time in milliseconds |
| `queryQueueTimeInMillis` | number | Time the query spent in queue in milliseconds |
| `totalExecutionTimeInMillis` | number | Total execution time in milliseconds |
| `outputLocation` | string | S3 location of query results |
### `athena_get_query_results`
Retrieve the results of a completed Athena query execution
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `awsRegion` | string | Yes | AWS region \(e.g., us-east-1\) |
| `awsAccessKeyId` | string | Yes | AWS access key ID |
| `awsSecretAccessKey` | string | Yes | AWS secret access key |
| `queryExecutionId` | string | Yes | Query execution ID to get results for |
| `maxResults` | number | No | Maximum number of data rows to return \(1-999\) |
| `nextToken` | string | No | Pagination token from a previous request |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `columns` | array | Column metadata \(name and type\) |
| `rows` | array | Result rows as key-value objects |
| `nextToken` | string | Pagination token for next page of results |
| `updateCount` | number | Number of rows affected \(for INSERT/UPDATE statements\) |
### `athena_stop_query`
Stop a running Athena query execution
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `awsRegion` | string | Yes | AWS region \(e.g., us-east-1\) |
| `awsAccessKeyId` | string | Yes | AWS access key ID |
| `awsSecretAccessKey` | string | Yes | AWS secret access key |
| `queryExecutionId` | string | Yes | Query execution ID to stop |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `success` | boolean | Whether the query was successfully stopped |
### `athena_list_query_executions`
List recent Athena query execution IDs
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `awsRegion` | string | Yes | AWS region \(e.g., us-east-1\) |
| `awsAccessKeyId` | string | Yes | AWS access key ID |
| `awsSecretAccessKey` | string | Yes | AWS secret access key |
| `workGroup` | string | No | Workgroup to list executions for \(default: primary\) |
| `maxResults` | number | No | Maximum number of results \(0-50\) |
| `nextToken` | string | No | Pagination token from a previous request |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `queryExecutionIds` | array | List of query execution IDs |
| `nextToken` | string | Pagination token for next page |
### `athena_create_named_query`
Create a saved/named query in AWS Athena
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `awsRegion` | string | Yes | AWS region \(e.g., us-east-1\) |
| `awsAccessKeyId` | string | Yes | AWS access key ID |
| `awsSecretAccessKey` | string | Yes | AWS secret access key |
| `name` | string | Yes | Name for the saved query |
| `database` | string | Yes | Database the query runs against |
| `queryString` | string | Yes | SQL query string to save |
| `description` | string | No | Description of the named query |
| `workGroup` | string | No | Workgroup to create the named query in |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `namedQueryId` | string | ID of the created named query |
### `athena_get_named_query`
Get details of a saved/named query in AWS Athena
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `awsRegion` | string | Yes | AWS region \(e.g., us-east-1\) |
| `awsAccessKeyId` | string | Yes | AWS access key ID |
| `awsSecretAccessKey` | string | Yes | AWS secret access key |
| `namedQueryId` | string | Yes | Named query ID to retrieve |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `namedQueryId` | string | Named query ID |
| `name` | string | Name of the saved query |
| `description` | string | Query description |
| `database` | string | Database the query runs against |
| `queryString` | string | SQL query string |
| `workGroup` | string | Workgroup name |
### `athena_list_named_queries`
List saved/named query IDs in AWS Athena
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `awsRegion` | string | Yes | AWS region \(e.g., us-east-1\) |
| `awsAccessKeyId` | string | Yes | AWS access key ID |
| `awsSecretAccessKey` | string | Yes | AWS secret access key |
| `workGroup` | string | No | Workgroup to list named queries for |
| `maxResults` | number | No | Maximum number of results \(0-50\) |
| `nextToken` | string | No | Pagination token from a previous request |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `namedQueryIds` | array | List of named query IDs |
| `nextToken` | string | Pagination token for next page |

View File

@@ -13,6 +13,7 @@
"arxiv",
"asana",
"ashby",
"athena",
"attio",
"box",
"brandfetch",

View File

@@ -18,6 +18,7 @@ import {
xAIIcon,
} from '@/components/icons'
import { cn } from '@/lib/core/utils/cn'
import { workflowBorderColor } from '@/lib/workspaces/colors'
interface FeaturesPreviewProps {
activeTab: number
@@ -383,7 +384,7 @@ function MiniCardIcon({ variant, color }: { variant: CardVariant; color?: string
className='h-[7px] w-[7px] flex-shrink-0 rounded-[1.5px] border'
style={{
backgroundColor: c,
borderColor: `${c}60`,
borderColor: workflowBorderColor(c),
backgroundClip: 'padding-box',
}}
/>
@@ -470,7 +471,7 @@ function WorkflowCardBody({ color }: { color: string }) {
className='absolute top-2.5 left-[40px] h-[14px] w-[14px] rounded-[3px] border-[2px]'
style={{
backgroundColor: color,
borderColor: `${color}60`,
borderColor: workflowBorderColor(color),
backgroundClip: 'padding-box',
}}
/>
@@ -481,7 +482,7 @@ function WorkflowCardBody({ color }: { color: string }) {
className='absolute top-[36px] left-[68px] h-[14px] w-[14px] rounded-[3px] border-[2px]'
style={{
backgroundColor: color,
borderColor: `${color}60`,
borderColor: workflowBorderColor(color),
backgroundClip: 'padding-box',
opacity: 0.5,
}}
@@ -896,7 +897,7 @@ function MockLogDetailsSidebar({ selectedRow, onPrev, onNext }: MockLogDetailsSi
className='h-[10px] w-[10px] shrink-0 rounded-[3px] border-[1.5px]'
style={{
backgroundColor: color,
borderColor: `${color}60`,
borderColor: workflowBorderColor(color),
backgroundClip: 'padding-box',
}}
/>

View File

@@ -5,6 +5,7 @@ import { Download } from 'lucide-react'
import { ArrowUpDown, Badge, Library, ListFilter, Search } from '@/components/emcn'
import type { BadgeProps } from '@/components/emcn/components/badge/badge'
import { cn } from '@/lib/core/utils/cn'
import { workflowBorderColor } from '@/lib/workspaces/colors'
interface LogRow {
id: string
@@ -283,7 +284,7 @@ export function LandingPreviewLogs() {
className='h-[10px] w-[10px] flex-shrink-0 rounded-[3px] border-[1.5px]'
style={{
backgroundColor: log.workflowColor,
borderColor: `${log.workflowColor}60`,
borderColor: workflowBorderColor(log.workflowColor),
backgroundClip: 'padding-box',
}}
/>

View File

@@ -11,6 +11,7 @@ import {
Table,
} from '@/components/emcn/icons'
import { cn } from '@/lib/core/utils/cn'
import { workflowBorderColor } from '@/lib/workspaces/colors'
import type { PreviewWorkflow } from '@/app/(landing)/components/landing-preview/components/landing-preview-workflow/workflow-data'
export type SidebarView =
@@ -211,7 +212,7 @@ export function LandingPreviewSidebar({
className='h-[14px] w-[14px] flex-shrink-0 rounded-[4px] border-[2.5px]'
style={{
backgroundColor: workflow.color,
borderColor: `${workflow.color}60`,
borderColor: workflowBorderColor(workflow.color),
backgroundClip: 'padding-box',
}}
/>

View File

@@ -16,6 +16,7 @@ import {
ArxivIcon,
AsanaIcon,
AshbyIcon,
AthenaIcon,
AttioIcon,
AzureIcon,
BoxCompanyIcon,
@@ -205,6 +206,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
arxiv: ArxivIcon,
asana: AsanaIcon,
ashby: AshbyIcon,
athena: AthenaIcon,
attio: AttioIcon,
box: BoxCompanyIcon,
brandfetch: BrandfetchIcon,

View File

@@ -971,6 +971,57 @@
"integrationType": "hr",
"tags": ["hiring"]
},
{
"type": "athena",
"slug": "athena",
"name": "Athena",
"description": "Run SQL queries on data in Amazon S3 using AWS Athena",
"longDescription": "Integrate AWS Athena into workflows. Execute SQL queries against data in S3, check query status, retrieve results, manage named queries, and list executions. Requires AWS access key and secret access key.",
"bgColor": "linear-gradient(45deg, #4D27A8 0%, #A166FF 100%)",
"iconName": "AthenaIcon",
"docsUrl": "https://docs.sim.ai/tools/athena",
"operations": [
{
"name": "Start Query",
"description": "Start an SQL query execution in AWS Athena"
},
{
"name": "Get Query Execution",
"description": "Get the status and details of an Athena query execution"
},
{
"name": "Get Query Results",
"description": "Retrieve the results of a completed Athena query execution"
},
{
"name": "Stop Query",
"description": "Stop a running Athena query execution"
},
{
"name": "List Query Executions",
"description": "List recent Athena query execution IDs"
},
{
"name": "Create Named Query",
"description": "Create a saved/named query in AWS Athena"
},
{
"name": "Get Named Query",
"description": "Get details of a saved/named query in AWS Athena"
},
{
"name": "List Named Queries",
"description": "List saved/named query IDs in AWS Athena"
}
],
"operationCount": 8,
"triggers": [],
"triggerCount": 0,
"authType": "none",
"category": "tools",
"integrationType": "analytics",
"tags": ["cloud", "data-analytics"]
},
{
"type": "attio",
"slug": "attio",

View File

@@ -0,0 +1,69 @@
import { CreateNamedQueryCommand } from '@aws-sdk/client-athena'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createAthenaClient } from '@/app/api/tools/athena/utils'
const logger = createLogger('AthenaCreateNamedQuery')

/** Request payload: AWS credentials/region plus the named-query definition. */
const CreateNamedQuerySchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  name: z.string().min(1, 'Query name is required'),
  database: z.string().min(1, 'Database is required'),
  queryString: z.string().min(1, 'Query string is required'),
  description: z.string().optional(),
  workGroup: z.string().optional(),
})

/**
 * Creates a saved (named) query in AWS Athena and returns its ID.
 *
 * Responses: 401 when internal auth fails, 400 on schema validation
 * errors, 500 for AWS/SDK failures, otherwise `{ success, output }`.
 */
export async function POST(request: NextRequest) {
  try {
    const auth = await checkInternalAuth(request)
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const params = CreateNamedQuerySchema.parse(await request.json())

    const client = createAthenaClient({
      region: params.region,
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
    })

    // Optional fields are attached only when provided so the SDK input
    // carries no undefined keys.
    const response = await client.send(
      new CreateNamedQueryCommand({
        Name: params.name,
        Database: params.database,
        QueryString: params.queryString,
        ...(params.description && { Description: params.description }),
        ...(params.workGroup && { WorkGroup: params.workGroup }),
      })
    )

    if (!response.NamedQueryId) {
      throw new Error('No named query ID returned')
    }

    return NextResponse.json({
      success: true,
      output: { namedQueryId: response.NamedQueryId },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: error.errors[0]?.message ?? 'Invalid request' },
        { status: 400 }
      )
    }
    const errorMessage =
      error instanceof Error ? error.message : 'Failed to create Athena named query'
    logger.error('CreateNamedQuery failed', { error: errorMessage })
    return NextResponse.json({ error: errorMessage }, { status: 500 })
  }
}

View File

@@ -0,0 +1,66 @@
import { GetNamedQueryCommand } from '@aws-sdk/client-athena'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createAthenaClient } from '@/app/api/tools/athena/utils'
const logger = createLogger('AthenaGetNamedQuery')
const GetNamedQuerySchema = z.object({
region: z.string().min(1, 'AWS region is required'),
accessKeyId: z.string().min(1, 'AWS access key ID is required'),
secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
namedQueryId: z.string().min(1, 'Named query ID is required'),
})
export async function POST(request: NextRequest) {
try {
const auth = await checkInternalAuth(request)
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
}
const body = await request.json()
const data = GetNamedQuerySchema.parse(body)
const client = createAthenaClient({
region: data.region,
accessKeyId: data.accessKeyId,
secretAccessKey: data.secretAccessKey,
})
const command = new GetNamedQueryCommand({
NamedQueryId: data.namedQueryId,
})
const response = await client.send(command)
const namedQuery = response.NamedQuery
if (!namedQuery) {
throw new Error('No named query data returned')
}
return NextResponse.json({
success: true,
output: {
namedQueryId: namedQuery.NamedQueryId ?? data.namedQueryId,
name: namedQuery.Name ?? '',
description: namedQuery.Description ?? null,
database: namedQuery.Database ?? '',
queryString: namedQuery.QueryString ?? '',
workGroup: namedQuery.WorkGroup ?? null,
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'Failed to get Athena named query'
logger.error('GetNamedQuery failed', { error: errorMessage })
return NextResponse.json({ error: errorMessage }, { status: 500 })
}
}

View File

@@ -0,0 +1,77 @@
import { GetQueryExecutionCommand } from '@aws-sdk/client-athena'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createAthenaClient } from '@/app/api/tools/athena/utils'
const logger = createLogger('AthenaGetQueryExecution')

// Request payload: AWS credentials/region plus the execution ID to inspect.
const GetQueryExecutionSchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  queryExecutionId: z.string().min(1, 'Query execution ID is required'),
})

/**
 * Fetches status, statistics, and the result location for a single Athena
 * query execution.
 *
 * Responses: 401 when internal auth fails, 400 on schema validation errors,
 * 500 for AWS/SDK failures, otherwise `{ success, output }` with a flattened
 * view of the SDK's QueryExecution structure (missing fields become null).
 */
export async function POST(request: NextRequest) {
  try {
    const auth = await checkInternalAuth(request)
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }
    const body = await request.json()
    const data = GetQueryExecutionSchema.parse(body)
    const client = createAthenaClient({
      region: data.region,
      accessKeyId: data.accessKeyId,
      secretAccessKey: data.secretAccessKey,
    })
    const command = new GetQueryExecutionCommand({
      QueryExecutionId: data.queryExecutionId,
    })
    const response = await client.send(command)
    const execution = response.QueryExecution
    if (!execution) {
      throw new Error('No query execution data returned')
    }
    return NextResponse.json({
      success: true,
      output: {
        // Echo the requested ID if the SDK omits it.
        queryExecutionId: execution.QueryExecutionId ?? data.queryExecutionId,
        query: execution.Query ?? '',
        state: execution.Status?.State ?? 'UNKNOWN',
        stateChangeReason: execution.Status?.StateChangeReason ?? null,
        statementType: execution.StatementType ?? null,
        database: execution.QueryExecutionContext?.Database ?? null,
        catalog: execution.QueryExecutionContext?.Catalog ?? null,
        workGroup: execution.WorkGroup ?? null,
        // SDK returns Date objects; convert to Unix epoch milliseconds.
        submissionDateTime: execution.Status?.SubmissionDateTime?.getTime() ?? null,
        completionDateTime: execution.Status?.CompletionDateTime?.getTime() ?? null,
        dataScannedInBytes: execution.Statistics?.DataScannedInBytes ?? null,
        engineExecutionTimeInMillis: execution.Statistics?.EngineExecutionTimeInMillis ?? null,
        queryPlanningTimeInMillis: execution.Statistics?.QueryPlanningTimeInMillis ?? null,
        queryQueueTimeInMillis: execution.Statistics?.QueryQueueTimeInMillis ?? null,
        totalExecutionTimeInMillis: execution.Statistics?.TotalExecutionTimeInMillis ?? null,
        outputLocation: execution.ResultConfiguration?.OutputLocation ?? null,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: error.errors[0]?.message ?? 'Invalid request' },
        { status: 400 }
      )
    }
    const errorMessage =
      error instanceof Error ? error.message : 'Failed to get Athena query execution'
    logger.error('GetQueryExecution failed', { error: errorMessage })
    return NextResponse.json({ error: errorMessage }, { status: 500 })
  }
}

View File

@@ -0,0 +1,88 @@
import { GetQueryResultsCommand } from '@aws-sdk/client-athena'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createAthenaClient } from '@/app/api/tools/athena/utils'
const logger = createLogger('AthenaGetQueryResults')

// Request payload for fetching paginated query results.
// maxResults: '' / null / undefined are treated as "not provided"; otherwise
// coerced to a positive int. Capped at 999 because the handler requests one
// extra row on the first page (see below) and Athena's MaxResults cap is 1000.
const GetQueryResultsSchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  queryExecutionId: z.string().min(1, 'Query execution ID is required'),
  maxResults: z.preprocess(
    (v) => (v === '' || v === undefined || v === null ? undefined : v),
    z.number({ coerce: true }).int().positive().max(999).optional()
  ),
  nextToken: z.string().optional(),
})

/**
 * Retrieves the results of a completed Athena query execution.
 *
 * Athena prepends a header row (column names) to the FIRST page of results,
 * so this handler over-fetches by one row on the first page and drops that
 * header before mapping rows to `{ columnName: value }` records.
 *
 * Responses: 401 when internal auth fails, 400 on schema validation errors,
 * 500 for AWS/SDK failures, otherwise `{ success, output }` with columns,
 * rows, and pagination token.
 */
export async function POST(request: NextRequest) {
  try {
    const auth = await checkInternalAuth(request)
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }
    const body = await request.json()
    const data = GetQueryResultsSchema.parse(body)
    const client = createAthenaClient({
      region: data.region,
      accessKeyId: data.accessKeyId,
      secretAccessKey: data.secretAccessKey,
    })
    // First page only: the header row consumes one slot, so ask Athena for
    // maxResults + 1 rows so the caller still receives up to maxResults
    // data rows. Subsequent pages (nextToken present) have no header.
    const isFirstPage = !data.nextToken
    const adjustedMaxResults =
      data.maxResults !== undefined && isFirstPage ? data.maxResults + 1 : data.maxResults
    const command = new GetQueryResultsCommand({
      QueryExecutionId: data.queryExecutionId,
      ...(adjustedMaxResults !== undefined && { MaxResults: adjustedMaxResults }),
      ...(data.nextToken && { NextToken: data.nextToken }),
    })
    const response = await client.send(command)
    const columnInfo = response.ResultSet?.ResultSetMetadata?.ColumnInfo ?? []
    const columns = columnInfo.map((col) => ({
      name: col.Name ?? '',
      type: col.Type ?? 'varchar',
    }))
    const rawRows = response.ResultSet?.Rows ?? []
    // Drop the header row on the first page only.
    const dataRows = data.nextToken ? rawRows : rawRows.slice(1)
    // Convert each positional row into a { columnName: cellValue } record;
    // missing cells become ''.
    const rows = dataRows.map((row) => {
      const record: Record<string, string> = {}
      const rowData = row.Data ?? []
      for (let i = 0; i < columns.length; i++) {
        record[columns[i].name] = rowData[i]?.VarCharValue ?? ''
      }
      return record
    })
    return NextResponse.json({
      success: true,
      output: {
        columns,
        rows,
        nextToken: response.NextToken ?? null,
        // updateCount is populated for DML statements (e.g. INSERT).
        updateCount: response.UpdateCount ?? null,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: error.errors[0]?.message ?? 'Invalid request' },
        { status: 400 }
      )
    }
    const errorMessage =
      error instanceof Error ? error.message : 'Failed to get Athena query results'
    logger.error('GetQueryResults failed', { error: errorMessage })
    return NextResponse.json({ error: errorMessage }, { status: 500 })
  }
}

View File

@@ -0,0 +1,65 @@
import { ListNamedQueriesCommand } from '@aws-sdk/client-athena'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createAthenaClient } from '@/app/api/tools/athena/utils'
const logger = createLogger('AthenaListNamedQueries')

/** Request payload: AWS credentials/region plus optional listing filters. */
const ListNamedQueriesSchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  workGroup: z.string().optional(),
  // '' / null / undefined mean "not provided"; otherwise coerce to an int in [0, 50].
  maxResults: z.preprocess(
    (v) => (v === '' || v == null ? undefined : v),
    z.number({ coerce: true }).int().min(0).max(50).optional()
  ),
  nextToken: z.string().optional(),
})

/**
 * Lists saved (named) query IDs in AWS Athena, with optional pagination.
 *
 * Responses: 401 when internal auth fails, 400 on schema validation
 * errors, 500 for AWS/SDK failures, otherwise `{ success, output }`.
 */
export async function POST(request: NextRequest) {
  try {
    const auth = await checkInternalAuth(request)
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const params = ListNamedQueriesSchema.parse(await request.json())

    const client = createAthenaClient({
      region: params.region,
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
    })

    // Forward only the filters the caller actually supplied.
    const input: ConstructorParameters<typeof ListNamedQueriesCommand>[0] = {}
    if (params.workGroup) {
      input.WorkGroup = params.workGroup
    }
    if (params.maxResults !== undefined) {
      input.MaxResults = params.maxResults
    }
    if (params.nextToken) {
      input.NextToken = params.nextToken
    }

    const response = await client.send(new ListNamedQueriesCommand(input))

    return NextResponse.json({
      success: true,
      output: {
        namedQueryIds: response.NamedQueryIds ?? [],
        nextToken: response.NextToken ?? null,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: error.errors[0]?.message ?? 'Invalid request' },
        { status: 400 }
      )
    }
    const errorMessage =
      error instanceof Error ? error.message : 'Failed to list Athena named queries'
    logger.error('ListNamedQueries failed', { error: errorMessage })
    return NextResponse.json({ error: errorMessage }, { status: 500 })
  }
}

View File

@@ -0,0 +1,65 @@
import { ListQueryExecutionsCommand } from '@aws-sdk/client-athena'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createAthenaClient } from '@/app/api/tools/athena/utils'
const logger = createLogger('AthenaListQueryExecutions')

/**
 * Request payload for listing Athena query executions. `maxResults` treats
 * '' / null / undefined as absent and coerces numeric strings; the schema
 * bounds it to 0-50.
 */
const ListQueryExecutionsSchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  workGroup: z.string().optional(),
  maxResults: z.preprocess(
    (value) => (value === '' || value == null ? undefined : value),
    z.number({ coerce: true }).int().min(0).max(50).optional()
  ),
  nextToken: z.string().optional(),
})

/**
 * Lists Athena query execution IDs for the supplied credentials.
 *
 * Responses: 401 when internal auth fails, 400 on validation errors,
 * 500 on AWS/unknown failures, otherwise `{ success, output }` with the
 * execution IDs and the pagination token (null when exhausted).
 */
export async function POST(request: NextRequest) {
  try {
    // Reject unauthenticated callers before touching the body.
    const auth = await checkInternalAuth(request)
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const params = ListQueryExecutionsSchema.parse(await request.json())

    const client = createAthenaClient({
      region: params.region,
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
    })

    // Build the command input incrementally so absent optional fields are
    // omitted entirely rather than passed as undefined.
    const input: { WorkGroup?: string; MaxResults?: number; NextToken?: string } = {}
    if (params.workGroup) input.WorkGroup = params.workGroup
    if (params.maxResults !== undefined) input.MaxResults = params.maxResults
    if (params.nextToken) input.NextToken = params.nextToken

    const response = await client.send(new ListQueryExecutionsCommand(input))

    return NextResponse.json({
      success: true,
      output: {
        queryExecutionIds: response.QueryExecutionIds ?? [],
        nextToken: response.NextToken ?? null,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: error.errors[0]?.message ?? 'Invalid request' },
        { status: 400 }
      )
    }
    const errorMessage =
      error instanceof Error ? error.message : 'Failed to list Athena query executions'
    logger.error('ListQueryExecutions failed', { error: errorMessage })
    return NextResponse.json({ error: errorMessage }, { status: 500 })
  }
}

View File

@@ -0,0 +1,80 @@
import { StartQueryExecutionCommand } from '@aws-sdk/client-athena'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createAthenaClient } from '@/app/api/tools/athena/utils'
const logger = createLogger('AthenaStartQuery')

/** Request payload for starting an Athena query execution. */
const StartQuerySchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  queryString: z.string().min(1, 'Query string is required'),
  database: z.string().optional(),
  catalog: z.string().optional(),
  outputLocation: z.string().optional(),
  workGroup: z.string().optional(),
})

/**
 * Starts an Athena query execution and returns its execution ID.
 *
 * Responses: 401 when internal auth fails, 400 on validation errors,
 * 500 when AWS fails or no execution ID comes back, otherwise
 * `{ success, output: { queryExecutionId } }`.
 */
export async function POST(request: NextRequest) {
  try {
    // Reject unauthenticated callers before touching the body.
    const auth = await checkInternalAuth(request)
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const params = StartQuerySchema.parse(await request.json())

    const client = createAthenaClient({
      region: params.region,
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
    })

    // Assemble the command input step by step so optional sections are only
    // present when the caller supplied the corresponding fields.
    const input: {
      QueryString: string
      QueryExecutionContext?: { Database?: string; Catalog?: string }
      ResultConfiguration?: { OutputLocation: string }
      WorkGroup?: string
    } = { QueryString: params.queryString }

    if (params.database || params.catalog) {
      input.QueryExecutionContext = {
        ...(params.database && { Database: params.database }),
        ...(params.catalog && { Catalog: params.catalog }),
      }
    }
    if (params.outputLocation) {
      input.ResultConfiguration = { OutputLocation: params.outputLocation }
    }
    if (params.workGroup) input.WorkGroup = params.workGroup

    const response = await client.send(new StartQueryExecutionCommand(input))

    // A successful call must yield an execution ID; treat its absence as an error.
    if (!response.QueryExecutionId) {
      throw new Error('No query execution ID returned')
    }

    return NextResponse.json({
      success: true,
      output: {
        queryExecutionId: response.QueryExecutionId,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: error.errors[0]?.message ?? 'Invalid request' },
        { status: 400 }
      )
    }
    const errorMessage = error instanceof Error ? error.message : 'Failed to start Athena query'
    logger.error('StartQuery failed', { error: errorMessage })
    return NextResponse.json({ error: errorMessage }, { status: 500 })
  }
}

View File

@@ -0,0 +1,56 @@
import { StopQueryExecutionCommand } from '@aws-sdk/client-athena'
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalAuth } from '@/lib/auth/hybrid'
import { createAthenaClient } from '@/app/api/tools/athena/utils'
const logger = createLogger('AthenaStopQuery')

/** Request payload for stopping a running Athena query execution. */
const StopQuerySchema = z.object({
  region: z.string().min(1, 'AWS region is required'),
  accessKeyId: z.string().min(1, 'AWS access key ID is required'),
  secretAccessKey: z.string().min(1, 'AWS secret access key is required'),
  queryExecutionId: z.string().min(1, 'Query execution ID is required'),
})

/**
 * Stops the given Athena query execution.
 *
 * Responses: 401 when internal auth fails, 400 on validation errors,
 * 500 on AWS/unknown failures, otherwise `{ success, output: { success } }`.
 */
export async function POST(request: NextRequest) {
  try {
    // Reject unauthenticated callers before touching the body.
    const auth = await checkInternalAuth(request)
    if (!auth.success || !auth.userId) {
      return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
    }

    const params = StopQuerySchema.parse(await request.json())

    const client = createAthenaClient({
      region: params.region,
      accessKeyId: params.accessKeyId,
      secretAccessKey: params.secretAccessKey,
    })

    await client.send(new StopQueryExecutionCommand({ QueryExecutionId: params.queryExecutionId }))

    return NextResponse.json({
      success: true,
      output: {
        success: true,
      },
    })
  } catch (error) {
    if (error instanceof z.ZodError) {
      return NextResponse.json(
        { error: error.errors[0]?.message ?? 'Invalid request' },
        { status: 400 }
      )
    }
    const errorMessage = error instanceof Error ? error.message : 'Failed to stop Athena query'
    logger.error('StopQuery failed', { error: errorMessage })
    return NextResponse.json({ error: errorMessage }, { status: 500 })
  }
}

View File

@@ -0,0 +1,17 @@
import { AthenaClient } from '@aws-sdk/client-athena'
/** Static AWS credentials plus the region to target. */
interface AwsCredentials {
  region: string
  accessKeyId: string
  secretAccessKey: string
}

/** Constructs an AthenaClient bound to the given region and static credential pair. */
export function createAthenaClient({ region, accessKeyId, secretAccessKey }: AwsCredentials): AthenaClient {
  return new AthenaClient({
    region,
    credentials: { accessKeyId, secretAccessKey },
  })
}

View File

@@ -53,6 +53,12 @@ export async function POST(request: NextRequest) {
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage =
error instanceof Error ? error.message : 'Failed to describe stack drift detection status'
logger.error('DescribeStackDriftDetectionStatus failed', { error: errorMessage })

View File

@@ -70,6 +70,12 @@ export async function POST(request: NextRequest) {
output: { events },
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage =
error instanceof Error ? error.message : 'Failed to describe CloudFormation stack events'
logger.error('DescribeStackEvents failed', { error: errorMessage })

View File

@@ -78,6 +78,12 @@ export async function POST(request: NextRequest) {
output: { stacks },
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage =
error instanceof Error ? error.message : 'Failed to describe CloudFormation stacks'
logger.error('DescribeStacks failed', { error: errorMessage })

View File

@@ -48,6 +48,12 @@ export async function POST(request: NextRequest) {
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage =
error instanceof Error ? error.message : 'Failed to detect CloudFormation stack drift'
logger.error('DetectStackDrift failed', { error: errorMessage })

View File

@@ -45,6 +45,12 @@ export async function POST(request: NextRequest) {
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage =
error instanceof Error ? error.message : 'Failed to get CloudFormation template'
logger.error('GetTemplate failed', { error: errorMessage })

View File

@@ -67,6 +67,12 @@ export async function POST(request: NextRequest) {
output: { resources },
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage =
error instanceof Error ? error.message : 'Failed to list CloudFormation stack resources'
logger.error('ListStackResources failed', { error: errorMessage })

View File

@@ -53,6 +53,12 @@ export async function POST(request: NextRequest) {
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage =
error instanceof Error ? error.message : 'Failed to validate CloudFormation template'
logger.error('ValidateTemplate failed', { error: errorMessage })

View File

@@ -88,6 +88,12 @@ export async function POST(request: NextRequest) {
output: { alarms: [...metricAlarms, ...compositeAlarms] },
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage =
error instanceof Error ? error.message : 'Failed to describe CloudWatch alarms'
logger.error('DescribeAlarms failed', { error: errorMessage })

View File

@@ -54,6 +54,12 @@ export async function POST(request: NextRequest) {
output: { logGroups },
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage =
error instanceof Error ? error.message : 'Failed to describe CloudWatch log groups'
logger.error('DescribeLogGroups failed', { error: errorMessage })

View File

@@ -44,6 +44,12 @@ export async function POST(request: NextRequest) {
output: { logStreams: result.logStreams },
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage =
error instanceof Error ? error.message : 'Failed to describe CloudWatch log streams'
logger.error('DescribeLogStreams failed', { error: errorMessage })

View File

@@ -52,6 +52,12 @@ export async function POST(request: NextRequest) {
output: { events: result.events },
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage =
error instanceof Error ? error.message : 'Failed to get CloudWatch log events'
logger.error('GetLogEvents failed', { error: errorMessage })

View File

@@ -89,6 +89,12 @@ export async function POST(request: NextRequest) {
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage =
error instanceof Error ? error.message : 'Failed to get CloudWatch metric statistics'
logger.error('GetMetricStatistics failed', { error: errorMessage })

View File

@@ -62,6 +62,12 @@ export async function POST(request: NextRequest) {
output: { metrics },
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage =
error instanceof Error ? error.message : 'Failed to list CloudWatch metrics'
logger.error('ListMetrics failed', { error: errorMessage })

View File

@@ -63,6 +63,12 @@ export async function POST(request: NextRequest) {
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage =
error instanceof Error ? error.message : 'CloudWatch Log Insights query failed'
logger.error('QueryLogs failed', { error: errorMessage })

View File

@@ -41,6 +41,12 @@ export async function POST(request: NextRequest) {
message: 'Item deleted successfully',
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'DynamoDB delete failed'
return NextResponse.json({ error: errorMessage }, { status: 500 })
}

View File

@@ -48,6 +48,12 @@ export async function POST(request: NextRequest) {
item: result.item,
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'DynamoDB get failed'
return NextResponse.json({ error: errorMessage }, { status: 500 })
}

View File

@@ -36,6 +36,12 @@ export async function POST(request: NextRequest) {
item: validatedData.item,
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'DynamoDB put failed'
return NextResponse.json({ error: errorMessage }, { status: 500 })
}

View File

@@ -51,6 +51,12 @@ export async function POST(request: NextRequest) {
count: result.count,
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'DynamoDB query failed'
return NextResponse.json({ error: errorMessage }, { status: 500 })
}

View File

@@ -45,6 +45,12 @@ export async function POST(request: NextRequest) {
count: result.count,
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'DynamoDB scan failed'
return NextResponse.json({ error: errorMessage }, { status: 500 })
}

View File

@@ -50,6 +50,12 @@ export async function POST(request: NextRequest) {
item: result.attributes,
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
const errorMessage = error instanceof Error ? error.message : 'DynamoDB update failed'
return NextResponse.json({ error: errorMessage }, { status: 500 })
}

View File

@@ -240,6 +240,12 @@ export async function POST(request: NextRequest) {
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ success: false, error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error downloading Google Drive file:`, error)
return NextResponse.json(
{

View File

@@ -165,6 +165,12 @@ export async function POST(request: NextRequest) {
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ success: false, error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error downloading OneDrive file:`, error)
return NextResponse.json(
{

View File

@@ -176,6 +176,12 @@ export async function POST(request: NextRequest) {
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ success: false, error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error creating Outlook draft:`, error)
return NextResponse.json(
{

View File

@@ -189,6 +189,12 @@ export async function POST(request: NextRequest) {
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ success: false, error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error sending Outlook email:`, error)
return NextResponse.json(
{

View File

@@ -158,6 +158,12 @@ export async function POST(request: NextRequest) {
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ success: false, error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error downloading Slack file:`, error)
return NextResponse.json(
{

View File

@@ -84,6 +84,12 @@ export async function POST(request: NextRequest) {
},
})
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ success: false, error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error sending ephemeral message:`, error)
return NextResponse.json(
{

View File

@@ -77,6 +77,12 @@ export async function POST(request: NextRequest) {
return NextResponse.json({ success: true, output: result.output })
} catch (error) {
if (error instanceof z.ZodError) {
return NextResponse.json(
{ success: false, error: error.errors[0]?.message ?? 'Invalid request' },
{ status: 400 }
)
}
logger.error(`[${requestId}] Error sending Slack message:`, error)
return NextResponse.json(
{

View File

@@ -3,31 +3,36 @@ import { type NextRequest, NextResponse } from 'next/server'
import { verifyCronAuth } from '@/lib/auth/internal'
import { acquireLock, releaseLock } from '@/lib/core/config/redis'
import { generateShortId } from '@/lib/core/utils/uuid'
import { pollRssWebhooks } from '@/lib/webhooks/rss-polling-service'
import { pollProvider, VALID_POLLING_PROVIDERS } from '@/lib/webhooks/polling'
const logger = createLogger('RssPollingAPI')
const logger = createLogger('PollingAPI')
/** Lock TTL in seconds — must match maxDuration so the lock auto-expires if the function times out. */
const LOCK_TTL_SECONDS = 180
export const dynamic = 'force-dynamic'
export const maxDuration = 180 // Allow up to 3 minutes for polling to complete
export const maxDuration = 180
const LOCK_KEY = 'rss-polling-lock'
const LOCK_TTL_SECONDS = 180 // Same as maxDuration (3 min)
export async function GET(request: NextRequest) {
export async function GET(
request: NextRequest,
{ params }: { params: Promise<{ provider: string }> }
) {
const { provider } = await params
const requestId = generateShortId()
logger.info(`RSS webhook polling triggered (${requestId})`)
const LOCK_KEY = `${provider}-polling-lock`
let lockValue: string | undefined
try {
const authError = verifyCronAuth(request, 'RSS webhook polling')
if (authError) {
return authError
const authError = verifyCronAuth(request, `${provider} webhook polling`)
if (authError) return authError
if (!VALID_POLLING_PROVIDERS.has(provider)) {
return NextResponse.json({ error: `Unknown polling provider: ${provider}` }, { status: 404 })
}
lockValue = requestId
const locked = await acquireLock(LOCK_KEY, lockValue, LOCK_TTL_SECONDS)
if (!locked) {
return NextResponse.json(
{
@@ -40,21 +45,21 @@ export async function GET(request: NextRequest) {
)
}
const results = await pollRssWebhooks()
const results = await pollProvider(provider)
return NextResponse.json({
success: true,
message: 'RSS polling completed',
message: `${provider} polling completed`,
requestId,
status: 'completed',
...results,
})
} catch (error) {
logger.error(`Error during RSS polling (${requestId}):`, error)
logger.error(`Error during ${provider} polling (${requestId}):`, error)
return NextResponse.json(
{
success: false,
message: 'RSS polling failed',
message: `${provider} polling failed`,
error: error instanceof Error ? error.message : 'Unknown error',
requestId,
},

View File

@@ -1,68 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { verifyCronAuth } from '@/lib/auth/internal'
import { acquireLock, releaseLock } from '@/lib/core/config/redis'
import { generateShortId } from '@/lib/core/utils/uuid'
import { pollGmailWebhooks } from '@/lib/webhooks/gmail-polling-service'
const logger = createLogger('GmailPollingAPI')

// Opt out of static rendering so every cron invocation executes the handler.
export const dynamic = 'force-dynamic'
export const maxDuration = 180 // Allow up to 3 minutes for polling to complete

// Lock key shared across instances so only one Gmail poll runs at a time
// (acquireLock/releaseLock come from the redis config module).
const LOCK_KEY = 'gmail-polling-lock'
const LOCK_TTL_SECONDS = 180 // Same as maxDuration (3 min)

/**
 * Cron-invoked endpoint that polls Gmail webhooks.
 *
 * Flow: verify cron auth -> acquire the polling lock -> pollGmailWebhooks()
 * -> release the lock in `finally`. When the lock is already held, responds
 * 202 with status 'skip' instead of polling concurrently.
 */
export async function GET(request: NextRequest) {
  const requestId = generateShortId()
  logger.info(`Gmail webhook polling triggered (${requestId})`)
  let lockValue: string | undefined
  try {
    const authError = verifyCronAuth(request, 'Gmail webhook polling')
    if (authError) {
      return authError
    }
    lockValue = requestId // unique value to identify the holder
    const locked = await acquireLock(LOCK_KEY, lockValue, LOCK_TTL_SECONDS)
    if (!locked) {
      // Another run holds the lock: report a skip, not a failure.
      return NextResponse.json(
        {
          success: true,
          message: 'Polling already in progress skipped',
          requestId,
          status: 'skip',
        },
        { status: 202 }
      )
    }
    const results = await pollGmailWebhooks()
    return NextResponse.json({
      success: true,
      message: 'Gmail polling completed',
      requestId,
      status: 'completed',
      ...results, // merge poll results into the response body
    })
  } catch (error) {
    logger.error(`Error during Gmail polling (${requestId}):`, error)
    return NextResponse.json(
      {
        success: false,
        message: 'Gmail polling failed',
        error: error instanceof Error ? error.message : 'Unknown error',
        requestId,
      },
      { status: 500 }
    )
  } finally {
    // Best-effort release; if it fails, the TTL set at acquire time expires the lock.
    if (lockValue) {
      await releaseLock(LOCK_KEY, lockValue).catch(() => {})
    }
  }
}

View File

@@ -1,68 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { verifyCronAuth } from '@/lib/auth/internal'
import { acquireLock, releaseLock } from '@/lib/core/config/redis'
import { generateShortId } from '@/lib/core/utils/uuid'
import { pollImapWebhooks } from '@/lib/webhooks/imap-polling-service'
const logger = createLogger('ImapPollingAPI')

// Opt out of static rendering so every cron invocation executes the handler.
export const dynamic = 'force-dynamic'
export const maxDuration = 180 // Allow up to 3 minutes for polling to complete

// Lock key shared across instances so only one IMAP poll runs at a time
// (acquireLock/releaseLock come from the redis config module).
const LOCK_KEY = 'imap-polling-lock'
const LOCK_TTL_SECONDS = 180 // Same as maxDuration (3 min)

/**
 * Cron-invoked endpoint that polls IMAP webhooks.
 *
 * Flow: verify cron auth -> acquire the polling lock -> pollImapWebhooks()
 * -> release the lock in `finally`. When the lock is already held, responds
 * 202 with status 'skip' instead of polling concurrently.
 */
export async function GET(request: NextRequest) {
  const requestId = generateShortId()
  logger.info(`IMAP webhook polling triggered (${requestId})`)
  let lockValue: string | undefined
  try {
    const authError = verifyCronAuth(request, 'IMAP webhook polling')
    if (authError) {
      return authError
    }
    lockValue = requestId // unique value to identify the holder
    const locked = await acquireLock(LOCK_KEY, lockValue, LOCK_TTL_SECONDS)
    if (!locked) {
      // Another run holds the lock: report a skip, not a failure.
      return NextResponse.json(
        {
          success: true,
          message: 'Polling already in progress skipped',
          requestId,
          status: 'skip',
        },
        { status: 202 }
      )
    }
    const results = await pollImapWebhooks()
    return NextResponse.json({
      success: true,
      message: 'IMAP polling completed',
      requestId,
      status: 'completed',
      ...results, // merge poll results into the response body
    })
  } catch (error) {
    logger.error(`Error during IMAP polling (${requestId}):`, error)
    return NextResponse.json(
      {
        success: false,
        message: 'IMAP polling failed',
        error: error instanceof Error ? error.message : 'Unknown error',
        requestId,
      },
      { status: 500 }
    )
  } finally {
    // Best-effort release; if it fails, the TTL set at acquire time expires the lock.
    if (lockValue) {
      await releaseLock(LOCK_KEY, lockValue).catch(() => {})
    }
  }
}

View File

@@ -1,68 +0,0 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { verifyCronAuth } from '@/lib/auth/internal'
import { acquireLock, releaseLock } from '@/lib/core/config/redis'
import { generateShortId } from '@/lib/core/utils/uuid'
import { pollOutlookWebhooks } from '@/lib/webhooks/outlook-polling-service'
const logger = createLogger('OutlookPollingAPI')

// Opt out of static rendering so every cron invocation executes the handler.
export const dynamic = 'force-dynamic'
export const maxDuration = 180 // Allow up to 3 minutes for polling to complete

// Lock key shared across instances so only one Outlook poll runs at a time
// (acquireLock/releaseLock come from the redis config module).
const LOCK_KEY = 'outlook-polling-lock'
const LOCK_TTL_SECONDS = 180 // Same as maxDuration (3 min)

/**
 * Cron-invoked endpoint that polls Outlook webhooks.
 *
 * Flow: verify cron auth -> acquire the polling lock -> pollOutlookWebhooks()
 * -> release the lock in `finally`. When the lock is already held, responds
 * 202 with status 'skip' instead of polling concurrently.
 */
export async function GET(request: NextRequest) {
  const requestId = generateShortId()
  logger.info(`Outlook webhook polling triggered (${requestId})`)
  let lockValue: string | undefined
  try {
    const authError = verifyCronAuth(request, 'Outlook webhook polling')
    if (authError) {
      return authError
    }
    lockValue = requestId // unique value to identify the holder
    const locked = await acquireLock(LOCK_KEY, lockValue, LOCK_TTL_SECONDS)
    if (!locked) {
      // Another run holds the lock: report a skip, not a failure.
      return NextResponse.json(
        {
          success: true,
          message: 'Polling already in progress skipped',
          requestId,
          status: 'skip',
        },
        { status: 202 }
      )
    }
    const results = await pollOutlookWebhooks()
    return NextResponse.json({
      success: true,
      message: 'Outlook polling completed',
      requestId,
      status: 'completed',
      ...results, // merge poll results into the response body
    })
  } catch (error) {
    logger.error(`Error during Outlook polling (${requestId}):`, error)
    return NextResponse.json(
      {
        success: false,
        message: 'Outlook polling failed',
        error: error instanceof Error ? error.message : 'Unknown error',
        requestId,
      },
      { status: 500 }
    )
  } finally {
    // Best-effort release; if it fails, the TTL set at acquire time expires the lock.
    if (lockValue) {
      await releaseLock(LOCK_KEY, lockValue).catch(() => {})
    }
  }
}

View File

@@ -2,6 +2,8 @@ import React, { type HTMLAttributes, memo, type ReactNode, useMemo } from 'react
import ReactMarkdown from 'react-markdown'
import remarkGfm from 'remark-gfm'
import { Tooltip } from '@/components/emcn'
import { CopyCodeButton } from '@/components/ui/copy-code-button'
import { extractTextContent } from '@/lib/core/utils/react-node-text'
export function LinkWithPreview({ href, children }: { href: string; children: React.ReactNode }) {
return (
@@ -102,6 +104,10 @@ function createCustomComponents(LinkComponent: typeof LinkWithPreview) {
<span className='font-sans text-gray-400 text-xs'>
{codeProps.className?.replace('language-', '') || 'code'}
</span>
<CopyCodeButton
code={extractTextContent(codeContent)}
className='text-gray-400 hover:bg-gray-700 hover:text-gray-200'
/>
</div>
<pre className='overflow-x-auto p-4 font-mono text-gray-200 dark:text-gray-100'>
{codeContent}

View File

@@ -0,0 +1,45 @@
import { Blimp, Database, Folder as FolderIcon, Table as TableIcon } from '@/components/emcn/icons'
import { getDocumentIcon } from '@/components/icons/document-icons'
import { cn } from '@/lib/core/utils/cn'
import { workflowBorderColor } from '@/lib/workspaces/colors'
import type { ChatMessageContext } from '@/app/workspace/[workspaceId]/home/types'
interface ContextMentionIconProps {
  context: ChatMessageContext
  /** Only used when context.kind is 'workflow' or 'current_workflow'; ignored otherwise. */
  workflowColor?: string | null
  /** Applied to every icon element. Include sizing and positional classes (e.g. h-[12px] w-[12px]). */
  className: string
}

/** Renders the icon for a context mention chip. Returns null when no icon applies. */
export function ContextMentionIcon({ context, workflowColor, className }: ContextMentionIconProps) {
  if (context.kind === 'workflow' || context.kind === 'current_workflow') {
    // Workflows render as a colored square; without a color there is nothing to show.
    if (!workflowColor) return null
    return (
      <span
        className={cn('rounded-[3px] border-[2px]', className)}
        style={{
          backgroundColor: workflowColor,
          borderColor: workflowBorderColor(workflowColor),
          backgroundClip: 'padding-box',
        }}
      />
    )
  }
  if (context.kind === 'knowledge') return <Database className={className} />
  if (context.kind === 'table') return <TableIcon className={className} />
  if (context.kind === 'file') {
    const FileDocIcon = getDocumentIcon('', context.label)
    return <FileDocIcon className={className} />
  }
  if (context.kind === 'folder') return <FolderIcon className={className} />
  if (context.kind === 'past_chat') return <Blimp className={className} />
  return null
}

View File

@@ -1,4 +1,5 @@
export { ChatMessageAttachments } from './chat-message-attachments'
export { ContextMentionIcon } from './context-mention-icon'
export {
assistantMessageHasRenderableContent,
MessageContent,

View File

@@ -9,7 +9,9 @@ import 'prismjs/components/prism-css'
import 'prismjs/components/prism-markup'
import '@/components/emcn/components/code/code.css'
import { Checkbox, highlight, languages } from '@/components/emcn'
import { CopyCodeButton } from '@/components/ui/copy-code-button'
import { cn } from '@/lib/core/utils/cn'
import { extractTextContent } from '@/lib/core/utils/react-node-text'
import {
PendingTagIndicator,
parseSpecialTags,
@@ -33,16 +35,6 @@ const LANG_ALIASES: Record<string, string> = {
py: 'python',
}
function extractTextContent(node: React.ReactNode): string {
if (typeof node === 'string') return node
if (typeof node === 'number') return String(node)
if (!node) return ''
if (Array.isArray(node)) return node.map(extractTextContent).join('')
if (isValidElement(node))
return extractTextContent((node.props as { children?: React.ReactNode }).children)
return ''
}
const PROSE_CLASSES = cn(
'prose prose-base dark:prose-invert max-w-none',
'font-[family-name:var(--font-inter)] antialiased break-words font-[430] tracking-[0]',
@@ -125,11 +117,13 @@ const MARKDOWN_COMPONENTS: React.ComponentProps<typeof ReactMarkdown>['component
return (
<div className='not-prose my-6 overflow-hidden rounded-lg border border-[var(--divider)]'>
{language && (
<div className='border-[var(--divider)] border-b bg-[var(--surface-4)] px-4 py-2 text-[var(--text-tertiary)] text-xs dark:bg-[var(--surface-4)]'>
{language}
</div>
)}
<div className='flex items-center justify-between border-[var(--divider)] border-b bg-[var(--surface-4)] px-4 py-2 dark:bg-[var(--surface-4)]'>
<span className='text-[var(--text-tertiary)] text-xs'>{language || 'code'}</span>
<CopyCodeButton
code={codeString}
className='text-[var(--text-tertiary)] hover:bg-[var(--surface-5)] hover:text-[var(--text-secondary)]'
/>
</div>
<div className='code-editor-theme bg-[var(--surface-5)] dark:bg-[var(--code-bg)]'>
<pre
className='m-0 overflow-x-auto whitespace-pre p-4 font-[430] font-mono text-[var(--text-primary)] text-small leading-[21px]'

View File

@@ -37,6 +37,7 @@ interface MothershipChatProps {
userId?: string
chatId?: string
onContextAdd?: (context: ChatContext) => void
onContextRemove?: (context: ChatContext) => void
editValue?: string
onEditValueConsumed?: () => void
layout?: 'mothership-view' | 'copilot-view'
@@ -83,6 +84,7 @@ export function MothershipChat({
userId,
chatId,
onContextAdd,
onContextRemove,
editValue,
onEditValueConsumed,
layout = 'mothership-view',
@@ -207,6 +209,7 @@ export function MothershipChat({
isInitialView={false}
userId={userId}
onContextAdd={onContextAdd}
onContextRemove={onContextRemove}
editValue={editValue}
onEditValueConsumed={onEditValueConsumed}
onEnterWhileEmpty={handleEnterWhileEmpty}

View File

@@ -27,6 +27,7 @@ import type {
import { useFolders } from '@/hooks/queries/folders'
import { useKnowledgeBasesQuery } from '@/hooks/queries/kb/knowledge'
import { useTablesList } from '@/hooks/queries/tables'
import { useTasks } from '@/hooks/queries/tasks'
import { useWorkflows } from '@/hooks/queries/workflows'
import { useWorkspaceFiles } from '@/hooks/queries/workspace-files'
@@ -53,6 +54,7 @@ export function useAvailableResources(
const { data: files = [] } = useWorkspaceFiles(workspaceId)
const { data: knowledgeBases } = useKnowledgeBasesQuery(workspaceId)
const { data: folders = [] } = useFolders(workspaceId)
const { data: tasks = [] } = useTasks(workspaceId)
return useMemo(
() => [
@@ -97,8 +99,16 @@ export function useAvailableResources(
isOpen: existingKeys.has(`knowledgebase:${kb.id}`),
})),
},
{
type: 'task' as const,
items: tasks.map((t) => ({
id: t.id,
name: t.name,
isOpen: existingKeys.has(`task:${t.id}`),
})),
},
],
[workflows, folders, tables, files, knowledgeBases, existingKeys]
[workflows, folders, tables, files, knowledgeBases, tasks, existingKeys]
)
}

View File

@@ -22,6 +22,7 @@ import {
getFileExtension,
getMimeTypeFromExtension,
} from '@/lib/uploads/utils/file-utils'
import { workflowBorderColor } from '@/lib/workspaces/colors'
import {
FileViewer,
type PreviewMode,
@@ -514,7 +515,7 @@ function EmbeddedFolder({ workspaceId, folderId }: EmbeddedFolderProps) {
className='h-[12px] w-[12px] flex-shrink-0 rounded-[3px] border-[2px]'
style={{
backgroundColor: w.color,
borderColor: `${w.color}60`,
borderColor: workflowBorderColor(w.color),
backgroundClip: 'padding-box',
}}
/>

View File

@@ -4,6 +4,7 @@ import { type ElementType, type ReactNode, useMemo } from 'react'
import type { QueryClient } from '@tanstack/react-query'
import { useParams } from 'next/navigation'
import {
Blimp,
Database,
File as FileIcon,
Folder as FolderIcon,
@@ -13,12 +14,14 @@ import {
import { WorkflowIcon } from '@/components/icons'
import { getDocumentIcon } from '@/components/icons/document-icons'
import { cn } from '@/lib/core/utils/cn'
import { workflowBorderColor } from '@/lib/workspaces/colors'
import type {
MothershipResource,
MothershipResourceType,
} from '@/app/workspace/[workspaceId]/home/types'
import { knowledgeKeys } from '@/hooks/queries/kb/knowledge'
import { tableKeys } from '@/hooks/queries/tables'
import { taskKeys } from '@/hooks/queries/tasks'
import { folderKeys } from '@/hooks/queries/utils/folder-keys'
import { invalidateWorkflowLists } from '@/hooks/queries/utils/invalidate-workflow-lists'
import { useWorkflows } from '@/hooks/queries/workflows'
@@ -48,7 +51,7 @@ function WorkflowTabSquare({ workflowId, className }: { workflowId: string; clas
className={cn('flex-shrink-0 rounded-[3px] border-[2px]', className)}
style={{
backgroundColor: color,
borderColor: `${color}60`,
borderColor: workflowBorderColor(color),
backgroundClip: 'padding-box',
}}
/>
@@ -63,7 +66,7 @@ function WorkflowDropdownItem({ item }: DropdownItemRenderProps) {
className='h-[14px] w-[14px] flex-shrink-0 rounded-[3px] border-[2px]'
style={{
backgroundColor: color,
borderColor: `${color}60`,
borderColor: workflowBorderColor(color),
backgroundClip: 'padding-box',
}}
/>
@@ -151,6 +154,15 @@ export const RESOURCE_REGISTRY: Record<MothershipResourceType, ResourceTypeConfi
),
renderDropdownItem: (props) => <IconDropdownItem {...props} icon={FolderIcon} />,
},
task: {
type: 'task',
label: 'Tasks',
icon: Blimp,
renderTabIcon: (_resource, className) => (
<Blimp className={cn(className, 'text-[var(--text-icon)]')} />
),
renderDropdownItem: (props) => <IconDropdownItem {...props} icon={Blimp} />,
},
} as const
export const RESOURCE_TYPES = Object.values(RESOURCE_REGISTRY)
@@ -185,6 +197,9 @@ const RESOURCE_INVALIDATORS: Record<
folder: (qc) => {
qc.invalidateQueries({ queryKey: folderKeys.lists() })
},
task: (qc, wId) => {
qc.invalidateQueries({ queryKey: taskKeys.list(wId) })
},
}
/**

View File

@@ -10,6 +10,7 @@ import {
import { Button, Tooltip } from '@/components/emcn'
import { Columns3, Eye, PanelLeft, Pencil } from '@/components/emcn/icons'
import { isEphemeralResource } from '@/lib/copilot/resource-extraction'
import { SIM_RESOURCE_DRAG_TYPE } from '@/lib/copilot/resource-types'
import { cn } from '@/lib/core/utils/cn'
import type { PreviewMode } from '@/app/workspace/[workspaceId]/files/components/file-viewer'
import { AddResourceDropdown } from '@/app/workspace/[workspaceId]/home/components/mothership-view/components/add-resource-dropdown'
@@ -164,7 +165,7 @@ export function ResourceTabs({
const resource = resources[idx]
if (resource) {
e.dataTransfer.setData(
'application/x-sim-resource',
SIM_RESOURCE_DRAG_TYPE,
JSON.stringify({ type: resource.type, id: resource.id, title: resource.title })
)
}

View File

@@ -89,6 +89,8 @@ export function mapResourceToContext(resource: MothershipResource): ChatContext
return { kind: 'file', fileId: resource.id, label: resource.title }
case 'folder':
return { kind: 'folder', folderId: resource.id, label: resource.title }
case 'task':
return { kind: 'past_chat', chatId: resource.id, label: resource.title }
default:
return { kind: 'docs', label: resource.title }
}

View File

@@ -81,7 +81,7 @@ export const PlusMenuDropdown = React.memo(
e.preventDefault()
const firstItem = contentRef.current?.querySelector<HTMLElement>('[role="menuitem"]')
firstItem?.focus()
} else if (e.key === 'Enter') {
} else if (e.key === 'Enter' || e.key === 'Tab') {
e.preventDefault()
const first = filteredItemsRef.current?.[0]
if (first) handleSelect({ type: first.type, id: first.item.id, title: first.item.name })
@@ -99,6 +99,12 @@ export const PlusMenuDropdown = React.memo(
e.preventDefault()
searchRef.current?.focus()
}
} else if (e.key === 'Tab') {
const focused = document.activeElement as HTMLElement | null
if (focused?.getAttribute('role') === 'menuitem') {
e.preventDefault()
focused.click()
}
}
}, [])

View File

@@ -3,11 +3,11 @@
import type React from 'react'
import { useCallback, useEffect, useLayoutEffect, useMemo, useRef, useState } from 'react'
import { useParams } from 'next/navigation'
import { Database, Folder as FolderIcon, Table as TableIcon } from '@/components/emcn/icons'
import { getDocumentIcon } from '@/components/icons/document-icons'
import { useSession } from '@/lib/auth/auth-client'
import { SIM_RESOURCE_DRAG_TYPE, SIM_RESOURCES_DRAG_TYPE } from '@/lib/copilot/resource-types'
import { cn } from '@/lib/core/utils/cn'
import { CHAT_ACCEPT_ATTRIBUTE } from '@/lib/uploads/utils/validation'
import { ContextMentionIcon } from '@/app/workspace/[workspaceId]/home/components/context-mention-icon'
import { useAvailableResources } from '@/app/workspace/[workspaceId]/home/components/mothership-view/components/add-resource-dropdown'
import type {
PlusMenuHandle,
@@ -108,6 +108,7 @@ interface UserInputProps {
isInitialView?: boolean
userId?: string
onContextAdd?: (context: ChatContext) => void
onContextRemove?: (context: ChatContext) => void
onEnterWhileEmpty?: () => boolean
}
@@ -121,6 +122,7 @@ export function UserInput({
isInitialView = true,
userId,
onContextAdd,
onContextRemove,
onEnterWhileEmpty,
}: UserInputProps) {
const { workspaceId } = useParams<{ workspaceId: string }>()
@@ -170,6 +172,37 @@ export function UserInput({
[addContext, onContextAdd]
)
const onContextRemoveRef = useRef(onContextRemove)
onContextRemoveRef.current = onContextRemove
const prevSelectedContextsRef = useRef<ChatContext[]>([])
useEffect(() => {
const prev = prevSelectedContextsRef.current
const curr = contextManagement.selectedContexts
const contextId = (ctx: ChatContext): string => {
switch (ctx.kind) {
case 'workflow':
case 'current_workflow':
return `${ctx.kind}:${ctx.workflowId}`
case 'knowledge':
return `knowledge:${ctx.knowledgeId ?? ''}`
case 'table':
return `table:${ctx.tableId}`
case 'file':
return `file:${ctx.fileId}`
case 'folder':
return `folder:${ctx.folderId}`
case 'past_chat':
return `past_chat:${ctx.chatId}`
default:
return `${ctx.kind}:${ctx.label}`
}
}
const removed = prev.filter((p) => !curr.some((c) => contextId(c) === contextId(p)))
if (removed.length > 0) removed.forEach((ctx) => onContextRemoveRef.current?.(ctx))
prevSelectedContextsRef.current = curr
}, [contextManagement.selectedContexts])
const existingResourceKeys = useMemo(() => {
const keys = new Set<string>()
for (const ctx of contextManagement.selectedContexts) {
@@ -178,6 +211,7 @@ export function UserInput({
if (ctx.kind === 'table' && ctx.tableId) keys.add(`table:${ctx.tableId}`)
if (ctx.kind === 'file' && ctx.fileId) keys.add(`file:${ctx.fileId}`)
if (ctx.kind === 'folder' && ctx.folderId) keys.add(`folder:${ctx.folderId}`)
if (ctx.kind === 'past_chat' && ctx.chatId) keys.add(`task:${ctx.chatId}`)
}
return keys
}, [contextManagement.selectedContexts])
@@ -247,15 +281,17 @@ export function UserInput({
if (textarea) {
const currentValue = valueRef.current
const insertAt = atInsertPosRef.current ?? textarea.selectionStart ?? currentValue.length
atInsertPosRef.current = null
const needsSpaceBefore = insertAt > 0 && !/\s/.test(currentValue.charAt(insertAt - 1))
const insertText = `${needsSpaceBefore ? ' ' : ''}@${resource.title} `
const before = currentValue.slice(0, insertAt)
const after = currentValue.slice(insertAt)
const newValue = `${before}${insertText}${after}`
const newPos = before.length + insertText.length
pendingCursorRef.current = newPos
setValue(`${before}${insertText}${after}`)
// Eagerly sync refs so successive drop-handler iterations see the updated position
valueRef.current = newValue
atInsertPosRef.current = newPos
setValue(newValue)
}
const context = mapResourceToContext(resource)
@@ -281,7 +317,10 @@ export function UserInput({
}, [])
const handleContainerDragOver = useCallback((e: React.DragEvent) => {
if (e.dataTransfer.types.includes('application/x-sim-resource')) {
if (
e.dataTransfer.types.includes(SIM_RESOURCE_DRAG_TYPE) ||
e.dataTransfer.types.includes(SIM_RESOURCES_DRAG_TYPE)
) {
e.preventDefault()
e.stopPropagation()
e.dataTransfer.dropEffect = 'copy'
@@ -292,13 +331,30 @@ export function UserInput({
const handleContainerDrop = useCallback(
(e: React.DragEvent) => {
const resourceJson = e.dataTransfer.getData('application/x-sim-resource')
const resourcesJson = e.dataTransfer.getData(SIM_RESOURCES_DRAG_TYPE)
if (resourcesJson) {
e.preventDefault()
e.stopPropagation()
try {
const resources = JSON.parse(resourcesJson) as MothershipResource[]
for (const resource of resources) {
handleResourceSelect(resource)
}
// Reset after batch so the next non-drop insert uses the cursor position
atInsertPosRef.current = null
} catch {
// Invalid JSON — ignore
}
return
}
const resourceJson = e.dataTransfer.getData(SIM_RESOURCE_DRAG_TYPE)
if (resourceJson) {
e.preventDefault()
e.stopPropagation()
try {
const resource = JSON.parse(resourceJson) as MothershipResource
handleResourceSelect(resource)
atInsertPosRef.current = null
} catch {
// Invalid JSON — ignore
}
@@ -310,11 +366,17 @@ export function UserInput({
)
const handleDragEnter = useCallback((e: React.DragEvent) => {
filesRef.current.handleDragEnter(e)
const isResourceDrag =
e.dataTransfer.types.includes(SIM_RESOURCE_DRAG_TYPE) ||
e.dataTransfer.types.includes(SIM_RESOURCES_DRAG_TYPE)
if (!isResourceDrag) filesRef.current.handleDragEnter(e)
}, [])
const handleDragLeave = useCallback((e: React.DragEvent) => {
filesRef.current.handleDragLeave(e)
const isResourceDrag =
e.dataTransfer.types.includes(SIM_RESOURCE_DRAG_TYPE) ||
e.dataTransfer.types.includes(SIM_RESOURCES_DRAG_TYPE)
if (!isResourceDrag) filesRef.current.handleDragLeave(e)
}, [])
const handleFileChange = useCallback((e: React.ChangeEvent<HTMLInputElement>) => {
@@ -643,42 +705,17 @@ export function UserInput({
: range.token
const matchingCtx = contexts.find((c) => c.label === mentionLabel)
let mentionIconNode: React.ReactNode = null
if (matchingCtx) {
const iconClasses = 'absolute inset-0 m-auto h-[12px] w-[12px] text-[var(--text-icon)]'
switch (matchingCtx.kind) {
case 'workflow':
case 'current_workflow': {
const wfId = (matchingCtx as { workflowId: string }).workflowId
const wfColor = workflowsById[wfId]?.color ?? '#888'
mentionIconNode = (
<div
className='absolute inset-0 m-auto h-[12px] w-[12px] rounded-[3px] border-[2px]'
style={{
backgroundColor: wfColor,
borderColor: `${wfColor}60`,
backgroundClip: 'padding-box',
}}
/>
)
break
}
case 'knowledge':
mentionIconNode = <Database className={iconClasses} />
break
case 'table':
mentionIconNode = <TableIcon className={iconClasses} />
break
case 'file': {
const FileDocIcon = getDocumentIcon('', mentionLabel)
mentionIconNode = <FileDocIcon className={iconClasses} />
break
}
case 'folder':
mentionIconNode = <FolderIcon className={iconClasses} />
break
}
}
const wfId =
matchingCtx?.kind === 'workflow' || matchingCtx?.kind === 'current_workflow'
? matchingCtx.workflowId
: undefined
const mentionIconNode = matchingCtx ? (
<ContextMentionIcon
context={matchingCtx}
workflowColor={wfId ? (workflowsById[wfId]?.color ?? null) : null}
className='absolute inset-0 m-auto h-[12px] w-[12px] text-[var(--text-icon)]'
/>
) : null
elements.push(
<span

View File

@@ -2,8 +2,7 @@
import { useMemo } from 'react'
import { useParams } from 'next/navigation'
import { Database, Folder as FolderIcon, Table as TableIcon } from '@/components/emcn/icons'
import { getDocumentIcon } from '@/components/icons/document-icons'
import { ContextMentionIcon } from '@/app/workspace/[workspaceId]/home/components/context-mention-icon'
import type { ChatMessageContext } from '@/app/workspace/[workspaceId]/home/types'
import { useWorkflows } from '@/hooks/queries/workflows'
@@ -53,42 +52,13 @@ function MentionHighlight({ context }: { context: ChatMessageContext }) {
return (workflowList ?? []).find((w) => w.id === context.workflowId)?.color ?? null
}, [workflowList, context.kind, context.workflowId])
let icon: React.ReactNode = null
const iconClasses = 'h-[12px] w-[12px] flex-shrink-0 text-[var(--text-icon)]'
switch (context.kind) {
case 'workflow':
case 'current_workflow':
icon = workflowColor ? (
<span
className='inline-block h-[12px] w-[12px] flex-shrink-0 rounded-[3px] border-[2px]'
style={{
backgroundColor: workflowColor,
borderColor: `${workflowColor}60`,
backgroundClip: 'padding-box',
}}
/>
) : null
break
case 'knowledge':
icon = <Database className={iconClasses} />
break
case 'table':
icon = <TableIcon className={iconClasses} />
break
case 'file': {
const FileDocIcon = getDocumentIcon('', context.label)
icon = <FileDocIcon className={iconClasses} />
break
}
case 'folder':
icon = <FolderIcon className={iconClasses} />
break
}
return (
<span className='inline-flex items-baseline gap-1 rounded-[5px] bg-[var(--surface-5)] px-[5px]'>
{icon && <span className='relative top-0.5 flex-shrink-0'>{icon}</span>}
<ContextMentionIcon
context={context}
workflowColor={workflowColor}
className='relative top-0.5 h-[12px] w-[12px] flex-shrink-0 text-[var(--text-icon)]'
/>
{context.label}
</span>
)

View File

@@ -17,7 +17,7 @@ import { useChatHistory, useMarkTaskRead } from '@/hooks/queries/tasks'
import type { ChatContext } from '@/stores/panel'
import { MothershipChat, MothershipView, TemplatePrompts, UserInput } from './components'
import { getMothershipUseChatOptions, useChat, useMothershipResize } from './hooks'
import type { FileAttachmentForApi, MothershipResource, MothershipResourceType } from './types'
import type { FileAttachmentForApi, MothershipResourceType } from './types'
const logger = createLogger('Home')
@@ -261,51 +261,42 @@ export function Home({ chatId }: HomeProps = {}) {
return () => window.removeEventListener('mothership-send-message', handler)
}, [sendMessage])
const handleContextAdd = useCallback(
(context: ChatContext) => {
let resourceType: MothershipResourceType | null = null
let resourceId: string | null = null
const resourceTitle: string = context.label
const resolveResourceFromContext = useCallback(
(context: ChatContext): { type: MothershipResourceType; id: string } | null => {
switch (context.kind) {
case 'workflow':
case 'current_workflow':
resourceType = 'workflow'
resourceId = context.workflowId
break
return context.workflowId ? { type: 'workflow', id: context.workflowId } : null
case 'knowledge':
if (context.knowledgeId) {
resourceType = 'knowledgebase'
resourceId = context.knowledgeId
}
break
return context.knowledgeId ? { type: 'knowledgebase', id: context.knowledgeId } : null
case 'table':
if (context.tableId) {
resourceType = 'table'
resourceId = context.tableId
}
break
return context.tableId ? { type: 'table', id: context.tableId } : null
case 'file':
if (context.fileId) {
resourceType = 'file'
resourceId = context.fileId
}
break
return context.fileId ? { type: 'file', id: context.fileId } : null
default:
break
return null
}
},
[]
)
if (resourceType && resourceId) {
const resource: MothershipResource = {
type: resourceType,
id: resourceId,
title: resourceTitle,
}
addResource(resource)
const handleContextAdd = useCallback(
(context: ChatContext) => {
const resolved = resolveResourceFromContext(context)
if (resolved) {
addResource({ ...resolved, title: context.label })
handleResourceEvent()
}
},
[addResource, handleResourceEvent]
[resolveResourceFromContext, addResource, handleResourceEvent]
)
const handleContextRemove = useCallback(
(context: ChatContext) => {
const resolved = resolveResourceFromContext(context)
if (resolved) removeResource(resolved.type, resolved.id)
},
[resolveResourceFromContext, removeResource]
)
const hasMessages = messages.length > 0
@@ -345,6 +336,7 @@ export function Home({ chatId }: HomeProps = {}) {
onStopGeneration={handleStopGeneration}
userId={session?.user?.id}
onContextAdd={handleContextAdd}
onContextRemove={handleContextRemove}
/>
</div>
</div>
@@ -375,6 +367,7 @@ export function Home({ chatId }: HomeProps = {}) {
userId={session?.user?.id}
chatId={resolvedChatId}
onContextAdd={handleContextAdd}
onContextRemove={handleContextRemove}
editValue={editingInputValue}
onEditValueConsumed={clearEditingValue}
animateInput={isInputEntering}

View File

@@ -6,6 +6,9 @@ export type {
MothershipResourceType,
} from '@/lib/copilot/resource-types'
/** Union of all valid context kind strings, derived from {@link ChatContext}. */
export type ChatContextKind = ChatContext['kind']
export interface FileAttachmentForApi {
id: string
key: string
@@ -260,13 +263,14 @@ export interface ChatMessageAttachment {
}
export interface ChatMessageContext {
kind: string
kind: ChatContextKind
label: string
workflowId?: string
knowledgeId?: string
tableId?: string
fileId?: string
folderId?: string
chatId?: string
}
export interface ChatMessage {

View File

@@ -2,7 +2,7 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { format, formatDistanceToNow } from 'date-fns'
import { format, formatDistanceToNow, isPast } from 'date-fns'
import {
AlertCircle,
CheckCircle2,
@@ -380,7 +380,9 @@ function ConnectorCard({
<span>·</span>
<span>
Next sync:{' '}
{formatDistanceToNow(new Date(connector.nextSyncAt), { addSuffix: true })}
{isPast(new Date(connector.nextSyncAt))
? 'pending'
: formatDistanceToNow(new Date(connector.nextSyncAt), { addSuffix: true })}
</span>
</>
)}

View File

@@ -1,6 +1,7 @@
import { memo } from 'react'
import { useParams } from 'next/navigation'
import { cn } from '@/lib/core/utils/cn'
import { workflowBorderColor } from '@/lib/workspaces/colors'
import {
DELETED_WORKFLOW_COLOR,
DELETED_WORKFLOW_LABEL,
@@ -93,7 +94,7 @@ function WorkflowsListInner({
className='h-[10px] w-[10px] flex-shrink-0 rounded-[3px] border-[1.5px]'
style={{
backgroundColor: workflowColor,
borderColor: `${workflowColor}60`,
borderColor: workflowBorderColor(workflowColor),
backgroundClip: 'padding-box',
}}
/>

View File

@@ -20,6 +20,7 @@ import { BASE_EXECUTION_CHARGE } from '@/lib/billing/constants'
import { cn } from '@/lib/core/utils/cn'
import { formatDuration } from '@/lib/core/utils/formatting'
import { filterHiddenOutputKeys } from '@/lib/logs/execution/trace-spans/trace-spans'
import { workflowBorderColor } from '@/lib/workspaces/colors'
import {
ExecutionSnapshot,
FileCards,
@@ -431,7 +432,7 @@ export const LogDetails = memo(function LogDetails({
className='h-[10px] w-[10px] flex-shrink-0 rounded-[3px] border-[1.5px]'
style={{
backgroundColor: c,
borderColor: c ? `${c}60` : undefined,
borderColor: c ? workflowBorderColor(c) : undefined,
backgroundClip: 'padding-box',
}}
/>

View File

@@ -8,6 +8,7 @@ import { Badge, buttonVariants } from '@/components/emcn'
import { dollarsToCredits } from '@/lib/billing/credits/conversion'
import { cn } from '@/lib/core/utils/cn'
import { formatDuration } from '@/lib/core/utils/formatting'
import { workflowBorderColor } from '@/lib/workspaces/colors'
import {
DELETED_WORKFLOW_COLOR,
DELETED_WORKFLOW_LABEL,
@@ -90,7 +91,7 @@ const LogRow = memo(
className='h-[10px] w-[10px] flex-shrink-0 rounded-[3px] border-[1.5px]'
style={{
backgroundColor: workflowColor,
borderColor: `${workflowColor}60`,
borderColor: workflowBorderColor(workflowColor),
backgroundClip: 'padding-box',
}}
/>

View File

@@ -20,6 +20,7 @@ import { cn } from '@/lib/core/utils/cn'
import { hasActiveFilters } from '@/lib/logs/filters'
import { getTriggerOptions } from '@/lib/logs/get-trigger-options'
import { captureEvent } from '@/lib/posthog/client'
import { workflowBorderColor } from '@/lib/workspaces/colors'
import { type LogStatus, STATUS_CONFIG } from '@/app/workspace/[workspaceId]/logs/utils'
import { getBlock } from '@/blocks/registry'
import { useFolderMap } from '@/hooks/queries/folders'
@@ -124,7 +125,7 @@ function getColorIcon(
width: 10,
height: 10,
...(withRing && {
borderColor: `${color}60`,
borderColor: workflowBorderColor(color),
backgroundClip: 'padding-box' as const,
}),
}}
@@ -604,7 +605,7 @@ export const LogsToolbar = memo(function LogsToolbar({
className='h-[8px] w-[8px] flex-shrink-0 rounded-xs border-[1.5px]'
style={{
backgroundColor: selectedWorkflow.color,
borderColor: `${selectedWorkflow.color}60`,
borderColor: workflowBorderColor(selectedWorkflow.color),
backgroundClip: 'padding-box',
}}
/>
@@ -735,7 +736,7 @@ export const LogsToolbar = memo(function LogsToolbar({
className='h-[8px] w-[8px] flex-shrink-0 rounded-xs border-[1.5px]'
style={{
backgroundColor: selectedWorkflow.color,
borderColor: `${selectedWorkflow.color}60`,
borderColor: workflowBorderColor(selectedWorkflow.color),
backgroundClip: 'padding-box',
}}
/>

View File

@@ -33,6 +33,7 @@ import {
type TriggerData,
type WorkflowData,
} from '@/lib/logs/search-suggestions'
import { workflowBorderColor } from '@/lib/workspaces/colors'
import type {
FilterTag,
HeaderAction,
@@ -157,7 +158,7 @@ function getColorIcon(
width: 10,
height: 10,
...(withRing && {
borderColor: `${color}60`,
borderColor: workflowBorderColor(color),
backgroundClip: 'padding-box' as const,
}),
}}
@@ -742,7 +743,7 @@ export default function Logs() {
className='h-[10px] w-[10px] rounded-[3px] border-[1.5px]'
style={{
backgroundColor: workflowColor,
borderColor: `${workflowColor}60`,
borderColor: workflowBorderColor(workflowColor),
backgroundClip: 'padding-box',
}}
/>
@@ -1441,7 +1442,7 @@ function LogsFilterPanel({ searchQuery, onSearchQueryChange }: LogsFilterPanelPr
className='h-[8px] w-[8px] flex-shrink-0 rounded-xs border-[1.5px]'
style={{
backgroundColor: selectedWorkflow.color,
borderColor: `${selectedWorkflow.color}60`,
borderColor: workflowBorderColor(selectedWorkflow.color),
backgroundClip: 'padding-box',
}}
/>

View File

@@ -227,123 +227,128 @@ export function Admin() {
<div
key={u.id}
className={cn(
'flex items-center gap-3 px-3 py-2 text-small',
'flex flex-col gap-2 px-3 py-2 text-small',
'border-[var(--border-secondary)] border-b last:border-b-0'
)}
>
<span className='w-[200px] truncate text-[var(--text-primary)]'>
{u.name || '—'}
</span>
<span className='flex-1 truncate text-[var(--text-secondary)]'>{u.email}</span>
<span className='w-[80px]'>
<Badge variant={u.role === 'admin' ? 'blue' : 'gray'}>{u.role || 'user'}</Badge>
</span>
<span className='w-[80px]'>
{u.banned ? (
<Badge variant='red'>Banned</Badge>
) : (
<Badge variant='green'>Active</Badge>
)}
</span>
<span className='flex w-[250px] justify-end gap-1'>
{u.id !== session?.user?.id && (
<>
<Button
variant='active'
className='h-[28px] px-2 text-[12px]'
onClick={() => handleImpersonate(u.id)}
disabled={pendingUserIds.has(u.id)}
>
{impersonatingUserId === u.id ||
(impersonateUser.isPending &&
(impersonateUser.variables as { userId?: string } | undefined)
?.userId === u.id)
? 'Switching...'
: 'Impersonate'}
</Button>
<Button
variant='active'
className='h-[28px] px-2 text-[12px]'
onClick={() => {
setUserRole.reset()
setUserRole.mutate({
userId: u.id,
role: u.role === 'admin' ? 'user' : 'admin',
})
}}
disabled={pendingUserIds.has(u.id)}
>
{u.role === 'admin' ? 'Demote' : 'Promote'}
</Button>
{u.banned ? (
<div className='flex items-center gap-3'>
<span className='w-[200px] truncate text-[var(--text-primary)]'>
{u.name || '—'}
</span>
<span className='flex-1 truncate text-[var(--text-secondary)]'>{u.email}</span>
<span className='w-[80px]'>
<Badge variant={u.role === 'admin' ? 'blue' : 'gray'}>
{u.role || 'user'}
</Badge>
</span>
<span className='w-[80px]'>
{u.banned ? (
<Badge variant='red'>Banned</Badge>
) : (
<Badge variant='green'>Active</Badge>
)}
</span>
<span className='flex w-[250px] justify-end gap-1'>
{u.id !== session?.user?.id && (
<>
<Button
variant='active'
className='h-[28px] px-2 text-caption'
className='h-[28px] px-2 text-[12px]'
onClick={() => handleImpersonate(u.id)}
disabled={pendingUserIds.has(u.id)}
>
{impersonatingUserId === u.id ||
(impersonateUser.isPending &&
(impersonateUser.variables as { userId?: string } | undefined)
?.userId === u.id)
? 'Switching...'
: 'Impersonate'}
</Button>
<Button
variant='active'
className='h-[28px] px-2 text-[12px]'
onClick={() => {
unbanUser.reset()
unbanUser.mutate({ userId: u.id })
setUserRole.reset()
setUserRole.mutate({
userId: u.id,
role: u.role === 'admin' ? 'user' : 'admin',
})
}}
disabled={pendingUserIds.has(u.id)}
>
Unban
{u.role === 'admin' ? 'Demote' : 'Promote'}
</Button>
) : banUserId === u.id ? (
<div className='flex gap-1'>
<EmcnInput
value={banReason}
onChange={(e) => setBanReason(e.target.value)}
placeholder='Reason (optional)'
className='h-[28px] w-[120px] text-caption'
/>
<Button
variant='primary'
className='h-[28px] px-2 text-caption'
onClick={() => {
banUser.reset()
banUser.mutate(
{
userId: u.id,
...(banReason.trim() ? { banReason: banReason.trim() } : {}),
},
{
onSuccess: () => {
setBanUserId(null)
setBanReason('')
},
}
)
}}
disabled={pendingUserIds.has(u.id)}
>
Confirm
</Button>
{u.banned ? (
<Button
variant='active'
className='h-[28px] px-2 text-caption'
onClick={() => {
unbanUser.reset()
unbanUser.mutate({ userId: u.id })
}}
disabled={pendingUserIds.has(u.id)}
>
Unban
</Button>
) : (
<Button
variant='active'
className={cn(
'h-[28px] px-2 text-caption',
banUserId === u.id
? 'text-[var(--text-primary)]'
: 'text-[var(--text-error)]'
)}
onClick={() => {
if (banUserId === u.id) {
setBanUserId(null)
setBanReason('')
} else {
setBanUserId(u.id)
setBanReason('')
}
}}
disabled={pendingUserIds.has(u.id)}
>
{banUserId === u.id ? 'Cancel' : 'Ban'}
</Button>
)}
</>
)}
</span>
</div>
{banUserId === u.id && !u.banned && (
<div className='flex items-center gap-2 pl-[200px]'>
<EmcnInput
value={banReason}
onChange={(e) => setBanReason(e.target.value)}
placeholder='Reason (optional)'
className='h-[28px] flex-1 text-caption'
/>
<Button
variant='primary'
className='h-[28px] px-3 text-caption'
onClick={() => {
banUser.reset()
banUser.mutate(
{
userId: u.id,
...(banReason.trim() ? { banReason: banReason.trim() } : {}),
},
{
onSuccess: () => {
setBanUserId(null)
setBanReason('')
}}
>
Cancel
</Button>
</div>
) : (
<Button
variant='active'
className='h-[28px] px-2 text-[var(--text-error)] text-caption'
onClick={() => {
setBanUserId(u.id)
setBanReason('')
}}
disabled={pendingUserIds.has(u.id)}
>
Ban
</Button>
)}
</>
)}
</span>
},
}
)
}}
disabled={pendingUserIds.has(u.id)}
>
Confirm Ban
</Button>
</div>
)}
</div>
))}
</div>

View File

@@ -2,6 +2,7 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { createLogger } from '@sim/logger'
import { useQueryClient } from '@tanstack/react-query'
import { Check, Clipboard, Key, Search } from 'lucide-react'
import { useParams, useRouter } from 'next/navigation'
import {
@@ -42,6 +43,7 @@ import {
useWorkspaceCredentials,
type WorkspaceCredential,
type WorkspaceCredentialRole,
workspaceCredentialKeys,
} from '@/hooks/queries/credentials'
import {
usePersonalEnvironment,
@@ -125,9 +127,11 @@ interface WorkspaceVariableRowProps {
renamingKey: string | null
pendingKeyValue: string
hasCredential: boolean
canEdit: boolean
onRenameStart: (key: string) => void
onPendingKeyChange: (value: string) => void
onRenameEnd: (key: string, value: string) => void
onValueChange: (key: string, value: string) => void
onDelete: (key: string) => void
onViewDetails: (envKey: string) => void
}
@@ -138,12 +142,16 @@ function WorkspaceVariableRow({
renamingKey,
pendingKeyValue,
hasCredential,
canEdit,
onRenameStart,
onPendingKeyChange,
onRenameEnd,
onValueChange,
onDelete,
onViewDetails,
}: WorkspaceVariableRowProps) {
const [valueFocused, setValueFocused] = useState(false)
return (
<div className='contents'>
<EmcnInput
@@ -158,13 +166,27 @@ function WorkspaceVariableRow({
autoCapitalize='off'
spellCheck='false'
readOnly
onFocus={(e) => e.target.removeAttribute('readOnly')}
onFocus={(e) => {
if (canEdit) e.target.removeAttribute('readOnly')
}}
className='h-9'
/>
<div />
<EmcnInput
value={value ? '\u2022'.repeat(value.length) : ''}
value={canEdit ? value : value ? '\u2022'.repeat(value.length) : ''}
type={canEdit && !valueFocused ? 'password' : 'text'}
onChange={(e) => onValueChange(envKey, e.target.value)}
readOnly
onFocus={(e) => {
if (canEdit) {
setValueFocused(true)
e.target.removeAttribute('readOnly')
}
}}
onBlur={() => {
if (canEdit) setValueFocused(false)
}}
name={`workspace_env_value_${envKey}_${Math.random()}`}
autoComplete='off'
autoCorrect='off'
autoCapitalize='off'
@@ -179,14 +201,18 @@ function WorkspaceVariableRow({
>
Details
</Button>
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button variant='ghost' onClick={() => onDelete(envKey)} className='h-9 w-9'>
<Trash />
</Button>
</Tooltip.Trigger>
<Tooltip.Content>Delete secret</Tooltip.Content>
</Tooltip.Root>
{canEdit ? (
<Tooltip.Root>
<Tooltip.Trigger asChild>
<Button variant='ghost' onClick={() => onDelete(envKey)} className='h-9 w-9'>
<Trash />
</Button>
</Tooltip.Trigger>
<Tooltip.Content>Delete secret</Tooltip.Content>
</Tooltip.Root>
) : (
<div />
)}
</div>
)
}
@@ -298,6 +324,14 @@ export function CredentialsManager() {
)
const { data: workspacePermissions } = useWorkspacePermissionsQuery(workspaceId || null)
const queryClient = useQueryClient()
const isWorkspaceAdmin = useMemo(() => {
const userId = session?.user?.id
if (!userId || !workspacePermissions?.users) return false
const currentUser = workspacePermissions.users.find((user) => user.userId === userId)
return currentUser?.permissionType === 'admin'
}, [session?.user?.id, workspacePermissions?.users])
const isLoading = isPersonalLoading || isWorkspaceLoading
const variables = useMemo(() => personalEnvData || {}, [personalEnvData])
@@ -767,6 +801,10 @@ export function CredentialsManager() {
[pendingKeyValue, renamingKey]
)
const handleWorkspaceValueChange = useCallback((key: string, value: string) => {
setWorkspaceVars((prev) => ({ ...prev, [key]: value }))
}, [])
const handleDeleteWorkspaceVar = useCallback((key: string) => {
setWorkspaceVars((prev) => {
const next = { ...prev }
@@ -923,6 +961,7 @@ export function CredentialsManager() {
const prevInitialVars = [...initialVarsRef.current]
const prevInitialWorkspaceVars = { ...initialWorkspaceVarsRef.current }
const mutations: Promise<unknown>[] = []
try {
setShowUnsavedChanges(false)
@@ -944,8 +983,6 @@ export function CredentialsManager() {
.filter((v) => v.key && v.value)
.reduce<Record<string, string>>((acc, { key, value }) => ({ ...acc, [key]: value }), {})
await savePersonalMutation.mutateAsync({ variables: validVariables })
const before = prevInitialWorkspaceVars
const after = mergedWorkspaceVars
const toUpsert: Record<string, string> = {}
@@ -961,14 +998,37 @@ export function CredentialsManager() {
if (!(k in after)) toDelete.push(k)
}
if (workspaceId) {
if (Object.keys(toUpsert).length) {
await upsertWorkspaceMutation.mutateAsync({ workspaceId, variables: toUpsert })
}
if (toDelete.length) {
await removeWorkspaceMutation.mutateAsync({ workspaceId, keys: toDelete })
const personalChanged = (() => {
const initialMap = new Map(
prevInitialVars.filter((v) => v.key && v.value).map((v) => [v.key, v.value])
)
const currentKeys = Object.keys(validVariables)
if (initialMap.size !== currentKeys.length) return true
for (const [key, value] of Object.entries(validVariables)) {
if (initialMap.get(key) !== value) return true
}
return false
})()
if (personalChanged) {
mutations.push(savePersonalMutation.mutateAsync({ variables: validVariables }))
}
if (workspaceId && (Object.keys(toUpsert).length || toDelete.length)) {
mutations.push(
(async () => {
if (Object.keys(toUpsert).length) {
await upsertWorkspaceMutation.mutateAsync({ workspaceId, variables: toUpsert })
}
if (toDelete.length) {
await removeWorkspaceMutation.mutateAsync({ workspaceId, keys: toDelete })
}
})()
)
}
const results = await Promise.allSettled(mutations)
const firstFailure = results.find((r): r is PromiseRejectedResult => r.status === 'rejected')
if (firstFailure) throw firstFailure.reason
setWorkspaceVars(mergedWorkspaceVars)
setNewWorkspaceRows([createEmptyEnvVar()])
@@ -977,17 +1037,13 @@ export function CredentialsManager() {
initialVarsRef.current = prevInitialVars
initialWorkspaceVarsRef.current = prevInitialWorkspaceVars
logger.error('Failed to save environment variables:', error)
} finally {
if (mutations.length > 0) {
queryClient.invalidateQueries({ queryKey: workspaceCredentialKeys.lists() })
}
}
}, [
isListSaving,
envVars,
workspaceVars,
newWorkspaceRows,
workspaceId,
savePersonalMutation,
upsertWorkspaceMutation,
removeWorkspaceMutation,
])
// eslint-disable-next-line react-hooks/exhaustive-deps -- mutation objects and queryClient are stable (TanStack Query v5)
}, [isListSaving, envVars, workspaceVars, newWorkspaceRows, workspaceId])
const handleDiscardAndNavigate = useCallback(() => {
shouldBlockNavRef.current = false
@@ -1494,24 +1550,27 @@ export function CredentialsManager() {
renamingKey={renamingKey}
pendingKeyValue={pendingKeyValue}
hasCredential={envKeyToCredential.has(key)}
canEdit={isWorkspaceAdmin}
onRenameStart={setRenamingKey}
onPendingKeyChange={setPendingKeyValue}
onRenameEnd={handleWorkspaceKeyRename}
onValueChange={handleWorkspaceValueChange}
onDelete={handleDeleteWorkspaceVar}
onViewDetails={(envKey) => handleViewDetails(envKey, 'env_workspace')}
/>
))}
{(searchTerm.trim()
? filteredNewWorkspaceRows
: newWorkspaceRows.map((row, index) => ({ row, originalIndex: index }))
).map(({ row, originalIndex }) => (
<NewWorkspaceVariableRow
key={row.id || originalIndex}
envVar={row}
index={originalIndex}
onUpdate={updateNewWorkspaceRow}
/>
))}
{isWorkspaceAdmin &&
(searchTerm.trim()
? filteredNewWorkspaceRows
: newWorkspaceRows.map((row, index) => ({ row, originalIndex: index }))
).map(({ row, originalIndex }) => (
<NewWorkspaceVariableRow
key={row.id || originalIndex}
envVar={row}
index={originalIndex}
onUpdate={updateNewWorkspaceRow}
/>
))}
<div className={`${COL_SPAN_ALL} h-[8px]`} />
</>
)}

View File

@@ -6,6 +6,7 @@ import { useParams, useRouter } from 'next/navigation'
import { Button, Combobox, SModalTabs, SModalTabsList, SModalTabsTrigger } from '@/components/emcn'
import { Input } from '@/components/ui'
import { formatDate } from '@/lib/core/utils/formatting'
import { workflowBorderColor } from '@/lib/workspaces/colors'
import { RESOURCE_REGISTRY } from '@/app/workspace/[workspaceId]/home/components/mothership-view/components/resource-registry'
import type { MothershipResourceType } from '@/app/workspace/[workspaceId]/home/types'
import { DeletedItemSkeleton } from '@/app/workspace/[workspaceId]/settings/components/recently-deleted/deleted-item-skeleton'
@@ -97,7 +98,7 @@ function ResourceIcon({ resource }: { resource: DeletedResource }) {
className='h-[14px] w-[14px] shrink-0 rounded-[3px] border-[2px]'
style={{
backgroundColor: color,
borderColor: `${color}60`,
borderColor: workflowBorderColor(color),
backgroundClip: 'padding-box',
}}
/>

View File

@@ -14,6 +14,7 @@ import {
} from '@/components/emcn'
import { Pencil, SquareArrowUpRight } from '@/components/emcn/icons'
import { cn } from '@/lib/core/utils/cn'
import { workflowBorderColor } from '@/lib/workspaces/colors'
import { ConversationListItem } from '@/app/workspace/[workspaceId]/components'
import type { useHoverMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import type { FolderTreeNode } from '@/stores/folders/types'
@@ -131,7 +132,7 @@ function WorkflowColorSwatch({ color }: { color: string }) {
className='h-[16px] w-[16px] flex-shrink-0 rounded-sm border-[2.5px]'
style={{
backgroundColor: color,
borderColor: `${color}60`,
borderColor: workflowBorderColor(color),
backgroundClip: 'padding-box',
}}
/>

View File

@@ -5,6 +5,7 @@ import { memo } from 'react'
import { Command } from 'cmdk'
import { Blimp } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
import { workflowBorderColor } from '@/lib/workspaces/colors'
import type { CommandItemProps } from '../utils'
import { COMMAND_ITEM_CLASSNAME } from '../utils'
@@ -64,7 +65,7 @@ export const MemoizedWorkflowItem = memo(
className='h-[14px] w-[14px] flex-shrink-0 rounded-sm border-[2px]'
style={{
backgroundColor: color,
borderColor: `${color}60`,
borderColor: workflowBorderColor(color),
backgroundClip: 'padding-box',
}}
/>

View File

@@ -5,6 +5,7 @@ import { createLogger } from '@sim/logger'
import clsx from 'clsx'
import { ChevronRight, Folder, FolderOpen, MoreHorizontal } from 'lucide-react'
import { useParams, useRouter } from 'next/navigation'
import { SIM_RESOURCES_DRAG_TYPE } from '@/lib/copilot/resource-types'
import { generateId } from '@/lib/core/utils/uuid'
import { getNextWorkflowColor } from '@/lib/workflows/colors'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
@@ -18,6 +19,10 @@ import {
useSidebarDragContext,
} from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { SIDEBAR_SCROLL_EVENT } from '@/app/workspace/[workspaceId]/w/components/sidebar/sidebar'
import {
buildDragResources,
createSidebarDragGhost,
} from '@/app/workspace/[workspaceId]/w/components/sidebar/utils'
import {
useCanDelete,
useDeleteFolder,
@@ -136,6 +141,7 @@ export function FolderItem({
})
const isEditingRef = useRef(false)
const dragGhostRef = useRef<HTMLElement | null>(null)
const handleCreateWorkflowInFolder = useCallback(() => {
const name = generateCreativeWorkflowName()
@@ -196,10 +202,24 @@ export function FolderItem({
}
e.dataTransfer.setData('sidebar-selection', JSON.stringify(selection))
e.dataTransfer.effectAllowed = 'move'
e.dataTransfer.effectAllowed = 'copyMove'
const resources = buildDragResources(selection, workspaceId)
if (resources.length > 0) {
e.dataTransfer.setData(SIM_RESOURCES_DRAG_TYPE, JSON.stringify(resources))
}
const total = selection.folderIds.length + selection.workflowIds.length
const ghostLabel = total > 1 ? `${folder.name} +${total - 1} more` : folder.name
const icon = total === 1 ? { kind: 'folder' as const } : undefined
const ghost = createSidebarDragGhost(ghostLabel, icon)
void ghost.offsetHeight
e.dataTransfer.setDragImage(ghost, ghost.offsetWidth / 2, ghost.offsetHeight / 2)
dragGhostRef.current = ghost
onDragStartProp?.()
},
[folder.id, onDragStartProp]
[folder.id, folder.name, workspaceId, onDragStartProp]
)
const {
@@ -212,6 +232,10 @@ export function FolderItem({
})
const handleDragEnd = useCallback(() => {
if (dragGhostRef.current) {
dragGhostRef.current.remove()
dragGhostRef.current = null
}
handleDragEndBase()
onDragEndProp?.()
}, [handleDragEndBase, onDragEndProp])

View File

@@ -5,6 +5,8 @@ import clsx from 'clsx'
import { MoreHorizontal } from 'lucide-react'
import Link from 'next/link'
import { useParams } from 'next/navigation'
import { SIM_RESOURCES_DRAG_TYPE } from '@/lib/copilot/resource-types'
import { workflowBorderColor } from '@/lib/workspaces/colors'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { getWorkflowLockToggleIds } from '@/app/workspace/[workspaceId]/w/[workflowId]/utils'
import { ContextMenu } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/workflow-list/components/context-menu/context-menu'
@@ -16,6 +18,10 @@ import {
useItemRename,
useSidebarDragContext,
} from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import {
buildDragResources,
createSidebarDragGhost,
} from '@/app/workspace/[workspaceId]/w/components/sidebar/utils'
import {
useCanDelete,
useDeleteSelection,
@@ -198,6 +204,7 @@ export function WorkflowItem({
}, [isActiveWorkflow, isWorkflowLocked])
const isEditingRef = useRef(false)
const dragGhostRef = useRef<HTMLElement | null>(null)
const {
isOpen: isContextMenuOpen,
@@ -337,10 +344,25 @@ export function WorkflowItem({
}
e.dataTransfer.setData('sidebar-selection', JSON.stringify(selection))
e.dataTransfer.effectAllowed = 'move'
e.dataTransfer.effectAllowed = 'copyMove'
const resources = buildDragResources(selection, workspaceId)
if (resources.length > 0) {
e.dataTransfer.setData(SIM_RESOURCES_DRAG_TYPE, JSON.stringify(resources))
}
const total = selection.workflowIds.length + selection.folderIds.length
const ghostLabel = total > 1 ? `${workflow.name} +${total - 1} more` : workflow.name
const icon = total === 1 ? { kind: 'workflow' as const, color: workflow.color } : undefined
const ghost = createSidebarDragGhost(ghostLabel, icon)
// Force reflow so the browser can capture the rendered element
void ghost.offsetHeight
e.dataTransfer.setDragImage(ghost, ghost.offsetWidth / 2, ghost.offsetHeight / 2)
dragGhostRef.current = ghost
onDragStartProp?.()
},
[workflow.id, onDragStartProp]
[workflow.id, workflow.name, workflow.color, workspaceId, onDragStartProp]
)
const {
@@ -353,6 +375,10 @@ export function WorkflowItem({
})
const handleDragEnd = useCallback(() => {
if (dragGhostRef.current) {
dragGhostRef.current.remove()
dragGhostRef.current = null
}
handleDragEndBase()
onDragEndProp?.()
}, [handleDragEndBase, onDragEndProp])
@@ -414,7 +440,7 @@ export function WorkflowItem({
className='h-[16px] w-[16px] flex-shrink-0 rounded-sm border-[2.5px]'
style={{
backgroundColor: workflow.color,
borderColor: `${workflow.color}60`,
borderColor: workflowBorderColor(workflow.color),
backgroundClip: 'padding-box',
}}
/>

View File

@@ -37,6 +37,7 @@ import {
Wordmark,
} from '@/components/emcn/icons'
import { useSession } from '@/lib/auth/auth-client'
import { SIM_RESOURCES_DRAG_TYPE } from '@/lib/copilot/resource-types'
import { cn } from '@/lib/core/utils/cn'
import { isMacPlatform } from '@/lib/core/utils/platform'
import { buildFolderTree } from '@/lib/folders/tree'
@@ -72,7 +73,10 @@ import {
useWorkflowOperations,
useWorkspaceManagement,
} from '@/app/workspace/[workspaceId]/w/components/sidebar/hooks'
import { groupWorkflowsByFolder } from '@/app/workspace/[workspaceId]/w/components/sidebar/utils'
import {
createSidebarDragGhost,
groupWorkflowsByFolder,
} from '@/app/workspace/[workspaceId]/w/components/sidebar/utils'
import {
useDuplicateWorkspace,
useExportWorkspace,
@@ -159,6 +163,30 @@ const SidebarTaskItem = memo(function SidebarTaskItem({
onMorePointerDown: () => void
onMoreClick: (e: React.MouseEvent<HTMLButtonElement>, taskId: string) => void
}) {
const dragGhostRef = useRef<HTMLElement | null>(null)
const handleDragStart = useCallback(
(e: React.DragEvent) => {
e.dataTransfer.effectAllowed = 'copyMove'
e.dataTransfer.setData(
SIM_RESOURCES_DRAG_TYPE,
JSON.stringify([{ type: 'task', id: task.id, title: task.name }])
)
const ghost = createSidebarDragGhost(task.name, { kind: 'task' })
void ghost.offsetHeight
e.dataTransfer.setDragImage(ghost, ghost.offsetWidth / 2, ghost.offsetHeight / 2)
dragGhostRef.current = ghost
},
[task.id, task.name]
)
const handleDragEnd = useCallback(() => {
if (dragGhostRef.current) {
dragGhostRef.current.remove()
dragGhostRef.current = null
}
}, [])
return (
<SidebarTooltip label={task.name} enabled={showCollapsedTooltips}>
<Link
@@ -182,6 +210,9 @@ const SidebarTaskItem = memo(function SidebarTaskItem({
}
}}
onContextMenu={task.id !== 'new' ? (e) => onContextMenu(e, task.id) : undefined}
draggable={task.id !== 'new'}
onDragStart={task.id !== 'new' ? handleDragStart : undefined}
onDragEnd={task.id !== 'new' ? handleDragEnd : undefined}
>
<Blimp className='h-[16px] w-[16px] flex-shrink-0 text-[var(--text-icon)]' />
<div className='min-w-0 flex-1 truncate font-base text-[var(--text-body)]'>{task.name}</div>

View File

@@ -1,5 +1,96 @@
import type { MothershipResource } from '@/lib/copilot/resource-types'
import { workflowBorderColor } from '@/lib/workspaces/colors'
import { getFolderMap } from '@/hooks/queries/utils/folder-cache'
import { getWorkflows } from '@/hooks/queries/utils/workflow-cache'
import type { WorkflowMetadata } from '@/stores/workflows/registry/types'
/**
* Builds a `MothershipResource` array from a sidebar drag selection so it can
* be set as `application/x-sim-resources` drag data and dropped into the chat.
*/
export function buildDragResources(
selection: { workflowIds: string[]; folderIds: string[] },
workspaceId: string
): MothershipResource[] {
const allWorkflows = getWorkflows(workspaceId)
const workflowMap = Object.fromEntries(allWorkflows.map((w) => [w.id, w]))
const folderMap = getFolderMap(workspaceId)
return [
...selection.workflowIds.map((id) => ({
type: 'workflow' as const,
id,
title: workflowMap[id]?.name ?? id,
})),
...selection.folderIds.map((id) => ({
type: 'folder' as const,
id,
title: folderMap[id]?.name ?? id,
})),
]
}
export type SidebarDragGhostIcon =
| { kind: 'workflow'; color: string }
| { kind: 'folder' }
| { kind: 'task' }
const FOLDER_SVG = `<svg width="14" height="14" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" aria-hidden="true"><path d="M20 20a2 2 0 0 0 2-2V8a2 2 0 0 0-2-2h-7.9a2 2 0 0 1-1.69-.9L9.6 3.9A2 2 0 0 0 7.93 3H4a2 2 0 0 0-2 2v13a2 2 0 0 0 2 2Z"/></svg>`
const BLIMP_SVG = `<svg width="14" height="14" viewBox="1.25 1.25 18 18" fill="currentColor" stroke="currentColor" stroke-width="0.75" stroke-linejoin="round" aria-hidden="true"><path transform="translate(20.5, 0) scale(-1, 1)" d="M18.24 9.18C18.16 8.94 18 8.74 17.83 8.56L17.83 8.56C17.67 8.4 17.49 8.25 17.3 8.11V5.48C17.3 5.32 17.24 5.17 17.14 5.06C17.06 4.95 16.93 4.89 16.79 4.89H15.93C15.61 4.89 15.32 5.11 15.19 5.44L14.68 6.77C14.05 6.51 13.23 6.22 12.15 6C11.04 5.77 9.66 5.61 7.9 5.61C5.97 5.61 4.56 6.13 3.61 6.89C3.14 7.28 2.78 7.72 2.54 8.19C2.29 8.66 2.18 9.15 2.18 9.63C2.18 10.1 2.29 10.59 2.52 11.06C2.87 11.76 3.48 12.41 4.34 12.89C4.91 13.2 5.61 13.44 6.43 13.56L6.8 14.78C6.94 15.27 7.33 15.59 7.78 15.59H10.56C11.06 15.59 11.48 15.18 11.58 14.61L11.81 13.29C12.31 13.2 12.75 13.09 13.14 12.99C13.74 12.82 14.24 12.64 14.67 12.48L15.19 13.82C15.32 14.16 15.61 14.38 15.93 14.38H16.79C16.93 14.38 17.06 14.31 17.14 14.2C17.24 14.1 17.29 13.95 17.3 13.79V11.15C17.33 11.12 17.37 11.09 17.42 11.07L17.4 11.07L17.42 11.07C17.65 10.89 17.87 10.69 18.04 10.46C18.12 10.35 18.19 10.22 18.24 10.08C18.29 9.94 18.32 9.79 18.32 9.63C18.32 9.47 18.29 9.32 18.24 9.18Z"/></svg>`
/**
 * Builds a lightweight "pill" element (optional icon + truncated label) used as
 * the custom drag image for sidebar items. Append it to `document.body`, hand
 * it to `e.dataTransfer.setDragImage`, then remove it on dragend.
 */
export function createSidebarDragGhost(label: string, icon?: SidebarDragGhostIcon): HTMLElement {
  const pill = document.createElement('div')
  // Parked far off-screen so it never flashes in the layout while still being
  // rendered — the browser must paint the element for setDragImage to capture it.
  pill.style.cssText = `
    position: fixed;
    top: -500px;
    left: 0;
    display: inline-flex;
    align-items: center;
    gap: 6px;
    padding: 4px 10px;
    background: var(--surface-active);
    border: 1px solid rgba(255,255,255,0.08);
    border-radius: 8px;
    font-family: system-ui, -apple-system, sans-serif;
    font-size: 13px;
    color: var(--text-body);
    white-space: nowrap;
    pointer-events: none;
    box-shadow: 0 4px 12px rgba(0,0,0,0.4);
    z-index: 9999;
  `

  if (icon?.kind === 'workflow') {
    // Colored square mirroring the workflow swatch shown in the sidebar.
    const swatch = document.createElement('div')
    swatch.style.cssText = `
      width: 14px; height: 14px; flex-shrink: 0;
      border-radius: 3px; border: 2px solid ${workflowBorderColor(icon.color)};
      background: ${icon.color}; background-clip: padding-box;
    `
    pill.appendChild(swatch)
  } else if (icon) {
    // Inline SVG (folder or task blimp), tinted via currentColor.
    const svgWrap = document.createElement('div')
    svgWrap.style.cssText =
      'display: flex; align-items: center; flex-shrink: 0; color: var(--text-icon);'
    svgWrap.innerHTML = icon.kind === 'folder' ? FOLDER_SVG : BLIMP_SVG
    pill.appendChild(svgWrap)
  }

  const labelSpan = document.createElement('span')
  labelSpan.style.cssText = 'max-width: 200px; overflow: hidden; text-overflow: ellipsis;'
  labelSpan.textContent = label
  pill.appendChild(labelSpan)

  document.body.appendChild(pill)
  return pill
}
export function compareByOrder<T extends { sortOrder: number; createdAt?: Date; id: string }>(
a: T,
b: T

View File

@@ -166,7 +166,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
const positionUpdateTimeouts = useRef<Map<string, number>>(new Map())
const isRejoiningRef = useRef<boolean>(false)
const pendingPositionUpdates = useRef<Map<string, any>>(new Map())
const deletedWorkflowIdRef = useRef<string | null>(null)
const generateSocketToken = async (): Promise<string> => {
const res = await fetch('/api/auth/socket-token', {
@@ -372,7 +371,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
socketInstance.on('workflow-deleted', (data) => {
logger.warn(`Workflow ${data.workflowId} has been deleted`)
deletedWorkflowIdRef.current = data.workflowId
setCurrentWorkflowId((current) => {
if (current === data.workflowId) {
setPresenceUsers([])
@@ -502,11 +500,7 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
if (error?.type === 'SESSION_ERROR') {
const workflowId = urlWorkflowIdRef.current
if (
workflowId &&
!isRejoiningRef.current &&
deletedWorkflowIdRef.current !== workflowId
) {
if (workflowId && !isRejoiningRef.current) {
isRejoiningRef.current = true
logger.info(`Session expired, rejoining workflow: ${workflowId}`)
socketInstance.emit('join-workflow', {
@@ -558,25 +552,13 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
const hydrationPhase = useWorkflowRegistryStore((s) => s.hydration.phase)
useEffect(() => {
if (!socket || !isConnected || !urlWorkflowId) {
if (!urlWorkflowId) {
deletedWorkflowIdRef.current = null
}
return
}
if (!socket || !isConnected || !urlWorkflowId) return
if (hydrationPhase === 'creating') return
// Skip if already in the correct room
if (currentWorkflowId === urlWorkflowId) return
// Prevent rejoining a workflow that was just deleted. The URL param may
// still reference the old workflow while router.push() propagates.
if (deletedWorkflowIdRef.current === urlWorkflowId) {
return
}
deletedWorkflowIdRef.current = null
logger.info(
`URL workflow changed from ${currentWorkflowId} to ${urlWorkflowId}, switching rooms`
)

View File

@@ -0,0 +1,469 @@
import { AthenaIcon } from '@/components/icons'
import type { BlockConfig } from '@/blocks/types'
import { IntegrationType } from '@/blocks/types'
import type {
AthenaCreateNamedQueryResponse,
AthenaGetNamedQueryResponse,
AthenaGetQueryExecutionResponse,
AthenaGetQueryResultsResponse,
AthenaListNamedQueriesResponse,
AthenaListQueryExecutionsResponse,
AthenaStartQueryResponse,
AthenaStopQueryResponse,
} from '@/tools/athena/types'
/**
 * AWS Athena block configuration.
 *
 * Exposes eight operations — start/stop/inspect query executions, fetch
 * results, and named-query management — over the `athena_*` tool set. Every
 * operation shares the same AWS credential inputs (region, access key id,
 * secret access key), which are required subBlocks and merged into each tool
 * call's params.
 */
export const AthenaBlock: BlockConfig<
  | AthenaStartQueryResponse
  | AthenaGetQueryExecutionResponse
  | AthenaGetQueryResultsResponse
  | AthenaStopQueryResponse
  | AthenaListQueryExecutionsResponse
  | AthenaCreateNamedQueryResponse
  | AthenaGetNamedQueryResponse
  | AthenaListNamedQueriesResponse
> = {
  type: 'athena',
  name: 'Athena',
  description: 'Run SQL queries on data in Amazon S3 using AWS Athena',
  longDescription:
    'Integrate AWS Athena into workflows. Execute SQL queries against data in S3, check query status, retrieve results, manage named queries, and list executions. Requires AWS access key and secret access key.',
  docsLink: 'https://docs.sim.ai/tools/athena',
  category: 'tools',
  integrationType: IntegrationType.Analytics,
  tags: ['cloud', 'data-analytics'],
  bgColor: 'linear-gradient(45deg, #4D27A8 0%, #A166FF 100%)',
  icon: AthenaIcon,
  subBlocks: [
    // Operation selector — drives the `condition`/`required` rules below.
    {
      id: 'operation',
      title: 'Operation',
      type: 'dropdown',
      options: [
        { label: 'Start Query', id: 'start_query' },
        { label: 'Get Query Execution', id: 'get_query_execution' },
        { label: 'Get Query Results', id: 'get_query_results' },
        { label: 'Stop Query', id: 'stop_query' },
        { label: 'List Query Executions', id: 'list_query_executions' },
        { label: 'Create Named Query', id: 'create_named_query' },
        { label: 'Get Named Query', id: 'get_named_query' },
        { label: 'List Named Queries', id: 'list_named_queries' },
      ],
      value: () => 'start_query',
    },
    // Shared AWS credentials — required for every operation.
    {
      id: 'awsRegion',
      title: 'AWS Region',
      type: 'short-input',
      placeholder: 'us-east-1',
      required: true,
    },
    {
      id: 'awsAccessKeyId',
      title: 'AWS Access Key ID',
      type: 'short-input',
      placeholder: 'AKIA...',
      password: true,
      required: true,
    },
    {
      id: 'awsSecretAccessKey',
      title: 'AWS Secret Access Key',
      type: 'short-input',
      placeholder: 'Your secret access key',
      password: true,
      required: true,
    },
    // SQL editor with AI-assisted generation (wandConfig).
    {
      id: 'queryString',
      title: 'SQL Query',
      type: 'code',
      placeholder: 'SELECT * FROM my_table LIMIT 10',
      condition: { field: 'operation', value: ['start_query', 'create_named_query'] },
      required: { field: 'operation', value: ['start_query', 'create_named_query'] },
      wandConfig: {
        enabled: true,
        prompt: `Generate an SQL query for AWS Athena based on the user's description.

Athena uses Trino/Presto SQL syntax. Common patterns:
- SELECT * FROM "database"."table" LIMIT 10
- SELECT column1, COUNT(*) FROM table GROUP BY column1
- SELECT * FROM table WHERE date_column > DATE '2024-01-01'
- CREATE TABLE new_table AS SELECT ... FROM source_table
- SELECT * FROM table WHERE column IN ('value1', 'value2')

Return ONLY the SQL query — no explanations, no markdown code blocks.`,
        placeholder: 'Describe what data you want to query...',
      },
    },
    {
      id: 'database',
      title: 'Database',
      type: 'short-input',
      placeholder: 'my_database',
      condition: { field: 'operation', value: ['start_query', 'create_named_query'] },
      required: { field: 'operation', value: 'create_named_query' },
    },
    {
      id: 'catalog',
      title: 'Data Catalog',
      type: 'short-input',
      placeholder: 'AwsDataCatalog',
      condition: { field: 'operation', value: 'start_query' },
      mode: 'advanced',
    },
    {
      id: 'outputLocation',
      title: 'Output Location (S3)',
      type: 'short-input',
      placeholder: 's3://my-bucket/athena-results/',
      condition: { field: 'operation', value: 'start_query' },
      mode: 'advanced',
    },
    {
      id: 'workGroup',
      title: 'Workgroup',
      type: 'short-input',
      placeholder: 'primary',
      condition: {
        field: 'operation',
        value: ['start_query', 'list_query_executions', 'create_named_query', 'list_named_queries'],
      },
      mode: 'advanced',
    },
    {
      id: 'queryExecutionId',
      title: 'Query Execution ID',
      type: 'short-input',
      placeholder: 'e.g., a1b2c3d4-5678-90ab-cdef-example11111',
      condition: {
        field: 'operation',
        value: ['get_query_execution', 'get_query_results', 'stop_query'],
      },
      required: {
        field: 'operation',
        value: ['get_query_execution', 'get_query_results', 'stop_query'],
      },
    },
    {
      id: 'namedQueryId',
      title: 'Named Query ID',
      type: 'short-input',
      placeholder: 'e.g., a1b2c3d4-5678-90ab-cdef-example11111',
      condition: { field: 'operation', value: 'get_named_query' },
      required: { field: 'operation', value: 'get_named_query' },
    },
    {
      id: 'queryName',
      title: 'Query Name',
      type: 'short-input',
      placeholder: 'My Saved Query',
      condition: { field: 'operation', value: 'create_named_query' },
      required: { field: 'operation', value: 'create_named_query' },
    },
    {
      id: 'queryDescription',
      title: 'Description',
      type: 'short-input',
      placeholder: 'Description of what this query does',
      condition: { field: 'operation', value: 'create_named_query' },
      mode: 'advanced',
    },
    // Pagination controls shared by the list/result operations.
    {
      id: 'maxResults',
      title: 'Max Results',
      type: 'short-input',
      placeholder: '50',
      condition: {
        field: 'operation',
        value: ['get_query_results', 'list_query_executions', 'list_named_queries'],
      },
      mode: 'advanced',
    },
    {
      id: 'nextToken',
      title: 'Pagination Token',
      type: 'short-input',
      placeholder: 'Token from previous request',
      condition: {
        field: 'operation',
        value: ['get_query_results', 'list_query_executions', 'list_named_queries'],
      },
      mode: 'advanced',
    },
  ],
  tools: {
    access: [
      'athena_start_query',
      'athena_get_query_execution',
      'athena_get_query_results',
      'athena_stop_query',
      'athena_list_query_executions',
      'athena_create_named_query',
      'athena_get_named_query',
      'athena_list_named_queries',
    ],
    config: {
      // Maps the selected operation to its Athena tool id.
      tool: (params) => {
        switch (params.operation) {
          case 'start_query':
            return 'athena_start_query'
          case 'get_query_execution':
            return 'athena_get_query_execution'
          case 'get_query_results':
            return 'athena_get_query_results'
          case 'stop_query':
            return 'athena_stop_query'
          case 'list_query_executions':
            return 'athena_list_query_executions'
          case 'create_named_query':
            return 'athena_create_named_query'
          case 'get_named_query':
            return 'athena_get_named_query'
          case 'list_named_queries':
            return 'athena_list_named_queries'
          default:
            throw new Error(`Invalid Athena operation: ${params.operation}`)
        }
      },
      // Builds the per-operation tool params, validating required fields at
      // runtime (the subBlock `required` rules only guard the UI layer).
      params: (params) => {
        const { operation, maxResults, ...rest } = params

        // Every Athena operation needs the same credential/region triple.
        const credentials = {
          awsRegion: rest.awsRegion,
          awsAccessKeyId: rest.awsAccessKeyId,
          awsSecretAccessKey: rest.awsSecretAccessKey,
        }

        // Coerce maxResults to an integer; drop non-numeric input instead of
        // forwarding NaN to the Athena API.
        const rawMaxResults = maxResults ? Number.parseInt(String(maxResults), 10) : undefined
        const parsedMaxResults =
          rawMaxResults !== undefined && !Number.isNaN(rawMaxResults) ? rawMaxResults : undefined

        switch (operation) {
          case 'start_query':
            if (!rest.queryString) {
              throw new Error('SQL query string is required')
            }
            return {
              ...credentials,
              queryString: rest.queryString,
              ...(rest.database && { database: rest.database }),
              ...(rest.catalog && { catalog: rest.catalog }),
              ...(rest.outputLocation && { outputLocation: rest.outputLocation }),
              ...(rest.workGroup && { workGroup: rest.workGroup }),
            }
          case 'get_query_execution':
            if (!rest.queryExecutionId) {
              throw new Error('Query execution ID is required')
            }
            return {
              ...credentials,
              queryExecutionId: rest.queryExecutionId,
            }
          case 'get_query_results':
            if (!rest.queryExecutionId) {
              throw new Error('Query execution ID is required')
            }
            return {
              ...credentials,
              queryExecutionId: rest.queryExecutionId,
              ...(parsedMaxResults !== undefined && { maxResults: parsedMaxResults }),
              ...(rest.nextToken && { nextToken: rest.nextToken }),
            }
          case 'stop_query':
            if (!rest.queryExecutionId) {
              throw new Error('Query execution ID is required')
            }
            return {
              ...credentials,
              queryExecutionId: rest.queryExecutionId,
            }
          case 'list_query_executions':
            return {
              ...credentials,
              ...(rest.workGroup && { workGroup: rest.workGroup }),
              ...(parsedMaxResults !== undefined && { maxResults: parsedMaxResults }),
              ...(rest.nextToken && { nextToken: rest.nextToken }),
            }
          case 'create_named_query': {
            if (!rest.queryName) {
              throw new Error('Query name is required')
            }
            if (!rest.database) {
              throw new Error('Database is required')
            }
            if (!rest.queryString) {
              throw new Error('SQL query string is required')
            }
            return {
              ...credentials,
              name: rest.queryName,
              database: rest.database,
              queryString: rest.queryString,
              ...(rest.queryDescription && { description: rest.queryDescription }),
              ...(rest.workGroup && { workGroup: rest.workGroup }),
            }
          }
          case 'get_named_query':
            if (!rest.namedQueryId) {
              throw new Error('Named query ID is required')
            }
            return {
              ...credentials,
              namedQueryId: rest.namedQueryId,
            }
          case 'list_named_queries':
            return {
              ...credentials,
              ...(rest.workGroup && { workGroup: rest.workGroup }),
              ...(parsedMaxResults !== undefined && { maxResults: parsedMaxResults }),
              ...(rest.nextToken && { nextToken: rest.nextToken }),
            }
          default:
            throw new Error(`Invalid Athena operation: ${operation}`)
        }
      },
    },
  },
  inputs: {
    operation: { type: 'string', description: 'Athena operation to perform' },
    awsRegion: { type: 'string', description: 'AWS region' },
    awsAccessKeyId: { type: 'string', description: 'AWS access key ID' },
    awsSecretAccessKey: { type: 'string', description: 'AWS secret access key' },
    queryString: { type: 'string', description: 'SQL query string' },
    database: { type: 'string', description: 'Database name' },
    catalog: { type: 'string', description: 'Data catalog name' },
    outputLocation: { type: 'string', description: 'S3 output location for results' },
    workGroup: { type: 'string', description: 'Athena workgroup name' },
    queryExecutionId: { type: 'string', description: 'Query execution ID' },
    namedQueryId: { type: 'string', description: 'Named query ID' },
    queryName: { type: 'string', description: 'Name for a saved query' },
    queryDescription: { type: 'string', description: 'Description for a saved query' },
    maxResults: { type: 'number', description: 'Maximum number of results' },
    nextToken: { type: 'string', description: 'Pagination token' },
  },
  // Union of outputs across all operations; each tool populates its subset.
  outputs: {
    queryExecutionId: {
      type: 'string',
      description: 'Query execution ID',
    },
    query: {
      type: 'string',
      description: 'SQL query string',
    },
    state: {
      type: 'string',
      description: 'Query state (QUEUED, RUNNING, SUCCEEDED, FAILED, CANCELLED)',
    },
    stateChangeReason: {
      type: 'string',
      description: 'Reason for state change',
    },
    statementType: {
      type: 'string',
      description: 'Statement type (DDL, DML, UTILITY)',
    },
    database: {
      type: 'string',
      description: 'Database name',
    },
    catalog: {
      type: 'string',
      description: 'Data catalog name',
    },
    workGroup: {
      type: 'string',
      description: 'Workgroup name',
    },
    submissionDateTime: {
      type: 'number',
      description: 'Query submission time (Unix epoch ms)',
    },
    completionDateTime: {
      type: 'number',
      description: 'Query completion time (Unix epoch ms)',
    },
    dataScannedInBytes: {
      type: 'number',
      description: 'Data scanned in bytes',
    },
    engineExecutionTimeInMillis: {
      type: 'number',
      description: 'Engine execution time in ms',
    },
    queryPlanningTimeInMillis: {
      type: 'number',
      description: 'Query planning time in ms',
    },
    queryQueueTimeInMillis: {
      type: 'number',
      description: 'Time spent in queue in ms',
    },
    totalExecutionTimeInMillis: {
      type: 'number',
      description: 'Total execution time in ms',
    },
    outputLocation: {
      type: 'string',
      description: 'S3 location of query results',
    },
    columns: {
      type: 'array',
      description: 'Column metadata (name and type)',
    },
    rows: {
      type: 'array',
      description: 'Result rows as key-value objects',
    },
    nextToken: {
      type: 'string',
      description: 'Pagination token for next page',
    },
    updateCount: {
      type: 'number',
      description: 'Rows affected by INSERT/UPDATE',
    },
    success: {
      type: 'boolean',
      description: 'Whether the operation succeeded',
    },
    queryExecutionIds: {
      type: 'array',
      description: 'List of query execution IDs',
    },
    namedQueryId: {
      type: 'string',
      description: 'Named query ID',
    },
    name: {
      type: 'string',
      description: 'Named query name',
    },
    description: {
      type: 'string',
      description: 'Named query description',
    },
    queryString: {
      type: 'string',
      description: 'Named query SQL string',
    },
    namedQueryIds: {
      type: 'array',
      description: 'List of named query IDs',
    },
  },
}

View File

@@ -1634,8 +1634,21 @@ Do not include any explanations, markdown formatting, or other text outside the
// Trigger outputs (when used as webhook trigger)
event_type: { type: 'string', description: 'Type of Slack event that triggered the workflow' },
subtype: {
type: 'string',
description:
'Message subtype (e.g., channel_join, channel_leave, bot_message). Null for regular user messages',
},
channel_name: { type: 'string', description: 'Human-readable channel name' },
channel_type: {
type: 'string',
description: 'Type of channel (e.g., channel, group, im, mpim)',
},
user_name: { type: 'string', description: 'Username who triggered the event' },
bot_id: {
type: 'string',
description: 'Bot ID if the message was sent by a bot. Null for human users',
},
timestamp: { type: 'string', description: 'Message timestamp from the triggering event' },
thread_ts: {
type: 'string',

View File

@@ -13,6 +13,7 @@ import { ApolloBlock } from '@/blocks/blocks/apollo'
import { ArxivBlock } from '@/blocks/blocks/arxiv'
import { AsanaBlock } from '@/blocks/blocks/asana'
import { AshbyBlock } from '@/blocks/blocks/ashby'
import { AthenaBlock } from '@/blocks/blocks/athena'
import { AttioBlock } from '@/blocks/blocks/attio'
import { BoxBlock } from '@/blocks/blocks/box'
import { BrandfetchBlock } from '@/blocks/blocks/brandfetch'
@@ -236,6 +237,7 @@ export const registry: Record<string, BlockConfig> = {
arxiv: ArxivBlock,
asana: AsanaBlock,
ashby: AshbyBlock,
athena: AthenaBlock,
attio: AttioBlock,
brandfetch: BrandfetchBlock,
box: BoxBlock,

View File

@@ -4687,6 +4687,33 @@ export function CloudFormationIcon(props: SVGProps<SVGSVGElement>) {
)
}
/**
 * Amazon Athena service icon from the AWS Architecture icon set.
 *
 * Renders an inline 80x80 SVG; the glyph is drawn with `fill='currentColor'`
 * so it inherits the surrounding text color. The `<g>` transform scales the
 * artwork by 1.25 around its center (translate → scale → translate back).
 * All incoming SVG props are spread onto the root element so callers can
 * control size/class/aria attributes.
 */
export function AthenaIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg
{...props}
viewBox='0 0 80 80'
version='1.1'
xmlns='http://www.w3.org/2000/svg'
xmlnsXlink='http://www.w3.org/1999/xlink'
>
<g
id='Icon-Architecture/64/Arch_Amazon-Athena_64'
stroke='none'
strokeWidth='1'
fill='none'
fillRule='evenodd'
transform='translate(40, 40) scale(1.25) translate(-40, -40)'
>
<path
d='M38.29505,27.2267312 C42.787319,27.2267312 45.2478437,28.2331825 45.6964751,28.7379193 C45.2478437,29.2426562 42.787319,30.2491074 38.29505,30.2491074 C33.8027811,30.2491074 31.3422564,29.2426562 30.893625,28.7379193 C31.3422564,28.2331825 33.8027811,27.2267312 38.29505,27.2267312 L38.29505,27.2267312 Z M37.7838882,35.2823712 C37.6191254,35.1977447 37.5029973,35.0294991 37.5029973,34.8300223 C37.5029973,34.5499487 37.7292981,34.3212556 38.0062188,34.3212556 C38.0866151,34.3212556 38.1600636,34.3444272 38.2285494,34.3796882 L37.7838882,35.2823712 Z M43.5674612,43.5908834 C43.4930201,43.6513309 43.322302,43.7681961 42.9709403,43.9092403 C42.6582879,44.0341652 42.2880677,44.1470006 41.8682202,44.2457316 C40.7525971,44.5076708 39.3808968,44.6517374 38.0052262,44.6517374 C34.9968155,44.6517374 32.9005556,44.0019265 32.4489466,43.5989431 L31.1159556,31.150783 C33.1596104,31.9869737 36.1700063,32.2640249 38.29505,32.2640249 C40.3843621,32.2640249 43.3292498,31.9950334 45.3719121,31.1910813 L44.5748967,36.6656121 C43.0731726,36.0994203 41.1992434,35.2773339 39.4235763,34.4129344 C39.2429327,33.786295 38.6801584,33.3248789 38.0062188,33.3248789 C37.1883598,33.3248789 36.5233532,34.0008837 36.5233532,34.8300223 C36.5233532,35.6611757 37.1883598,36.3361731 38.0062188,36.3361731 C38.1997655,36.3361731 38.3843793,36.2958747 38.5531123,36.2273675 C41.0344805,37.4524373 42.8835961,38.2382552 44.2751474,38.7228428 L43.5674612,43.5908834 Z M28.8718062,28.8467249 L30.4787403,43.8498003 C30.5918907,46.6344162 37.6995217,46.6666549 38.0052262,46.6666549 C39.5268012,46.6666549 41.0573091,46.5034466 42.3148665,46.2092686 C42.8299985,46.0883736 43.2964958,45.9453144 43.7004625,45.7831136 C44.8736534,45.3116229 45.4890327,44.6688642 45.5317122,43.8739793 L46.2006891,39.2759376 C46.6562683,39.3696313 47.0284735,39.4109371 47.3252452,39.4109371 C48.2592321,39.4109371 48.5053839,39.0281028 48.6751094,38.7641486 C48.853768,38.48609 48.9053804,38.1445615 48.8220064,37.8010181 
C48.6314374,37.0111704 47.5168068,35.971473 46.7723963,35.3539008 L47.7133311,28.8850083 L47.7043982,28.8840008 C47.7083684,28.8346354 47.7242492,28.7882923 47.7242492,28.7379193 C47.7242492,25.9543109 41.7967568,25.2118138 38.29505,25.2118138 C34.7933433,25.2118138 28.8658509,25.9543109 28.8658509,28.7379193 C28.8658509,28.7751953 28.8787541,28.8084414 28.8807391,28.8457174 L28.8718062,28.8467249 Z M37.8355007,20.0596698 C46.4865427,20.0596698 53.5246954,27.2035597 53.5246954,35.98457 C53.5246954,44.7655803 46.4865427,51.9094701 37.8355007,51.9094701 C29.1834661,51.9094701 22.1453133,44.7655803 22.1453133,35.98457 C22.1453133,27.2035597 29.1834661,20.0596698 37.8355007,20.0596698 L37.8355007,20.0596698 Z M12.9850945,41.8348828 L12.9850945,43.8498003 L21.91802,43.8498003 L21.91802,43.7309201 C24.7735785,49.7494786 30.8261318,53.9243876 37.8355007,53.9243876 C47.5803298,53.9243876 55.50979,45.8768072 55.50979,35.98457 C55.50979,26.0923327 47.5803298,18.0447524 37.8355007,18.0447524 C30.253432,18.0447524 23.7909567,22.9248825 21.2857674,29.7453781 L12.9850945,29.7453781 L12.9850945,31.7602955 L20.6763434,31.7602955 C20.3666686,33.0568949 20.1850325,34.4018523 20.1701443,35.7901304 L11,35.7901304 L11,37.8050479 L20.2515331,37.8050479 C20.3914823,39.2044081 20.7061198,40.548358 21.1448257,41.8348828 L12.9850945,41.8348828 Z M67.0799136,66.035049 C65.8789314,67.2560889 63.7965672,67.2631412 62.5965775,66.046131 L51.9326496,55.220987 C53.6487638,53.9223727 55.1802643,52.3900279 56.4934043,50.6763406 L67.0918241,61.4853653 C67.688345,62.0918555 68.0168782,62.8998374 68.014902,63.7591997 C68.0139005,64.6205769 67.6823898,65.4275513 67.0799136,66.035049 L67.0799136,66.035049 Z M68.4972711,60.0628336 L57.6616325,49.0100039 C60.0635969,45.2562127 61.4650736,40.7851108 61.4650736,35.98457 C61.4650736,22.7586518 50.8646687,12 37.8355007,12 C28.4728022,12 19.9825528,17.6196048 16.2039254,26.316996 L18.0202869,27.1290077 C21.4812992,19.1630316 29.2588997,14.0149175 
37.8355007,14.0149175 C49.7708816,14.0149175 59.4799791,23.8698788 59.4799791,35.98457 C59.4799791,48.0982537 49.7708816,57.9542225 37.8355007,57.9542225 C29.8623684,57.9542225 22.5572205,53.5244265 18.7686675,46.3936336 L17.0217843,47.3507194 C21.1557437,55.1343455 29.1318536,59.9691399 37.8355007,59.9691399 C42.3912926,59.9691399 46.6483279,58.6503765 50.2602074,56.3735197 L61.1941082,67.4716851 C62.1648195,68.4569797 63.4561235,69 64.8278238,69 C66.2074645,69 67.5067089,68.4529499 68.4813903,67.462618 C69.4580568,66.4773233 69.9980025,65.1635972 70,63.7622221 C70.0029653,62.3628619 69.4679823,61.0491357 68.4972711,60.0628336 L68.4972711,60.0628336 Z'
id='Amazon-Athena_Icon_64_Squid'
fill='currentColor'
/>
</g>
</svg>
)
}
export function CloudWatchIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg

View File

@@ -0,0 +1,46 @@
'use client'
import { useCallback, useEffect, useRef, useState } from 'react'
import { Check, Copy } from '@/components/emcn'
import { cn } from '@/lib/core/utils/cn'
/** Props accepted by {@link CopyCodeButton}. */
interface CopyCodeButtonProps {
/** Text written to the clipboard when the button is clicked. */
code: string
/** Optional extra class names merged onto the button element. */
className?: string
}
/**
 * Button that copies `code` to the clipboard and briefly swaps its icon
 * to a check mark (2s) as visual confirmation.
 */
export function CopyCodeButton({ code, className }: CopyCodeButtonProps) {
  // Whether the check-mark confirmation is currently being shown.
  const [copied, setCopied] = useState(false)
  // Pending timeout that reverts the icon back to the copy glyph.
  const resetTimer = useRef<ReturnType<typeof setTimeout> | null>(null)

  const copyToClipboard = useCallback(async () => {
    try {
      await navigator.clipboard.writeText(code)
    } catch {
      // Clipboard write can fail when document lacks focus or permission is denied
      return
    }
    setCopied(true)
    if (resetTimer.current !== null) {
      clearTimeout(resetTimer.current)
    }
    resetTimer.current = setTimeout(() => setCopied(false), 2000)
  }, [code])

  // Cancel any outstanding revert timer on unmount so we never call
  // setCopied on an unmounted component.
  useEffect(() => {
    return () => {
      if (resetTimer.current !== null) {
        clearTimeout(resetTimer.current)
      }
    }
  }, [])

  return (
    <button
      type='button'
      onClick={copyToClipboard}
      className={cn(
        'flex items-center gap-1 rounded px-1.5 py-0.5 text-xs transition-colors',
        className
      )}
    >
      {copied ? <Check className='size-3.5' /> : <Copy className='size-3.5' />}
    </button>
  )
}

View File

@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
import { AsanaIcon } from '@/components/icons'
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
import { computeContentHash, joinTagArray, parseTagDate } from '@/connectors/utils'
import { joinTagArray, parseTagDate } from '@/connectors/utils'
const logger = createLogger('AsanaConnector')
@@ -240,7 +240,6 @@ export const asanaConnector: ConnectorConfig = {
for (const task of result.data) {
const content = buildTaskContent(task)
const contentHash = await computeContentHash(content)
const tagNames = task.tags?.map((t) => t.name).filter(Boolean) || []
documents.push({
@@ -249,7 +248,7 @@ export const asanaConnector: ConnectorConfig = {
content,
mimeType: 'text/plain',
sourceUrl: task.permalink_url || undefined,
contentHash,
contentHash: `asana:${task.gid}:${task.modified_at ?? ''}`,
metadata: {
project: currentProjectGid,
assignee: task.assignee?.name,
@@ -315,7 +314,6 @@ export const asanaConnector: ConnectorConfig = {
if (!task) return null
const content = buildTaskContent(task)
const contentHash = await computeContentHash(content)
const tagNames = task.tags?.map((t) => t.name).filter(Boolean) || []
return {
@@ -324,7 +322,7 @@ export const asanaConnector: ConnectorConfig = {
content,
mimeType: 'text/plain',
sourceUrl: task.permalink_url || undefined,
contentHash,
contentHash: `asana:${task.gid}:${task.modified_at ?? ''}`,
metadata: {
assignee: task.assignee?.name,
completed: task.completed,

View File

@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
import { FirefliesIcon } from '@/components/icons'
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
import { computeContentHash, parseTagDate } from '@/connectors/utils'
import { parseTagDate } from '@/connectors/utils'
const logger = createLogger('FirefliesConnector')
@@ -196,17 +196,6 @@ export const firefliesConnector: ConnectorConfig = {
id
name
}
sentences {
index
speaker_name
text
}
summary {
keywords
action_items
overview
short_summary
}
}
}`,
variables
@@ -214,32 +203,27 @@ export const firefliesConnector: ConnectorConfig = {
const transcripts = (data.transcripts || []) as FirefliesTranscript[]
const documents: ExternalDocument[] = await Promise.all(
transcripts.map(async (transcript) => {
const content = formatTranscriptContent(transcript)
const contentHash = await computeContentHash(content)
const documents: ExternalDocument[] = transcripts.map((transcript) => {
const meetingDate = transcript.date ? new Date(transcript.date).toISOString() : undefined
const speakerNames = transcript.speakers?.map((s) => s.name).filter(Boolean) ?? []
const meetingDate = transcript.date ? new Date(transcript.date).toISOString() : undefined
const speakerNames = transcript.speakers?.map((s) => s.name).filter(Boolean) ?? []
return {
externalId: transcript.id,
title: transcript.title || 'Untitled Meeting',
content,
mimeType: 'text/plain' as const,
sourceUrl: transcript.transcript_url || undefined,
contentHash,
metadata: {
hostEmail: transcript.host_email,
duration: transcript.duration,
meetingDate,
participants: transcript.participants,
speakers: speakerNames,
keywords: transcript.summary?.keywords,
},
}
})
)
return {
externalId: transcript.id,
title: transcript.title || 'Untitled Meeting',
content: '',
contentDeferred: true,
mimeType: 'text/plain' as const,
sourceUrl: transcript.transcript_url || undefined,
contentHash: `fireflies:${transcript.id}:${transcript.date ?? ''}:${transcript.duration ?? ''}`,
metadata: {
hostEmail: transcript.host_email,
duration: transcript.duration,
meetingDate,
participants: transcript.participants,
speakers: speakerNames,
},
}
})
const totalFetched = ((syncContext?.totalDocsFetched as number) ?? 0) + documents.length
if (syncContext) syncContext.totalDocsFetched = totalFetched
@@ -296,7 +280,7 @@ export const firefliesConnector: ConnectorConfig = {
if (!transcript) return null
const content = formatTranscriptContent(transcript)
const contentHash = await computeContentHash(content)
const contentHash = `fireflies:${transcript.id}:${transcript.date ?? ''}:${transcript.duration ?? ''}`
const meetingDate = transcript.date ? new Date(transcript.date).toISOString() : undefined
const speakerNames = transcript.speakers?.map((s) => s.name).filter(Boolean) ?? []
@@ -305,6 +289,7 @@ export const firefliesConnector: ConnectorConfig = {
externalId: transcript.id,
title: transcript.title || 'Untitled Meeting',
content,
contentDeferred: false,
mimeType: 'text/plain',
sourceUrl: transcript.transcript_url || undefined,
contentHash,

View File

@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
import { GoogleCalendarIcon } from '@/components/icons'
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
import { computeContentHash, parseTagDate } from '@/connectors/utils'
import { parseTagDate } from '@/connectors/utils'
const logger = createLogger('GoogleCalendarConnector')
@@ -195,14 +195,12 @@ function getTimeRange(sourceConfig: Record<string, unknown>): { timeMin: string;
/**
* Converts a CalendarEvent to an ExternalDocument.
*/
async function eventToDocument(event: CalendarEvent): Promise<ExternalDocument | null> {
function eventToDocument(event: CalendarEvent): ExternalDocument | null {
if (event.status === 'cancelled') return null
const content = eventToContent(event)
if (!content.trim()) return null
const contentHash = await computeContentHash(content)
const startTime = event.start?.dateTime || event.start?.date || ''
const attendeeCount = event.attendees?.filter((a) => !a.resource).length || 0
@@ -212,7 +210,7 @@ async function eventToDocument(event: CalendarEvent): Promise<ExternalDocument |
content,
mimeType: 'text/plain',
sourceUrl: event.htmlLink || `https://calendar.google.com/calendar/event?eid=${event.id}`,
contentHash,
contentHash: `gcal:${event.id}:${event.updated ?? ''}`,
metadata: {
startTime,
endTime: event.end?.dateTime || event.end?.date || '',
@@ -348,7 +346,7 @@ export const googleCalendarConnector: ConnectorConfig = {
const documents: ExternalDocument[] = []
for (const event of events) {
const doc = await eventToDocument(event)
const doc = eventToDocument(event)
if (doc) documents.push(doc)
}
@@ -392,7 +390,7 @@ export const googleCalendarConnector: ConnectorConfig = {
if (event.status === 'cancelled') return null
return eventToDocument(event)
return eventToDocument(event) ?? null
},
validateConfig: async (

View File

@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
import { GoogleDocsIcon } from '@/components/icons'
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
import { computeContentHash, joinTagArray, parseTagDate } from '@/connectors/utils'
import { joinTagArray, parseTagDate } from '@/connectors/utils'
const logger = createLogger('GoogleDocsConnector')
@@ -117,40 +117,23 @@ async function fetchDocContent(accessToken: string, documentId: string): Promise
}
/**
* Converts a Drive file entry into an ExternalDocument by fetching its content
* from the Google Docs API.
* Creates a lightweight stub from a Drive file entry. Content is deferred
* and only fetched via getDocument for new or changed documents.
*/
async function fileToDocument(
accessToken: string,
file: DriveFile
): Promise<ExternalDocument | null> {
try {
const content = await fetchDocContent(accessToken, file.id)
if (!content.trim()) {
logger.info(`Skipping empty document: ${file.name} (${file.id})`)
return null
}
const contentHash = await computeContentHash(content)
return {
externalId: file.id,
title: file.name || 'Untitled',
content,
mimeType: 'text/plain',
sourceUrl: file.webViewLink || `https://docs.google.com/document/d/${file.id}/edit`,
contentHash,
metadata: {
modifiedTime: file.modifiedTime,
createdTime: file.createdTime,
owners: file.owners?.map((o) => o.displayName || o.emailAddress).filter(Boolean),
},
}
} catch (error) {
logger.warn(`Failed to extract content from document: ${file.name} (${file.id})`, {
error: error instanceof Error ? error.message : String(error),
})
return null
/**
 * Builds a lightweight stub ExternalDocument from a Drive file listing entry.
 * Body text stays empty with `contentDeferred: true`; the metadata-derived
 * contentHash (id + modifiedTime) lets the sync layer detect changes without
 * downloading content.
 */
function fileToStub(file: DriveFile): ExternalDocument {
  const viewUrl = file.webViewLink || `https://docs.google.com/document/d/${file.id}/edit`
  const ownerNames = file.owners?.map((o) => o.displayName || o.emailAddress).filter(Boolean)
  return {
    externalId: file.id,
    title: file.name || 'Untitled',
    content: '',
    contentDeferred: true,
    mimeType: 'text/plain',
    sourceUrl: viewUrl,
    contentHash: `gdocs:${file.id}:${file.modifiedTime ?? ''}`,
    metadata: {
      modifiedTime: file.modifiedTime,
      createdTime: file.createdTime,
      owners: ownerNames,
    },
  }
}
@@ -246,18 +229,11 @@ export const googleDocsConnector: ConnectorConfig = {
const maxDocs = sourceConfig.maxDocs ? Number(sourceConfig.maxDocs) : 0
const previouslyFetched = (syncContext?.totalDocsFetched as number) ?? 0
const CONCURRENCY = 5
const documents: ExternalDocument[] = []
for (let i = 0; i < files.length; i += CONCURRENCY) {
if (maxDocs > 0 && previouslyFetched + documents.length >= maxDocs) break
const batch = files.slice(i, i + CONCURRENCY)
const results = await Promise.all(batch.map((file) => fileToDocument(accessToken, file)))
documents.push(...(results.filter(Boolean) as ExternalDocument[]))
}
let documents = files.map(fileToStub)
if (maxDocs > 0) {
const remaining = maxDocs - previouslyFetched
if (documents.length > remaining) {
documents.splice(remaining)
documents = documents.slice(0, remaining)
}
}
@@ -300,7 +276,17 @@ export const googleDocsConnector: ConnectorConfig = {
if (file.trashed) return null
if (file.mimeType !== 'application/vnd.google-apps.document') return null
return fileToDocument(accessToken, file)
try {
const content = await fetchDocContent(accessToken, file.id)
if (!content.trim()) return null
return { ...fileToStub(file), content, contentDeferred: false }
} catch (error) {
logger.warn(`Failed to extract content from document: ${file.name} (${file.id})`, {
error: error instanceof Error ? error.message : String(error),
})
return null
}
},
validateConfig: async (

View File

@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
import { GoogleSheetsIcon } from '@/components/icons'
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
import { computeContentHash, parseTagDate } from '@/connectors/utils'
import { parseTagDate } from '@/connectors/utils'
const logger = createLogger('GoogleSheetsConnector')
@@ -168,7 +168,6 @@ async function sheetToDocument(
return null
}
const contentHash = await computeContentHash(content)
const rowCount = dataRows.length
return {
@@ -177,7 +176,7 @@ async function sheetToDocument(
content,
mimeType: 'text/plain',
sourceUrl: `https://docs.google.com/spreadsheets/d/${spreadsheetId}/edit#gid=${sheet.sheetId}`,
contentHash,
contentHash: `gsheets:${spreadsheetId}:${sheet.sheetId}:${modifiedTime ?? ''}`,
metadata: {
spreadsheetId,
spreadsheetTitle,
@@ -259,22 +258,24 @@ export const googleSheetsConnector: ConnectorConfig = {
sheetCount: sheets.length,
})
const documents: ExternalDocument[] = []
for (let i = 0; i < sheets.length; i += CONCURRENCY) {
const batch = sheets.slice(i, i + CONCURRENCY)
const results = await Promise.all(
batch.map((sheet) =>
sheetToDocument(
accessToken,
spreadsheetId,
metadata.properties.title,
sheet,
modifiedTime
)
)
)
documents.push(...(results.filter(Boolean) as ExternalDocument[]))
}
const documents: ExternalDocument[] = sheets.map((sheet) => ({
externalId: `${spreadsheetId}__sheet__${sheet.sheetId}`,
title: `${metadata.properties.title} - ${sheet.title}`,
content: '',
contentDeferred: true,
mimeType: 'text/plain',
sourceUrl: `https://docs.google.com/spreadsheets/d/${spreadsheetId}/edit#gid=${sheet.sheetId}`,
contentHash: `gsheets:${spreadsheetId}:${sheet.sheetId}:${modifiedTime ?? ''}`,
metadata: {
spreadsheetId,
spreadsheetTitle: metadata.properties.title,
sheetTitle: sheet.title,
sheetId: sheet.sheetId,
rowCount: sheet.gridProperties?.rowCount,
columnCount: sheet.gridProperties?.columnCount,
...(modifiedTime ? { modifiedTime } : {}),
},
}))
return {
documents,
@@ -324,13 +325,15 @@ export const googleSheetsConnector: ConnectorConfig = {
return null
}
return sheetToDocument(
const doc = await sheetToDocument(
accessToken,
spreadsheetId,
metadata.properties.title,
sheetEntry.properties,
modifiedTime
)
if (!doc) return null
return { ...doc, contentDeferred: false }
},
validateConfig: async (

View File

@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
import { HubspotIcon } from '@/components/icons'
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
import { computeContentHash, parseTagDate } from '@/connectors/utils'
import { parseTagDate } from '@/connectors/utils'
const logger = createLogger('HubSpotConnector')
@@ -140,16 +140,15 @@ function buildRecordContent(objectType: string, properties: Record<string, strin
/**
* Converts a HubSpot CRM record to an ExternalDocument.
*/
async function recordToDocument(
function recordToDocument(
record: Record<string, unknown>,
objectType: string,
portalId: string
): Promise<ExternalDocument> {
): ExternalDocument {
const id = record.id as string
const properties = (record.properties || {}) as Record<string, string | null>
const content = buildRecordContent(objectType, properties)
const contentHash = await computeContentHash(content)
const title = buildRecordTitle(objectType, properties)
const lastModified =
@@ -161,7 +160,7 @@ async function recordToDocument(
content,
mimeType: 'text/plain',
sourceUrl: `https://app.hubspot.com/contacts/${portalId}/record/${objectType}/${id}`,
contentHash,
contentHash: `hubspot:${id}:${lastModified ?? ''}`,
metadata: {
objectType,
owner: properties.hubspot_owner_id || undefined,
@@ -260,8 +259,8 @@ export const hubspotConnector: ConnectorConfig = {
const paging = data.paging as { next?: { after?: string } } | undefined
const nextCursor = paging?.next?.after
const documents: ExternalDocument[] = await Promise.all(
results.map((record) => recordToDocument(record, objectType, portalId))
const documents: ExternalDocument[] = results.map((record) =>
recordToDocument(record, objectType, portalId)
)
const previouslyFetched = (syncContext?.totalDocsFetched as number) ?? 0
@@ -322,7 +321,7 @@ export const hubspotConnector: ConnectorConfig = {
}
const record = await response.json()
return recordToDocument(record, objectType, portalId)
return recordToDocument(record as Record<string, unknown>, objectType, portalId)
},
validateConfig: async (

View File

@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
import { IntercomIcon } from '@/components/icons'
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
import { computeContentHash, htmlToPlainText, parseTagDate } from '@/connectors/utils'
import { htmlToPlainText, parseTagDate } from '@/connectors/utils'
const logger = createLogger('IntercomConnector')
@@ -309,7 +309,6 @@ export const intercomConnector: ConnectorConfig = {
const content = formatArticle(article)
if (!content.trim()) continue
const contentHash = await computeContentHash(content)
const updatedAt = new Date(article.updated_at * 1000).toISOString()
documents.push({
@@ -318,7 +317,7 @@ export const intercomConnector: ConnectorConfig = {
content,
mimeType: 'text/plain',
sourceUrl: `https://app.intercom.com/a/apps/_/articles/articles/${article.id}/show`,
contentHash,
contentHash: `intercom:article-${article.id}:${article.updated_at}`,
metadata: {
type: 'article',
state: article.state,
@@ -337,28 +336,23 @@ export const intercomConnector: ConnectorConfig = {
const conversations = await fetchConversations(accessToken, maxItems, conversationState)
for (const conversation of conversations) {
const detail = await fetchConversationDetail(accessToken, conversation.id)
const content = formatConversation(detail)
if (!content.trim()) continue
const contentHash = await computeContentHash(content)
const updatedAt = new Date(conversation.updated_at * 1000).toISOString()
const tags = conversation.tags?.tags?.map((t) => t.name) || []
documents.push({
externalId: `conversation-${conversation.id}`,
title: conversation.title || `Conversation #${conversation.id}`,
content,
content: '',
contentDeferred: true,
mimeType: 'text/plain',
sourceUrl: `https://app.intercom.com/a/apps/_/inbox/inbox/all/conversations/${conversation.id}`,
contentHash,
contentHash: `intercom:conversation-${conversation.id}:${conversation.updated_at}`,
metadata: {
type: 'conversation',
state: conversation.state,
tags: tags.join(', '),
updatedAt,
createdAt: new Date(conversation.created_at * 1000).toISOString(),
messageCount: (detail.conversation_parts?.total_count ?? 0) + 1,
},
})
}
@@ -383,7 +377,6 @@ export const intercomConnector: ConnectorConfig = {
const content = formatArticle(article)
if (!content.trim()) return null
const contentHash = await computeContentHash(content)
const updatedAt = new Date(article.updated_at * 1000).toISOString()
return {
@@ -392,7 +385,7 @@ export const intercomConnector: ConnectorConfig = {
content,
mimeType: 'text/plain',
sourceUrl: `https://app.intercom.com/a/apps/_/articles/articles/${article.id}/show`,
contentHash,
contentHash: `intercom:article-${article.id}:${article.updated_at}`,
metadata: {
type: 'article',
state: article.state,
@@ -410,7 +403,6 @@ export const intercomConnector: ConnectorConfig = {
const content = formatConversation(detail)
if (!content.trim()) return null
const contentHash = await computeContentHash(content)
const updatedAt = new Date(detail.updated_at * 1000).toISOString()
const tags = detail.tags?.tags?.map((t) => t.name) || []
@@ -418,9 +410,10 @@ export const intercomConnector: ConnectorConfig = {
externalId,
title: detail.title || `Conversation #${detail.id}`,
content,
contentDeferred: false,
mimeType: 'text/plain',
sourceUrl: `https://app.intercom.com/a/apps/_/inbox/inbox/all/conversations/${detail.id}`,
contentHash,
contentHash: `intercom:conversation-${detail.id}:${detail.updated_at}`,
metadata: {
type: 'conversation',
state: detail.state,

View File

@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
import { JiraIcon } from '@/components/icons'
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
import { computeContentHash, joinTagArray, parseTagDate } from '@/connectors/utils'
import { joinTagArray, parseTagDate } from '@/connectors/utils'
import { extractAdfText, getJiraCloudId } from '@/tools/jira/utils'
const logger = createLogger('JiraConnector')
@@ -33,16 +33,12 @@ function buildIssueContent(fields: Record<string, unknown>): string {
}
/**
* Converts a Jira issue API response to an ExternalDocument.
* Extracts common metadata fields from a Jira issue into an ExternalDocument
* stub with deferred content. The contentHash is metadata-based so it is
* identical whether produced during listing or full fetch.
*/
async function issueToDocument(
issue: Record<string, unknown>,
domain: string
): Promise<ExternalDocument> {
function issueToStub(issue: Record<string, unknown>, domain: string): ExternalDocument {
const fields = (issue.fields || {}) as Record<string, unknown>
const content = buildIssueContent(fields)
const contentHash = await computeContentHash(content)
const key = issue.key as string
const issueType = fields.issuetype as Record<string, unknown> | undefined
const status = fields.status as Record<string, unknown> | undefined
@@ -51,14 +47,16 @@ async function issueToDocument(
const reporter = fields.reporter as Record<string, unknown> | undefined
const project = fields.project as Record<string, unknown> | undefined
const labels = Array.isArray(fields.labels) ? (fields.labels as string[]) : []
const updated = (fields.updated as string) ?? ''
return {
externalId: String(issue.id),
title: `${key}: ${(fields.summary as string) || 'Untitled'}`,
content,
content: '',
contentDeferred: true,
mimeType: 'text/plain',
sourceUrl: `https://${domain}/browse/${key}`,
contentHash,
contentHash: `jira:${issue.id}:${updated}`,
metadata: {
key,
issueType: issueType?.name,
@@ -74,6 +72,22 @@ async function issueToDocument(
}
}
/**
 * Converts a fully-fetched Jira issue (with description and comments) into an
 * ExternalDocument whose content is resolved instead of deferred. Reuses the
 * stub builder so metadata and contentHash stay identical to the listing path.
 */
function issueToFullDocument(issue: Record<string, unknown>, domain: string): ExternalDocument {
  const issueFields = (issue.fields || {}) as Record<string, unknown>
  return {
    ...issueToStub(issue, domain),
    content: buildIssueContent(issueFields),
    contentDeferred: false,
  }
}
export const jiraConnector: ConnectorConfig = {
id: 'jira',
name: 'Jira',
@@ -162,7 +176,7 @@ export const jiraConnector: ConnectorConfig = {
params.append('maxResults', String(Math.min(PAGE_SIZE, remaining)))
params.append(
'fields',
'summary,description,comment,issuetype,status,priority,assignee,reporter,project,labels,created,updated'
'summary,issuetype,status,priority,assignee,reporter,project,labels,created,updated'
)
const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/search?${params.toString()}`
@@ -190,9 +204,7 @@ export const jiraConnector: ConnectorConfig = {
const issues = (data.issues || []) as Record<string, unknown>[]
const total = (data.total as number) ?? 0
const documents: ExternalDocument[] = await Promise.all(
issues.map((issue) => issueToDocument(issue, domain))
)
const documents: ExternalDocument[] = issues.map((issue) => issueToStub(issue, domain))
const nextStart = startAt + issues.length
const hasMore = nextStart < total && (maxIssues <= 0 || nextStart < maxIssues)
@@ -239,7 +251,7 @@ export const jiraConnector: ConnectorConfig = {
}
const issue = await response.json()
return issueToDocument(issue, domain)
return issueToFullDocument(issue, domain)
},
validateConfig: async (

View File

@@ -3,7 +3,7 @@ import { LinearIcon } from '@/components/icons'
import type { RetryOptions } from '@/lib/knowledge/documents/utils'
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
import { computeContentHash, joinTagArray, parseTagDate } from '@/connectors/utils'
import { joinTagArray, parseTagDate } from '@/connectors/utils'
const logger = createLogger('LinearConnector')
@@ -278,36 +278,34 @@ export const linearConnector: ConnectorConfig = {
const nodes = (issuesConn.nodes || []) as Record<string, unknown>[]
const pageInfo = issuesConn.pageInfo as Record<string, unknown>
const documents: ExternalDocument[] = await Promise.all(
nodes.map(async (issue) => {
const content = buildIssueContent(issue)
const contentHash = await computeContentHash(content)
const documents: ExternalDocument[] = nodes.map((issue) => {
const content = buildIssueContent(issue)
const contentHash = `linear:${issue.id}:${issue.updatedAt}`
const labelNodes = ((issue.labels as Record<string, unknown>)?.nodes || []) as Record<
string,
unknown
>[]
const labelNodes = ((issue.labels as Record<string, unknown>)?.nodes || []) as Record<
string,
unknown
>[]
return {
externalId: issue.id as string,
title: `${(issue.identifier as string) || ''}: ${(issue.title as string) || 'Untitled'}`,
content,
mimeType: 'text/plain' as const,
sourceUrl: (issue.url as string) || undefined,
contentHash,
metadata: {
identifier: issue.identifier,
state: (issue.state as Record<string, unknown>)?.name,
priority: issue.priorityLabel,
assignee: (issue.assignee as Record<string, unknown>)?.name,
labels: labelNodes.map((l) => l.name as string),
team: (issue.team as Record<string, unknown>)?.name,
project: (issue.project as Record<string, unknown>)?.name,
lastModified: issue.updatedAt,
},
}
})
)
return {
externalId: issue.id as string,
title: `${(issue.identifier as string) || ''}: ${(issue.title as string) || 'Untitled'}`,
content,
mimeType: 'text/plain' as const,
sourceUrl: (issue.url as string) || undefined,
contentHash,
metadata: {
identifier: issue.identifier,
state: (issue.state as Record<string, unknown>)?.name,
priority: issue.priorityLabel,
assignee: (issue.assignee as Record<string, unknown>)?.name,
labels: labelNodes.map((l) => l.name as string),
team: (issue.team as Record<string, unknown>)?.name,
project: (issue.project as Record<string, unknown>)?.name,
lastModified: issue.updatedAt,
},
}
})
const hasNextPage = Boolean(pageInfo.hasNextPage)
const endCursor = (pageInfo.endCursor as string) || undefined
@@ -335,7 +333,7 @@ export const linearConnector: ConnectorConfig = {
if (!issue) return null
const content = buildIssueContent(issue)
const contentHash = await computeContentHash(content)
const contentHash = `linear:${issue.id}:${issue.updatedAt}`
const labelNodes = ((issue.labels as Record<string, unknown>)?.nodes || []) as Record<
string,
@@ -346,7 +344,7 @@ export const linearConnector: ConnectorConfig = {
externalId: issue.id as string,
title: `${(issue.identifier as string) || ''}: ${(issue.title as string) || 'Untitled'}`,
content,
mimeType: 'text/plain',
mimeType: 'text/plain' as const,
sourceUrl: (issue.url as string) || undefined,
contentHash,
metadata: {
@@ -379,7 +377,6 @@ export const linearConnector: ConnectorConfig = {
}
try {
// Verify the token works by fetching teams
const data = await linearGraphQL(accessToken, TEAMS_QUERY, undefined, VALIDATE_RETRY_OPTIONS)
const teamsConn = data.teams as Record<string, unknown>
const teams = (teamsConn.nodes || []) as Record<string, unknown>[]
@@ -391,7 +388,6 @@ export const linearConnector: ConnectorConfig = {
}
}
// If teamId specified, verify it exists
const teamId = sourceConfig.teamId as string | undefined
if (teamId) {
const found = teams.some((t) => t.id === teamId)

View File

@@ -2,14 +2,37 @@ import { createLogger } from '@sim/logger'
import { OutlookIcon } from '@/components/icons'
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
import { computeContentHash, htmlToPlainText, parseTagDate } from '@/connectors/utils'
import { htmlToPlainText, parseTagDate } from '@/connectors/utils'
const logger = createLogger('OutlookConnector')
const GRAPH_API_BASE = 'https://graph.microsoft.com/v1.0/me'
const DEFAULT_MAX_CONVERSATIONS = 500
const MESSAGES_PER_PAGE = 50
const MESSAGE_FIELDS = [
/**
* Fields requested when listing messages (no body — deferred to getDocument).
*/
const LIST_MESSAGE_FIELDS = [
'id',
'conversationId',
'subject',
'from',
'toRecipients',
'receivedDateTime',
'sentDateTime',
'categories',
'importance',
'inferenceClassification',
'hasAttachments',
'webLink',
'isDraft',
'parentFolderId',
].join(',')
/**
* Fields requested when fetching full message content in getDocument.
*/
const FULL_MESSAGE_FIELDS = [
'id',
'conversationId',
'subject',
@@ -84,7 +107,7 @@ function buildInitialUrl(sourceConfig: Record<string, unknown>): string {
const params = new URLSearchParams({
$top: String(MESSAGES_PER_PAGE),
$select: MESSAGE_FIELDS,
$select: LIST_MESSAGE_FIELDS,
})
// Build $filter clauses
@@ -353,7 +376,6 @@ export const outlookConnector: ConnectorConfig = {
const headers: Record<string, string> = {
Authorization: `Bearer ${accessToken}`,
Accept: 'application/json',
Prefer: 'outlook.body-content-type="text"',
}
const response = await fetchWithRetry(url, { method: 'GET', headers })
@@ -385,7 +407,8 @@ export const outlookConnector: ConnectorConfig = {
continue
}
const convId = msg.conversationId || msg.id
if (!msg.conversationId) continue
const convId = msg.conversationId
if (!conversations[convId]) {
conversations[convId] = []
}
@@ -407,8 +430,8 @@ export const outlookConnector: ConnectorConfig = {
}
}
// Phase 2: Group conversations into documents
logger.info('Grouping Outlook messages into conversations', {
// Phase 2: Build lightweight stubs — content is deferred to getDocument
logger.info('Building Outlook conversation stubs', {
totalMessages: syncContext?._totalMessagesFetched,
totalConversations: Object.keys(conversations).length,
})
@@ -433,23 +456,26 @@ export const outlookConnector: ConnectorConfig = {
const documents: ExternalDocument[] = []
for (const [convId, msgs] of limited) {
const result = formatConversation(convId, msgs)
if (!result) continue
if (msgs.length === 0) continue
const contentHash = await computeContentHash(result.content)
const lastDate = msgs.reduce((max, m) => {
const d = m.receivedDateTime || ''
return d > max ? d : max
}, '')
// Use the first message's webLink as the source URL
const subject = msgs[0].subject || 'No Subject'
const firstWithLink = msgs.find((m) => m.webLink)
const sourceUrl = firstWithLink?.webLink || `https://outlook.office.com/mail/inbox`
const sourceUrl = firstWithLink?.webLink || 'https://outlook.office.com/mail/inbox'
documents.push({
externalId: convId,
title: result.subject,
content: result.content,
title: subject,
content: '',
contentDeferred: true,
mimeType: 'text/plain',
sourceUrl,
contentHash,
metadata: result.metadata,
contentHash: `outlook:${convId}:${lastDate}`,
metadata: {},
})
}
@@ -462,14 +488,25 @@ export const outlookConnector: ConnectorConfig = {
externalId: string
): Promise<ExternalDocument | null> => {
try {
// Fetch messages for this conversation
// Scope to the same folder as listDocuments so contentHash stays consistent
const folder = (sourceConfig.folder as string) || 'inbox'
const basePath =
folder === 'all'
? `${GRAPH_API_BASE}/messages`
: `${GRAPH_API_BASE}/mailFolders/${WELL_KNOWN_FOLDERS[folder] || folder}/messages`
const filterParts = [
`conversationId eq '${externalId.replace(/'/g, "''")}'`,
'isDraft eq false',
]
const params = new URLSearchParams({
$filter: `conversationId eq '${externalId.replace(/'/g, "''")}'`,
$select: MESSAGE_FIELDS,
$top: '50',
$filter: filterParts.join(' and '),
$select: FULL_MESSAGE_FIELDS,
$top: '250',
})
const url = `${GRAPH_API_BASE}/messages?${params.toString()}`
const url = `${basePath}?${params.toString()}`
const response = await fetchWithRetry(url, {
method: 'GET',
@@ -493,16 +530,21 @@ export const outlookConnector: ConnectorConfig = {
const result = formatConversation(externalId, messages)
if (!result) return null
const contentHash = await computeContentHash(result.content)
const lastDate = messages.reduce((max, m) => {
const d = m.receivedDateTime || ''
return d > max ? d : max
}, '')
const firstWithLink = messages.find((m) => m.webLink)
return {
externalId,
title: result.subject,
content: result.content,
contentDeferred: false,
mimeType: 'text/plain',
sourceUrl: firstWithLink?.webLink || 'https://outlook.office.com/mail/inbox',
contentHash,
contentHash: `outlook:${externalId}:${lastDate}`,
metadata: result.metadata,
}
} catch (error) {

View File

@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
import { RedditIcon } from '@/components/icons'
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
import { computeContentHash, parseTagDate } from '@/connectors/utils'
import { parseTagDate } from '@/connectors/utils'
const logger = createLogger('RedditConnector')
@@ -338,29 +338,23 @@ export const redditConnector: ConnectorConfig = {
afterToken
)
const documents: ExternalDocument[] = []
for (const post of posts) {
const content = await formatPostContent(accessToken, post, COMMENTS_PER_POST)
const contentHash = await computeContentHash(content)
documents.push({
externalId: post.id,
title: post.title,
content,
mimeType: 'text/plain',
sourceUrl: `https://www.reddit.com${post.permalink}`,
contentHash,
metadata: {
author: post.author,
score: post.score,
commentCount: post.num_comments,
flair: post.link_flair_text ?? undefined,
postDate: new Date(post.created_utc * 1000).toISOString(),
subreddit: post.subreddit,
},
})
}
const documents: ExternalDocument[] = posts.map((post) => ({
externalId: post.id,
title: post.title,
content: '',
contentDeferred: true,
mimeType: 'text/plain',
sourceUrl: `https://www.reddit.com${post.permalink}`,
contentHash: `reddit:${post.id}:${post.created_utc}`,
metadata: {
author: post.author,
score: post.score,
commentCount: post.num_comments,
flair: post.link_flair_text ?? undefined,
postDate: new Date(post.created_utc * 1000).toISOString(),
subreddit: post.subreddit,
},
}))
const totalCollected = collectedSoFar + documents.length
const hasMore = after !== null && totalCollected < maxPosts
@@ -397,15 +391,15 @@ export const redditConnector: ConnectorConfig = {
const comments =
data.length >= 2 ? extractComments(data[1] as RedditListing, COMMENTS_PER_POST) : []
const content = await formatPostContent(accessToken, post, COMMENTS_PER_POST, comments)
const contentHash = await computeContentHash(content)
return {
externalId: post.id,
title: post.title,
content,
contentDeferred: false,
mimeType: 'text/plain',
sourceUrl: `https://www.reddit.com${post.permalink}`,
contentHash,
contentHash: `reddit:${post.id}:${post.created_utc}`,
metadata: {
author: post.author,
score: post.score,

View File

@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
import { SalesforceIcon } from '@/components/icons'
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
import { computeContentHash, htmlToPlainText, parseTagDate } from '@/connectors/utils'
import { htmlToPlainText, parseTagDate } from '@/connectors/utils'
const logger = createLogger('SalesforceConnector')
@@ -12,7 +12,14 @@ const PAGE_SIZE = 200
/** SOQL field lists per object type. */
const OBJECT_FIELDS: Record<string, string[]> = {
KnowledgeArticleVersion: ['Id', 'Title', 'Summary', 'LastModifiedDate', 'ArticleNumber'],
KnowledgeArticleVersion: [
'Id',
'Title',
'Summary',
'LastModifiedDate',
'ArticleNumber',
'PublishStatus',
],
Case: ['Id', 'Subject', 'Description', 'Status', 'LastModifiedDate', 'CaseNumber'],
Account: ['Id', 'Name', 'Description', 'Industry', 'LastModifiedDate'],
Opportunity: [
@@ -146,36 +153,52 @@ function getRecordStatus(objectType: string, record: Record<string, unknown>): s
}
/**
* Converts a Salesforce record to an ExternalDocument.
* Creates a lightweight stub for a Salesforce record with metadata-based hash.
* Content is deferred and fetched later via getDocument only for new/changed docs.
*/
async function recordToDocument(
function recordToStub(
record: Record<string, unknown>,
objectType: string,
instanceUrl: string
): Promise<ExternalDocument> {
): ExternalDocument {
const id = record.Id as string
const content = buildRecordContent(objectType, record)
const contentHash = await computeContentHash(content)
const title = buildRecordTitle(objectType, record)
const lastModified = (record.LastModifiedDate as string) || ''
const baseUrl = instanceUrl.replace(`/services/data/${API_VERSION}/`, '')
return {
externalId: id,
title,
content,
content: '',
contentDeferred: true,
mimeType: 'text/plain',
sourceUrl: `${baseUrl}/${id}`,
contentHash,
contentHash: `salesforce:${id}:${lastModified}`,
metadata: {
objectType,
lastModified: (record.LastModifiedDate as string) || undefined,
lastModified: lastModified || undefined,
recordNumber: getRecordNumber(objectType, record),
status: getRecordStatus(objectType, record),
},
}
}
/**
* Builds a full ExternalDocument with content from a Salesforce record.
*/
function recordToDocument(
record: Record<string, unknown>,
objectType: string,
instanceUrl: string
): ExternalDocument {
const stub = recordToStub(record, objectType, instanceUrl)
return {
...stub,
content: buildRecordContent(objectType, record),
contentDeferred: false,
}
}
export const salesforceConnector: ConnectorConfig = {
id: 'salesforce',
name: 'Salesforce',
@@ -257,8 +280,8 @@ export const salesforceConnector: ConnectorConfig = {
const records = (data.records || []) as Record<string, unknown>[]
const nextRecordsUrl = data.nextRecordsUrl as string | undefined
const documents: ExternalDocument[] = await Promise.all(
records.map((record) => recordToDocument(record, objectType, instanceUrl))
const documents: ExternalDocument[] = records.map((record) =>
recordToStub(record, objectType, instanceUrl)
)
const previouslyFetched = (syncContext?.totalDocsFetched as number) ?? 0

View File

@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
import { ServiceNowIcon } from '@/components/icons'
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
import { computeContentHash, htmlToPlainText, parseTagDate } from '@/connectors/utils'
import { htmlToPlainText, parseTagDate } from '@/connectors/utils'
const logger = createLogger('ServiceNowConnector')
@@ -184,15 +184,13 @@ function priorityLabel(priority: string | undefined): string {
/**
* Converts a KB article record to an ExternalDocument.
*/
async function kbArticleToDocument(
article: KBArticle,
instanceUrl: string
): Promise<ExternalDocument> {
function kbArticleToDocument(article: KBArticle, instanceUrl: string): ExternalDocument {
const title = rawValue(article.short_description) || rawValue(article.number) || article.sys_id
const articleText = rawValue(article.text) || rawValue(article.wiki) || ''
const content = htmlToPlainText(articleText)
const contentHash = await computeContentHash(content)
const sysId = rawValue(article.sys_id as unknown as string) || article.sys_id
const updatedOn = rawValue(article.sys_updated_on) || ''
const contentHash = `servicenow:${sysId}:${updatedOn}`
const sourceUrl = `${instanceUrl}/kb_view.do?sys_kb_id=${sysId}`
return {
@@ -218,10 +216,7 @@ async function kbArticleToDocument(
/**
* Converts an incident record to an ExternalDocument.
*/
async function incidentToDocument(
incident: Incident,
instanceUrl: string
): Promise<ExternalDocument> {
function incidentToDocument(incident: Incident, instanceUrl: string): ExternalDocument {
const number = rawValue(incident.number)
const shortDesc = rawValue(incident.short_description)
const title = number ? `${number}: ${shortDesc || 'Untitled'}` : shortDesc || incident.sys_id
@@ -258,8 +253,9 @@ async function incidentToDocument(
}
const content = parts.join('\n')
const contentHash = await computeContentHash(content)
const sysId = rawValue(incident.sys_id as unknown as string) || incident.sys_id
const updatedOn = rawValue(incident.sys_updated_on) || ''
const contentHash = `servicenow:${sysId}:${updatedOn}`
const sourceUrl = `${instanceUrl}/incident.do?sys_id=${sysId}`
return {
@@ -478,8 +474,8 @@ export const servicenowConnector: ConnectorConfig = {
const documents: ExternalDocument[] = []
for (const record of result) {
const doc = isKB
? await kbArticleToDocument(record as unknown as KBArticle, instanceUrl)
: await incidentToDocument(record as unknown as Incident, instanceUrl)
? kbArticleToDocument(record as unknown as KBArticle, instanceUrl)
: incidentToDocument(record as unknown as Incident, instanceUrl)
if (doc.content.trim()) {
documents.push(doc)
@@ -532,8 +528,8 @@ export const servicenowConnector: ConnectorConfig = {
const record = result[0]
const doc = isKB
? await kbArticleToDocument(record as unknown as KBArticle, instanceUrl)
: await incidentToDocument(record as unknown as Incident, instanceUrl)
? kbArticleToDocument(record as unknown as KBArticle, instanceUrl)
: incidentToDocument(record as unknown as Incident, instanceUrl)
return doc.content.trim() ? doc : null
} catch (error) {

View File

@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
import { WebflowIcon } from '@/components/icons'
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
import { computeContentHash, htmlToPlainText, parseTagDate } from '@/connectors/utils'
import { htmlToPlainText, parseTagDate } from '@/connectors/utils'
const logger = createLogger('WebflowConnector')
@@ -194,8 +194,8 @@ export const webflowConnector: ConnectorConfig = {
}
const items = data.items || []
let documents: ExternalDocument[] = await Promise.all(
items.map((item) => itemToDocument(item, currentCollectionId, collectionName))
let documents: ExternalDocument[] = items.map((item) =>
itemToDocument(item, currentCollectionId, collectionName)
)
if (maxItems > 0) {
@@ -373,13 +373,14 @@ export const webflowConnector: ConnectorConfig = {
/**
* Converts a Webflow CMS item to an ExternalDocument.
*/
async function itemToDocument(
function itemToDocument(
item: WebflowItem,
collectionId: string,
collectionName: string
): Promise<ExternalDocument> {
): ExternalDocument {
const plainText = itemToPlainText(item, collectionName)
const contentHash = await computeContentHash(plainText)
const lastModified = item.lastUpdated || item.lastPublished || item.createdOn || ''
const contentHash = `webflow:${item.id}:${lastModified}`
const title = extractItemTitle(item)
const slug = (item.fieldData?.slug as string) || ''

View File

@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
import { WordpressIcon } from '@/components/icons'
import { fetchWithRetry, VALIDATE_RETRY_OPTIONS } from '@/lib/knowledge/documents/utils'
import type { ConnectorConfig, ExternalDocument, ExternalDocumentList } from '@/connectors/types'
import { computeContentHash, htmlToPlainText, joinTagArray, parseTagDate } from '@/connectors/utils'
import { htmlToPlainText, joinTagArray, parseTagDate } from '@/connectors/utils'
const logger = createLogger('WordPressConnector')
@@ -59,10 +59,10 @@ function extractTagNames(tags: Record<string, { name: string }>): string[] {
/**
* Converts a WordPress post to an ExternalDocument.
*/
async function postToDocument(post: WordPressPost): Promise<ExternalDocument> {
function postToDocument(post: WordPressPost): ExternalDocument {
const plainText = htmlToPlainText(post.content)
const fullContent = `# ${post.title}\n\n${plainText}`
const contentHash = await computeContentHash(fullContent)
const contentHash = `wordpress:${post.ID}:${post.modified || ''}`
const categories = extractCategoryNames(post.categories)
const tags = extractTagNames(post.tags)
@@ -182,7 +182,7 @@ export const wordpressConnector: ConnectorConfig = {
const data = (await response.json()) as WordPressPostsResponse
const posts = data.posts || []
const documents = await Promise.all(posts.map(postToDocument))
const documents = posts.map(postToDocument)
const totalFetched = totalDocsFetched + documents.length
if (syncContext) syncContext.totalDocsFetched = totalFetched
@@ -226,7 +226,7 @@ export const wordpressConnector: ConnectorConfig = {
}
const post = (await response.json()) as WordPressPost
return await postToDocument(post)
return postToDocument(post)
} catch (error) {
logger.warn('Failed to get WordPress document', {
externalId,

Some files were not shown because too many files have changed in this diff Show More