Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-17 10:57:56 -05:00)

Compare commits (7 commits): improvemen...staging
| Author | SHA1 | Date |
|---|---|---|
|  | 5de7228dd9 |  |
|  | 75898c69ed |  |
|  | b14672887b |  |
|  | d024c1e489 |  |
|  | d75ea37b3c |  |
|  | fd23220cc3 |  |
|  | a8d81097fc |  |
@@ -9,12 +9,12 @@
|
||||
<p align="center">
|
||||
<a href="https://sim.ai" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/badge/sim.ai-6F3DFA" alt="Sim.ai"></a>
|
||||
<a href="https://discord.gg/Hr4UWYEcTT" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/badge/Discord-Join%20Server-5865F2?logo=discord&logoColor=white" alt="Discord"></a>
|
||||
<a href="https://x.com/simdotai" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/twitter/follow/simstudioai?style=social" alt="Twitter"></a>
|
||||
<a href="https://docs.sim.ai" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/badge/Docs-6F3DFA.svg" alt="Documentation"></a> <a href="https://deepwiki.com/simstudioai/sim" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/badge/DeepWiki-1E90FF.svg" alt="DeepWiki"></a>
|
||||
<a href="https://x.com/simdotai" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/twitter/follow/simdotai?style=social" alt="Twitter"></a>
|
||||
<a href="https://docs.sim.ai" target="_blank" rel="noopener noreferrer"><img src="https://img.shields.io/badge/Docs-6F3DFA.svg" alt="Documentation"></a>
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://cursor.com/link/prompt?text=Help%20me%20set%20up%20Sim%20Studio%20locally.%20Follow%20these%20steps%3A%0A%0A1.%20First%2C%20verify%20Docker%20is%20installed%20and%20running%3A%0A%20%20%20docker%20--version%0A%20%20%20docker%20info%0A%0A2.%20Clone%20the%20repository%3A%0A%20%20%20git%20clone%20https%3A%2F%2Fgithub.com%2Fsimstudioai%2Fsim.git%0A%20%20%20cd%20sim%0A%0A3.%20Start%20the%20services%20with%20Docker%20Compose%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20up%20-d%0A%0A4.%20Wait%20for%20all%20containers%20to%20be%20healthy%20(this%20may%20take%201-2%20minutes)%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20ps%0A%0A5.%20Verify%20the%20app%20is%20accessible%20at%20http%3A%2F%2Flocalhost%3A3000%0A%0AIf%20there%20are%20any%20errors%2C%20help%20me%20troubleshoot%20them.%20Common%20issues%3A%0A-%20Port%203000%2C%203002%2C%20or%205432%20already%20in%20use%0A-%20Docker%20not%20running%0A-%20Insufficient%20memory%20(needs%2012GB%2B%20RAM)%0A%0AFor%20local%20AI%20models%20with%20Ollama%2C%20use%20this%20instead%20of%20step%203%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.ollama.yml%20--profile%20setup%20up%20-d"><img src="https://img.shields.io/badge/Set%20Up%20with-Cursor-000000?logo=cursor&logoColor=white" alt="Set Up with Cursor"></a>
|
||||
<a href="https://deepwiki.com/simstudioai/sim" target="_blank" rel="noopener noreferrer"><img src="https://deepwiki.com/badge.svg" alt="Ask DeepWiki"></a> <a href="https://cursor.com/link/prompt?text=Help%20me%20set%20up%20Sim%20Studio%20locally.%20Follow%20these%20steps%3A%0A%0A1.%20First%2C%20verify%20Docker%20is%20installed%20and%20running%3A%0A%20%20%20docker%20--version%0A%20%20%20docker%20info%0A%0A2.%20Clone%20the%20repository%3A%0A%20%20%20git%20clone%20https%3A%2F%2Fgithub.com%2Fsimstudioai%2Fsim.git%0A%20%20%20cd%20sim%0A%0A3.%20Start%20the%20services%20with%20Docker%20Compose%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20up%20-d%0A%0A4.%20Wait%20for%20all%20containers%20to%20be%20healthy%20(this%20may%20take%201-2%20minutes)%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.prod.yml%20ps%0A%0A5.%20Verify%20the%20app%20is%20accessible%20at%20http%3A%2F%2Flocalhost%3A3000%0A%0AIf%20there%20are%20any%20errors%2C%20help%20me%20troubleshoot%20them.%20Common%20issues%3A%0A-%20Port%203000%2C%203002%2C%20or%205432%20already%20in%20use%0A-%20Docker%20not%20running%0A-%20Insufficient%20memory%20(needs%2012GB%2B%20RAM)%0A%0AFor%20local%20AI%20models%20with%20Ollama%2C%20use%20this%20instead%20of%20step%203%3A%0A%20%20%20docker%20compose%20-f%20docker-compose.ollama.yml%20--profile%20setup%20up%20-d"><img src="https://img.shields.io/badge/Set%20Up%20with-Cursor-000000?logo=cursor&logoColor=white" alt="Set Up with Cursor"></a>
|
||||
</p>
|
||||
|
||||
### Build Workflows with Ease
|
||||
|
||||
@@ -36,43 +36,47 @@ Connect Google Vault to create exports, list exports, and manage holds within ma
|
||||
|
||||
### `google_vault_create_matters_export`
|
||||
|
||||
Create an export in a matter
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `matterId` | string | Yes | The matter ID |
|
||||
| `exportName` | string | Yes | Name for the export \(avoid special characters\) |
|
||||
| `corpus` | string | Yes | Data corpus to export \(MAIL, DRIVE, GROUPS, HANGOUTS_CHAT, VOICE\) |
|
||||
| `accountEmails` | string | No | Comma-separated list of user emails to scope export |
|
||||
| `orgUnitId` | string | No | Organization unit ID to scope export \(alternative to emails\) |
|
||||
| `startTime` | string | No | Start time for date filtering \(ISO 8601 format, e.g., 2024-01-01T00:00:00Z\) |
|
||||
| `endTime` | string | No | End time for date filtering \(ISO 8601 format, e.g., 2024-12-31T23:59:59Z\) |
|
||||
| `terms` | string | No | Search query terms to filter exported content |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `matters` | json | Array of matter objects \(for list_matters\) |
|
||||
| `exports` | json | Array of export objects \(for list_matters_export\) |
|
||||
| `holds` | json | Array of hold objects \(for list_matters_holds\) |
|
||||
| `matter` | json | Created matter object \(for create_matters\) |
|
||||
| `export` | json | Created export object \(for create_matters_export\) |
|
||||
| `hold` | json | Created hold object \(for create_matters_holds\) |
|
||||
| `file` | json | Downloaded export file \(UserFile\) from execution files |
|
||||
| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
|
||||
| `export` | json | Created export object |
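
As a rough illustration of how these inputs fit together, the object below scopes a MAIL export to two custodians and a date window. The field names and constraints follow the table above; the values and the surrounding call site (how a Sim workflow actually invokes the tool) are illustrative only.

```ts
// Illustrative google_vault_create_matters_export inputs (shape follows the table above).
const createExportParams = {
  matterId: 'matter-123', // required
  exportName: 'q1-mail-review', // avoid special characters
  corpus: 'MAIL', // MAIL | DRIVE | GROUPS | HANGOUTS_CHAT | VOICE
  accountEmails: 'alice@example.com,bob@example.com', // comma-separated custodians
  startTime: '2024-01-01T00:00:00Z', // ISO 8601; Vault rounds to 12 AM on the date
  endTime: '2024-03-31T23:59:59Z',
  terms: 'subject:confidential has:attachment',
}
```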
|
||||
|
||||
### `google_vault_list_matters_export`
|
||||
|
||||
List exports for a matter
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `matterId` | string | Yes | The matter ID |
|
||||
| `pageSize` | number | No | Number of exports to return per page |
|
||||
| `pageToken` | string | No | Token for pagination |
|
||||
| `exportId` | string | No | Optional export ID to fetch a specific export |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `matters` | json | Array of matter objects \(for list_matters\) |
|
||||
| `exports` | json | Array of export objects \(for list_matters_export\) |
|
||||
| `holds` | json | Array of hold objects \(for list_matters_holds\) |
|
||||
| `matter` | json | Created matter object \(for create_matters\) |
|
||||
| `export` | json | Created export object \(for create_matters_export\) |
|
||||
| `hold` | json | Created hold object \(for create_matters_holds\) |
|
||||
| `file` | json | Downloaded export file \(UserFile\) from execution files |
|
||||
| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
|
||||
| `exports` | json | Array of export objects |
|
||||
| `export` | json | Single export object \(when exportId is provided\) |
|
||||
| `nextPageToken` | string | Token for fetching next page of results |
|
||||
|
||||
### `google_vault_download_export_file`
|
||||
|
||||
@@ -82,10 +86,10 @@ Download a single file from a Google Vault export (GCS object)
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `matterId` | string | Yes | No description |
|
||||
| `bucketName` | string | Yes | No description |
|
||||
| `objectName` | string | Yes | No description |
|
||||
| `fileName` | string | No | No description |
|
||||
| `matterId` | string | Yes | The matter ID |
|
||||
| `bucketName` | string | Yes | GCS bucket name from cloudStorageSink.files.bucketName |
|
||||
| `objectName` | string | Yes | GCS object name from cloudStorageSink.files.objectName |
|
||||
| `fileName` | string | No | Optional filename override for the downloaded file |
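
The `bucketName` and `objectName` values come from a completed export's `cloudStorageSink.files` entries, so a typical flow is: list the export, read its files array, then download each object from the GCS media endpoint. A hedged sketch under those assumptions (the `cloudStorageSink.files` shape is taken from the parameter descriptions above; error handling is minimal):

```ts
// Sketch: pull bucket/object pairs from an export resource and download the first file.
// Assumes `exportResource` is the JSON returned for a completed Vault export.
async function downloadFirstFile(accessToken: string, exportResource: any): Promise<Buffer> {
  const file = exportResource.cloudStorageSink?.files?.[0]
  if (!file) throw new Error('Export has no files yet')
  const url =
    `https://storage.googleapis.com/storage/v1/b/${encodeURIComponent(file.bucketName)}` +
    `/o/${encodeURIComponent(file.objectName)}?alt=media`
  const res = await fetch(url, { headers: { Authorization: `Bearer ${accessToken}` } })
  if (!res.ok) throw new Error(`Download failed (${res.status})`)
  return Buffer.from(await res.arrayBuffer())
}
```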
|
||||
|
||||
#### Output
|
||||
|
||||
@@ -95,82 +99,84 @@ Download a single file from a Google Vault export (GCS object)
|
||||
|
||||
### `google_vault_create_matters_holds`
|
||||
|
||||
Create a hold in a matter
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `matterId` | string | Yes | The matter ID |
|
||||
| `holdName` | string | Yes | Name for the hold |
|
||||
| `corpus` | string | Yes | Data corpus to hold \(MAIL, DRIVE, GROUPS, HANGOUTS_CHAT, VOICE\) |
|
||||
| `accountEmails` | string | No | Comma-separated list of user emails to put on hold |
|
||||
| `orgUnitId` | string | No | Organization unit ID to put on hold \(alternative to accounts\) |
|
||||
| `terms` | string | No | Search terms to filter held content \(for MAIL and GROUPS corpus\) |
|
||||
| `startTime` | string | No | Start time for date filtering \(ISO 8601 format, for MAIL and GROUPS corpus\) |
|
||||
| `endTime` | string | No | End time for date filtering \(ISO 8601 format, for MAIL and GROUPS corpus\) |
|
||||
| `includeSharedDrives` | boolean | No | Include files in shared drives \(for DRIVE corpus\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `matters` | json | Array of matter objects \(for list_matters\) |
|
||||
| `exports` | json | Array of export objects \(for list_matters_export\) |
|
||||
| `holds` | json | Array of hold objects \(for list_matters_holds\) |
|
||||
| `matter` | json | Created matter object \(for create_matters\) |
|
||||
| `export` | json | Created export object \(for create_matters_export\) |
|
||||
| `hold` | json | Created hold object \(for create_matters_holds\) |
|
||||
| `file` | json | Downloaded export file \(UserFile\) from execution files |
|
||||
| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
|
||||
| `hold` | json | Created hold object |
|
||||
|
||||
### `google_vault_list_matters_holds`
|
||||
|
||||
List holds for a matter
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `matterId` | string | Yes | The matter ID |
|
||||
| `pageSize` | number | No | Number of holds to return per page |
|
||||
| `pageToken` | string | No | Token for pagination |
|
||||
| `holdId` | string | No | Optional hold ID to fetch a specific hold |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `matters` | json | Array of matter objects \(for list_matters\) |
|
||||
| `exports` | json | Array of export objects \(for list_matters_export\) |
|
||||
| `holds` | json | Array of hold objects \(for list_matters_holds\) |
|
||||
| `matter` | json | Created matter object \(for create_matters\) |
|
||||
| `export` | json | Created export object \(for create_matters_export\) |
|
||||
| `hold` | json | Created hold object \(for create_matters_holds\) |
|
||||
| `file` | json | Downloaded export file \(UserFile\) from execution files |
|
||||
| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
|
||||
| `holds` | json | Array of hold objects |
|
||||
| `hold` | json | Single hold object \(when holdId is provided\) |
|
||||
| `nextPageToken` | string | Token for fetching next page of results |
|
||||
|
||||
### `google_vault_create_matters`
|
||||
|
||||
Create a new matter in Google Vault
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `name` | string | Yes | Name for the new matter |
|
||||
| `description` | string | No | Optional description for the matter |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `matters` | json | Array of matter objects \(for list_matters\) |
|
||||
| `exports` | json | Array of export objects \(for list_matters_export\) |
|
||||
| `holds` | json | Array of hold objects \(for list_matters_holds\) |
|
||||
| `matter` | json | Created matter object \(for create_matters\) |
|
||||
| `export` | json | Created export object \(for create_matters_export\) |
|
||||
| `hold` | json | Created hold object \(for create_matters_holds\) |
|
||||
| `file` | json | Downloaded export file \(UserFile\) from execution files |
|
||||
| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
|
||||
| `matter` | json | Created matter object |
|
||||
|
||||
### `google_vault_list_matters`
|
||||
|
||||
List matters, or get a specific matter if matterId is provided
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `pageSize` | number | No | Number of matters to return per page |
|
||||
| `pageToken` | string | No | Token for pagination |
|
||||
| `matterId` | string | No | Optional matter ID to fetch a specific matter |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `matters` | json | Array of matter objects \(for list_matters\) |
|
||||
| `exports` | json | Array of export objects \(for list_matters_export\) |
|
||||
| `holds` | json | Array of hold objects \(for list_matters_holds\) |
|
||||
| `matter` | json | Created matter object \(for create_matters\) |
|
||||
| `export` | json | Created export object \(for create_matters_export\) |
|
||||
| `hold` | json | Created hold object \(for create_matters_holds\) |
|
||||
| `file` | json | Downloaded export file \(UserFile\) from execution files |
|
||||
| `nextPageToken` | string | Token for fetching next page of results \(for list operations\) |
|
||||
| `matters` | json | Array of matter objects |
|
||||
| `matter` | json | Single matter object \(when matterId is provided\) |
|
||||
| `nextPageToken` | string | Token for fetching next page of results |
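
The `nextPageToken` output implies the usual Vault pagination loop: keep calling the list endpoint with the previous token until none is returned. A minimal sketch against the REST endpoint this tool wraps (illustrative; the tool itself handles auth and paging parameters as documented above):

```ts
// Sketch: page through all matters using pageToken/nextPageToken.
async function listAllMatters(accessToken: string, pageSize = 100) {
  const matters: any[] = []
  let pageToken: string | undefined
  do {
    const qs = new URLSearchParams({ pageSize: String(pageSize) })
    if (pageToken) qs.set('pageToken', pageToken)
    const res = await fetch(`https://vault.googleapis.com/v1/matters?${qs}`, {
      headers: { Authorization: `Bearer ${accessToken}` },
    })
    const data = await res.json()
    if (!res.ok) throw new Error(data.error?.message || 'Failed to list matters')
    matters.push(...(data.matters ?? []))
    pageToken = data.nextPageToken
  } while (pageToken)
  return matters
}
```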
|
||||
|
||||
|
||||
|
||||
@@ -168,12 +168,17 @@ const NoteMarkdown = memo(function NoteMarkdown({ content }: { content: string }
|
||||
)
|
||||
})
|
||||
|
||||
export const NoteBlock = memo(function NoteBlock({ id, data }: NodeProps<NoteBlockNodeData>) {
|
||||
export const NoteBlock = memo(function NoteBlock({
|
||||
id,
|
||||
data,
|
||||
selected,
|
||||
}: NodeProps<NoteBlockNodeData>) {
|
||||
const { type, config, name } = data
|
||||
|
||||
const { activeWorkflowId, isEnabled, handleClick, hasRing, ringStyles } = useBlockVisual({
|
||||
blockId: id,
|
||||
data,
|
||||
isSelected: selected,
|
||||
})
|
||||
const storedValues = useSubBlockStore(
|
||||
useCallback(
|
||||
|
||||
@@ -66,7 +66,7 @@ export interface SubflowNodeData {
|
||||
* @param props - Node properties containing data and id
|
||||
* @returns Rendered subflow node component
|
||||
*/
|
||||
export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeData>) => {
|
||||
export const SubflowNodeComponent = memo(({ data, id, selected }: NodeProps<SubflowNodeData>) => {
|
||||
const { getNodes } = useReactFlow()
|
||||
const blockRef = useRef<HTMLDivElement>(null)
|
||||
const userPermissions = useUserPermissionsContext()
|
||||
@@ -134,13 +134,15 @@ export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeDat
|
||||
|
||||
/**
|
||||
* Determine the ring styling based on subflow state priority:
|
||||
* 1. Focused (selected in editor) or preview selected - blue ring
|
||||
* 1. Focused (selected in editor), selected (shift-click/box), or preview selected - blue ring
|
||||
* 2. Diff status (version comparison) - green/orange ring
|
||||
*/
|
||||
const hasRing = isFocused || isPreviewSelected || diffStatus === 'new' || diffStatus === 'edited'
|
||||
const isSelected = !isPreview && selected
|
||||
const hasRing =
|
||||
isFocused || isSelected || isPreviewSelected || diffStatus === 'new' || diffStatus === 'edited'
|
||||
const ringStyles = cn(
|
||||
hasRing && 'ring-[1.75px]',
|
||||
(isFocused || isPreviewSelected) && 'ring-[var(--brand-secondary)]',
|
||||
(isFocused || isSelected || isPreviewSelected) && 'ring-[var(--brand-secondary)]',
|
||||
diffStatus === 'new' && 'ring-[var(--brand-tertiary-2)]',
|
||||
diffStatus === 'edited' && 'ring-[var(--warning)]'
|
||||
)
|
||||
@@ -167,7 +169,7 @@ export const SubflowNodeComponent = memo(({ data, id }: NodeProps<SubflowNodeDat
|
||||
data-node-id={id}
|
||||
data-type='subflowNode'
|
||||
data-nesting-level={nestingLevel}
|
||||
data-subflow-selected={isFocused || isPreviewSelected}
|
||||
data-subflow-selected={isFocused || isSelected || isPreviewSelected}
|
||||
>
|
||||
{!isPreview && (
|
||||
<ActionBar blockId={id} blockType={data.kind} disabled={!userPermissions.canEdit} />
|
||||
|
||||
@@ -208,7 +208,6 @@ const tryParseJson = (value: unknown): unknown => {
|
||||
export const getDisplayValue = (value: unknown): string => {
|
||||
if (value == null || value === '') return '-'
|
||||
|
||||
// Try parsing JSON strings first
|
||||
const parsedValue = tryParseJson(value)
|
||||
|
||||
if (isMessagesArray(parsedValue)) {
|
||||
@@ -557,6 +556,7 @@ const SubBlockRow = ({
|
||||
export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
id,
|
||||
data,
|
||||
selected,
|
||||
}: NodeProps<WorkflowBlockProps>) {
|
||||
const { type, config, name, isPending } = data
|
||||
|
||||
@@ -574,7 +574,7 @@ export const WorkflowBlock = memo(function WorkflowBlock({
|
||||
hasRing,
|
||||
ringStyles,
|
||||
runPathStatus,
|
||||
} = useBlockVisual({ blockId: id, data, isPending })
|
||||
} = useBlockVisual({ blockId: id, data, isPending, isSelected: selected })
|
||||
|
||||
const currentBlock = currentWorkflow.getBlockById(id)
|
||||
|
||||
|
||||
@@ -17,6 +17,8 @@ interface UseBlockVisualProps {
|
||||
data: WorkflowBlockProps
|
||||
/** Whether the block is pending execution */
|
||||
isPending?: boolean
|
||||
/** Whether the block is selected (via shift-click or selection box) */
|
||||
isSelected?: boolean
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -28,7 +30,12 @@ interface UseBlockVisualProps {
|
||||
* @param props - The hook properties
|
||||
* @returns Visual state, click handler, and ring styling for the block
|
||||
*/
|
||||
export function useBlockVisual({ blockId, data, isPending = false }: UseBlockVisualProps) {
|
||||
export function useBlockVisual({
|
||||
blockId,
|
||||
data,
|
||||
isPending = false,
|
||||
isSelected = false,
|
||||
}: UseBlockVisualProps) {
|
||||
const isPreview = data.isPreview ?? false
|
||||
const isPreviewSelected = data.isPreviewSelected ?? false
|
||||
|
||||
@@ -42,7 +49,6 @@ export function useBlockVisual({ blockId, data, isPending = false }: UseBlockVis
|
||||
isDeletedBlock,
|
||||
} = useBlockState(blockId, currentWorkflow, data)
|
||||
|
||||
// Check if the editor panel is open for this block
|
||||
const currentBlockId = usePanelEditorStore((state) => state.currentBlockId)
|
||||
const activeTab = usePanelStore((state) => state.activeTab)
|
||||
const isEditorOpen = !isPreview && currentBlockId === blockId && activeTab === 'editor'
|
||||
@@ -68,6 +74,7 @@ export function useBlockVisual({ blockId, data, isPending = false }: UseBlockVis
|
||||
diffStatus: isPreview ? undefined : diffStatus,
|
||||
runPathStatus,
|
||||
isPreviewSelection: isPreview && isPreviewSelected,
|
||||
isSelected: isPreview ? false : isSelected,
|
||||
}),
|
||||
[
|
||||
isExecuting,
|
||||
@@ -78,6 +85,7 @@ export function useBlockVisual({ blockId, data, isPending = false }: UseBlockVis
|
||||
runPathStatus,
|
||||
isPreview,
|
||||
isPreviewSelected,
|
||||
isSelected,
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
@@ -14,6 +14,8 @@ export interface BlockRingOptions {
|
||||
diffStatus: BlockDiffStatus
|
||||
runPathStatus: BlockRunPathStatus
|
||||
isPreviewSelection?: boolean
|
||||
/** Whether the block is selected via shift-click or selection box (shows blue ring) */
|
||||
isSelected?: boolean
|
||||
}
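
For context, the new `isSelected` flag feeds the same priority chain as the existing states, so a call like the sketch below would now yield the blue selection ring. This is illustrative only: the option fields not shown in this hunk are inferred from the conditions that follow, and it assumes the function returns the `hasRing` flag and class string it computes.

```ts
// Illustrative only: a shift-clicked block with no other active state.
const { hasRing, ringClassName } = getBlockRingStyles({
  isExecuting: false,
  isEditorOpen: false,
  isPending: false,
  isDeletedBlock: false,
  diffStatus: undefined, // assumed to allow undefined, per the !diffStatus checks below
  runPathStatus: undefined,
  isSelected: true, // new flag added in this change
  isPreviewSelection: false,
})
// Expected: hasRing === true, ringClassName includes 'ring-[1.75px] ring-[var(--brand-secondary)]'
```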
|
||||
|
||||
/**
|
||||
@@ -32,11 +34,13 @@ export function getBlockRingStyles(options: BlockRingOptions): {
|
||||
diffStatus,
|
||||
runPathStatus,
|
||||
isPreviewSelection,
|
||||
isSelected,
|
||||
} = options
|
||||
|
||||
const hasRing =
|
||||
isExecuting ||
|
||||
isEditorOpen ||
|
||||
isSelected ||
|
||||
isPending ||
|
||||
diffStatus === 'new' ||
|
||||
diffStatus === 'edited' ||
|
||||
@@ -46,25 +50,37 @@ export function getBlockRingStyles(options: BlockRingOptions): {
|
||||
const ringClassName = cn(
|
||||
// Executing block: pulsing success ring with prominent thickness (highest priority)
|
||||
isExecuting && 'ring-[3.5px] ring-[var(--border-success)] animate-ring-pulse',
|
||||
// Editor open or preview selection: static blue ring
|
||||
// Editor open, selected, or preview selection: static blue ring
|
||||
!isExecuting &&
|
||||
(isEditorOpen || isPreviewSelection) &&
|
||||
(isEditorOpen || isSelected || isPreviewSelection) &&
|
||||
'ring-[1.75px] ring-[var(--brand-secondary)]',
|
||||
// Non-active states use standard ring utilities
|
||||
!isExecuting && !isEditorOpen && !isPreviewSelection && hasRing && 'ring-[1.75px]',
|
||||
!isExecuting &&
|
||||
!isEditorOpen &&
|
||||
!isSelected &&
|
||||
!isPreviewSelection &&
|
||||
hasRing &&
|
||||
'ring-[1.75px]',
|
||||
// Pending state: warning ring
|
||||
!isExecuting && !isEditorOpen && isPending && 'ring-[var(--warning)]',
|
||||
!isExecuting && !isEditorOpen && !isSelected && isPending && 'ring-[var(--warning)]',
|
||||
// Deleted state (highest priority after active/pending)
|
||||
!isExecuting && !isEditorOpen && !isPending && isDeletedBlock && 'ring-[var(--text-error)]',
|
||||
!isExecuting &&
|
||||
!isEditorOpen &&
|
||||
!isSelected &&
|
||||
!isPending &&
|
||||
isDeletedBlock &&
|
||||
'ring-[var(--text-error)]',
|
||||
// Diff states
|
||||
!isExecuting &&
|
||||
!isEditorOpen &&
|
||||
!isSelected &&
|
||||
!isPending &&
|
||||
!isDeletedBlock &&
|
||||
diffStatus === 'new' &&
|
||||
'ring-[var(--brand-tertiary-2)]',
|
||||
!isExecuting &&
|
||||
!isEditorOpen &&
|
||||
!isSelected &&
|
||||
!isPending &&
|
||||
!isDeletedBlock &&
|
||||
diffStatus === 'edited' &&
|
||||
@@ -72,6 +88,7 @@ export function getBlockRingStyles(options: BlockRingOptions): {
|
||||
// Run path states (lowest priority - only show if no other states active)
|
||||
!isExecuting &&
|
||||
!isEditorOpen &&
|
||||
!isSelected &&
|
||||
!isPending &&
|
||||
!isDeletedBlock &&
|
||||
!diffStatus &&
|
||||
@@ -79,6 +96,7 @@ export function getBlockRingStyles(options: BlockRingOptions): {
|
||||
'ring-[var(--border-success)]',
|
||||
!isExecuting &&
|
||||
!isEditorOpen &&
|
||||
!isSelected &&
|
||||
!isPending &&
|
||||
!isDeletedBlock &&
|
||||
!diffStatus &&
|
||||
|
||||
@@ -700,7 +700,23 @@ const WorkflowContent = React.memo(() => {
|
||||
triggerMode,
|
||||
})
|
||||
|
||||
collaborativeBatchAddBlocks([block], autoConnectEdge ? [autoConnectEdge] : [], {}, {}, {})
|
||||
const subBlockValues: Record<string, Record<string, unknown>> = {}
|
||||
if (block.subBlocks && Object.keys(block.subBlocks).length > 0) {
|
||||
subBlockValues[id] = {}
|
||||
for (const [subBlockId, subBlock] of Object.entries(block.subBlocks)) {
|
||||
if (subBlock.value !== null && subBlock.value !== undefined) {
|
||||
subBlockValues[id][subBlockId] = subBlock.value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
collaborativeBatchAddBlocks(
|
||||
[block],
|
||||
autoConnectEdge ? [autoConnectEdge] : [],
|
||||
{},
|
||||
{},
|
||||
subBlockValues
|
||||
)
|
||||
usePanelEditorStore.getState().setCurrentBlockId(id)
|
||||
},
|
||||
[collaborativeBatchAddBlocks, setSelectedEdges]
|
||||
|
||||
@@ -406,21 +406,13 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
|
||||
socketInstance.on('cursor-update', (data) => {
|
||||
setPresenceUsers((prev) => {
|
||||
const existingIndex = prev.findIndex((user) => user.socketId === data.socketId)
|
||||
if (existingIndex !== -1) {
|
||||
return prev.map((user) =>
|
||||
user.socketId === data.socketId ? { ...user, cursor: data.cursor } : user
|
||||
)
|
||||
if (existingIndex === -1) {
|
||||
logger.debug('Received cursor-update for unknown user', { socketId: data.socketId })
|
||||
return prev
|
||||
}
|
||||
return [
|
||||
...prev,
|
||||
{
|
||||
socketId: data.socketId,
|
||||
userId: data.userId,
|
||||
userName: data.userName,
|
||||
avatarUrl: data.avatarUrl,
|
||||
cursor: data.cursor,
|
||||
},
|
||||
]
|
||||
return prev.map((user) =>
|
||||
user.socketId === data.socketId ? { ...user, cursor: data.cursor } : user
|
||||
)
|
||||
})
|
||||
eventHandlers.current.cursorUpdate?.(data)
|
||||
})
|
||||
@@ -428,21 +420,15 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
|
||||
socketInstance.on('selection-update', (data) => {
|
||||
setPresenceUsers((prev) => {
|
||||
const existingIndex = prev.findIndex((user) => user.socketId === data.socketId)
if (existingIndex !== -1) {
return prev.map((user) =>
user.socketId === data.socketId ? { ...user, selection: data.selection } : user
)
if (existingIndex === -1) {
logger.debug('Received selection-update for unknown user', {
socketId: data.socketId,
})
return prev
}
return [
...prev,
{
socketId: data.socketId,
userId: data.userId,
userName: data.userName,
avatarUrl: data.avatarUrl,
selection: data.selection,
},
]
return prev.map((user) =>
user.socketId === data.socketId ? { ...user, selection: data.selection } : user
)
})
|
||||
eventHandlers.current.selectionUpdate?.(data)
|
||||
})
|
||||
|
||||
@@ -159,6 +159,167 @@ Return ONLY the hold name - no explanations, no quotes, no extra text.`,
|
||||
placeholder: 'Org Unit ID (alternative to emails)',
|
||||
condition: { field: 'operation', value: ['create_matters_holds', 'create_matters_export'] },
|
||||
},
|
||||
// Date filtering for exports (works with all corpus types)
|
||||
{
|
||||
id: 'startTime',
|
||||
title: 'Start Time',
|
||||
type: 'short-input',
|
||||
placeholder: 'YYYY-MM-DDTHH:mm:ssZ',
|
||||
condition: { field: 'operation', value: 'create_matters_export' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate an ISO 8601 timestamp in GMT based on the user's description for Google Vault date filtering.
|
||||
The timestamp should be in the format: YYYY-MM-DDTHH:mm:ssZ (UTC timezone).
|
||||
Note: Google Vault rounds times to 12 AM on the specified date.
|
||||
Examples:
|
||||
- "yesterday" -> Calculate yesterday's date at 00:00:00Z
|
||||
- "last week" -> Calculate 7 days ago at 00:00:00Z
|
||||
- "beginning of this month" -> Calculate the 1st of current month at 00:00:00Z
|
||||
- "January 1, 2024" -> 2024-01-01T00:00:00Z
|
||||
|
||||
Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
placeholder: 'Describe the start date (e.g., "last month", "January 1, 2024")...',
|
||||
generationType: 'timestamp',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'endTime',
|
||||
title: 'End Time',
|
||||
type: 'short-input',
|
||||
placeholder: 'YYYY-MM-DDTHH:mm:ssZ',
|
||||
condition: { field: 'operation', value: 'create_matters_export' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate an ISO 8601 timestamp in GMT based on the user's description for Google Vault date filtering.
|
||||
The timestamp should be in the format: YYYY-MM-DDTHH:mm:ssZ (UTC timezone).
|
||||
Note: Google Vault rounds times to 12 AM on the specified date.
|
||||
Examples:
|
||||
- "now" -> Current timestamp
|
||||
- "today" -> Today's date at 23:59:59Z
|
||||
- "end of last month" -> Last day of previous month at 23:59:59Z
|
||||
- "December 31, 2024" -> 2024-12-31T23:59:59Z
|
||||
|
||||
Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
placeholder: 'Describe the end date (e.g., "today", "end of last quarter")...',
|
||||
generationType: 'timestamp',
|
||||
},
|
||||
},
|
||||
// Date filtering for holds (only works with MAIL and GROUPS corpus)
|
||||
{
|
||||
id: 'holdStartTime',
|
||||
title: 'Start Time',
|
||||
type: 'short-input',
|
||||
placeholder: 'YYYY-MM-DDTHH:mm:ssZ',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'create_matters_holds',
|
||||
and: { field: 'corpus', value: ['MAIL', 'GROUPS'] },
|
||||
},
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate an ISO 8601 timestamp in GMT based on the user's description for Google Vault date filtering.
|
||||
The timestamp should be in the format: YYYY-MM-DDTHH:mm:ssZ (UTC timezone).
|
||||
Note: Google Vault rounds times to 12 AM on the specified date.
|
||||
Examples:
|
||||
- "yesterday" -> Calculate yesterday's date at 00:00:00Z
|
||||
- "last week" -> Calculate 7 days ago at 00:00:00Z
|
||||
- "beginning of this month" -> Calculate the 1st of current month at 00:00:00Z
|
||||
- "January 1, 2024" -> 2024-01-01T00:00:00Z
|
||||
|
||||
Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
placeholder: 'Describe the start date (e.g., "last month", "January 1, 2024")...',
|
||||
generationType: 'timestamp',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'holdEndTime',
|
||||
title: 'End Time',
|
||||
type: 'short-input',
|
||||
placeholder: 'YYYY-MM-DDTHH:mm:ssZ',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'create_matters_holds',
|
||||
and: { field: 'corpus', value: ['MAIL', 'GROUPS'] },
|
||||
},
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate an ISO 8601 timestamp in GMT based on the user's description for Google Vault date filtering.
|
||||
The timestamp should be in the format: YYYY-MM-DDTHH:mm:ssZ (UTC timezone).
|
||||
Note: Google Vault rounds times to 12 AM on the specified date.
|
||||
Examples:
|
||||
- "now" -> Current timestamp
|
||||
- "today" -> Today's date at 23:59:59Z
|
||||
- "end of last month" -> Last day of previous month at 23:59:59Z
|
||||
- "December 31, 2024" -> 2024-12-31T23:59:59Z
|
||||
|
||||
Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
placeholder: 'Describe the end date (e.g., "today", "end of last quarter")...',
|
||||
generationType: 'timestamp',
|
||||
},
|
||||
},
|
||||
// Search terms for exports (works with all corpus types)
|
||||
{
|
||||
id: 'terms',
|
||||
title: 'Search Terms',
|
||||
type: 'long-input',
|
||||
placeholder: 'Enter search query (e.g., from:user@example.com subject:confidential)',
|
||||
condition: { field: 'operation', value: 'create_matters_export' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a Google Vault search query based on the user's description.
|
||||
The query can use Gmail-style search operators for MAIL corpus:
|
||||
- from:user@example.com - emails from specific sender
|
||||
- to:user@example.com - emails to specific recipient
|
||||
- subject:keyword - emails with keyword in subject
|
||||
- has:attachment - emails with attachments
|
||||
- filename:pdf - emails with PDF attachments
|
||||
- before:YYYY/MM/DD - emails before date
|
||||
- after:YYYY/MM/DD - emails after date
|
||||
|
||||
For DRIVE corpus, use Drive search operators:
|
||||
- owner:user@example.com - files owned by user
|
||||
- type:document - specific file types
|
||||
|
||||
Return ONLY the search query - no explanations, no quotes, no extra text.`,
|
||||
placeholder: 'Describe what content to search for...',
|
||||
},
|
||||
},
|
||||
// Search terms for holds (only works with MAIL and GROUPS corpus)
|
||||
{
|
||||
id: 'holdTerms',
|
||||
title: 'Search Terms',
|
||||
type: 'long-input',
|
||||
placeholder: 'Enter search query (e.g., from:user@example.com subject:confidential)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'create_matters_holds',
|
||||
and: { field: 'corpus', value: ['MAIL', 'GROUPS'] },
|
||||
},
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a Google Vault search query based on the user's description.
|
||||
The query can use Gmail-style search operators:
|
||||
- from:user@example.com - emails from specific sender
|
||||
- to:user@example.com - emails to specific recipient
|
||||
- subject:keyword - emails with keyword in subject
|
||||
- has:attachment - emails with attachments
|
||||
- filename:pdf - emails with PDF attachments
|
||||
|
||||
Return ONLY the search query - no explanations, no quotes, no extra text.`,
|
||||
placeholder: 'Describe what content to search for...',
|
||||
},
|
||||
},
|
||||
// Drive-specific option for holds
|
||||
{
|
||||
id: 'includeSharedDrives',
|
||||
title: 'Include Shared Drives',
|
||||
type: 'switch',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'create_matters_holds',
|
||||
and: { field: 'corpus', value: 'DRIVE' },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'exportId',
|
||||
title: 'Export ID',
|
||||
@@ -277,10 +438,14 @@ Return ONLY the description text - no explanations, no quotes, no extra text.`,
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const { credential, ...rest } = params
|
||||
const { credential, holdStartTime, holdEndTime, holdTerms, ...rest } = params
|
||||
return {
|
||||
...rest,
|
||||
credential,
|
||||
// Map hold-specific fields to their tool parameter names
|
||||
...(holdStartTime && { startTime: holdStartTime }),
|
||||
...(holdEndTime && { endTime: holdEndTime }),
|
||||
...(holdTerms && { terms: holdTerms }),
|
||||
}
|
||||
},
|
||||
},
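
Concretely, the `params()` mapping above renames the hold-specific UI fields before they reach the tool; a worked example with illustrative values:

```ts
// What the block collects from its sub-blocks:
const blockValues = {
  credential: 'oauth-credential-id',
  matterId: 'matter-123',
  holdName: 'legal-hold-q1',
  corpus: 'MAIL',
  holdStartTime: '2024-01-01T00:00:00Z',
  holdTerms: 'from:alice@example.com',
}
// After the mapping, the tool receives the generic names instead of the hold-prefixed ones:
// { credential, matterId, holdName, corpus,
//   startTime: '2024-01-01T00:00:00Z', terms: 'from:alice@example.com' }
```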
|
||||
@@ -296,9 +461,28 @@ Return ONLY the description text - no explanations, no quotes, no extra text.`,
|
||||
corpus: { type: 'string', description: 'Data corpus (MAIL, DRIVE, GROUPS, etc.)' },
|
||||
accountEmails: { type: 'string', description: 'Comma-separated account emails' },
|
||||
orgUnitId: { type: 'string', description: 'Organization unit ID' },
|
||||
startTime: { type: 'string', description: 'Start time for date filtering (ISO 8601 format)' },
|
||||
endTime: { type: 'string', description: 'End time for date filtering (ISO 8601 format)' },
|
||||
terms: { type: 'string', description: 'Search query terms' },
|
||||
|
||||
// Create hold inputs
|
||||
holdName: { type: 'string', description: 'Name for the hold' },
|
||||
holdStartTime: {
|
||||
type: 'string',
|
||||
description: 'Start time for hold date filtering (ISO 8601 format, MAIL/GROUPS only)',
|
||||
},
|
||||
holdEndTime: {
|
||||
type: 'string',
|
||||
description: 'End time for hold date filtering (ISO 8601 format, MAIL/GROUPS only)',
|
||||
},
|
||||
holdTerms: {
|
||||
type: 'string',
|
||||
description: 'Search query terms for hold (MAIL/GROUPS only)',
|
||||
},
|
||||
includeSharedDrives: {
|
||||
type: 'boolean',
|
||||
description: 'Include files in shared drives (for DRIVE corpus holds)',
|
||||
},
|
||||
|
||||
// Download export file inputs
|
||||
bucketName: { type: 'string', description: 'GCS bucket name from export' },
|
||||
@@ -316,12 +500,32 @@ Return ONLY the description text - no explanations, no quotes, no extra text.`,
|
||||
description: { type: 'string', description: 'Matter description' },
|
||||
},
|
||||
outputs: {
|
||||
matters: { type: 'json', description: 'Array of matter objects (for list_matters)' },
|
||||
exports: { type: 'json', description: 'Array of export objects (for list_matters_export)' },
|
||||
holds: { type: 'json', description: 'Array of hold objects (for list_matters_holds)' },
|
||||
matter: { type: 'json', description: 'Created matter object (for create_matters)' },
|
||||
export: { type: 'json', description: 'Created export object (for create_matters_export)' },
|
||||
hold: { type: 'json', description: 'Created hold object (for create_matters_holds)' },
|
||||
matters: {
|
||||
type: 'json',
|
||||
description: 'Array of matter objects (for list_matters without matterId)',
|
||||
},
|
||||
exports: {
|
||||
type: 'json',
|
||||
description: 'Array of export objects (for list_matters_export without exportId)',
|
||||
},
|
||||
holds: {
|
||||
type: 'json',
|
||||
description: 'Array of hold objects (for list_matters_holds without holdId)',
|
||||
},
|
||||
matter: {
|
||||
type: 'json',
|
||||
description: 'Single matter object (for create_matters or list_matters with matterId)',
|
||||
},
|
||||
export: {
|
||||
type: 'json',
|
||||
description:
|
||||
'Single export object (for create_matters_export or list_matters_export with exportId)',
|
||||
},
|
||||
hold: {
|
||||
type: 'json',
|
||||
description:
|
||||
'Single hold object (for create_matters_holds or list_matters_holds with holdId)',
|
||||
},
|
||||
file: { type: 'json', description: 'Downloaded export file (UserFile) from execution files' },
|
||||
nextPageToken: {
|
||||
type: 'string',
|
||||
|
||||
apps/sim/executor/handlers/index.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
|
||||
import { AgentBlockHandler } from '@/executor/handlers/agent/agent-handler'
|
||||
import { ApiBlockHandler } from '@/executor/handlers/api/api-handler'
|
||||
import { ConditionBlockHandler } from '@/executor/handlers/condition/condition-handler'
|
||||
import { EvaluatorBlockHandler } from '@/executor/handlers/evaluator/evaluator-handler'
|
||||
import { FunctionBlockHandler } from '@/executor/handlers/function/function-handler'
|
||||
import { GenericBlockHandler } from '@/executor/handlers/generic/generic-handler'
|
||||
import { HumanInTheLoopBlockHandler } from '@/executor/handlers/human-in-the-loop/human-in-the-loop-handler'
|
||||
import { ResponseBlockHandler } from '@/executor/handlers/response/response-handler'
|
||||
import { RouterBlockHandler } from '@/executor/handlers/router/router-handler'
|
||||
import { TriggerBlockHandler } from '@/executor/handlers/trigger/trigger-handler'
|
||||
import { VariablesBlockHandler } from '@/executor/handlers/variables/variables-handler'
|
||||
import { WaitBlockHandler } from '@/executor/handlers/wait/wait-handler'
|
||||
import { WorkflowBlockHandler } from '@/executor/handlers/workflow/workflow-handler'
|
||||
|
||||
export {
|
||||
AgentBlockHandler,
|
||||
ApiBlockHandler,
|
||||
ConditionBlockHandler,
|
||||
EvaluatorBlockHandler,
|
||||
FunctionBlockHandler,
|
||||
GenericBlockHandler,
|
||||
ResponseBlockHandler,
|
||||
HumanInTheLoopBlockHandler,
|
||||
RouterBlockHandler,
|
||||
TriggerBlockHandler,
|
||||
VariablesBlockHandler,
|
||||
WaitBlockHandler,
|
||||
WorkflowBlockHandler,
|
||||
}
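
The new index simply re-exports the individual handlers, so call sites can import them from one path instead of thirteen. A hedged usage sketch, assuming the `@/executor/handlers` alias resolves to this file:

```ts
// Before: one deep import per handler.
// import { AgentBlockHandler } from '@/executor/handlers/agent/agent-handler'
// import { RouterBlockHandler } from '@/executor/handlers/router/router-handler'

// After: pull everything through the barrel added above.
import { AgentBlockHandler, RouterBlockHandler, TriggerBlockHandler } from '@/executor/handlers'

// Reference the classes together (constructor arguments, if any, are out of scope here).
const handlerClasses = [AgentBlockHandler, RouterBlockHandler, TriggerBlockHandler]
```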
|
||||
@@ -2499,7 +2499,9 @@ export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, any> = {
|
||||
async execute(params: EditWorkflowParams, context?: { userId: string }): Promise<any> {
|
||||
const logger = createLogger('EditWorkflowServerTool')
|
||||
const { operations, workflowId, currentUserWorkflow } = params
|
||||
if (!operations || operations.length === 0) throw new Error('operations are required')
|
||||
if (!Array.isArray(operations) || operations.length === 0) {
|
||||
throw new Error('operations are required and must be an array')
|
||||
}
|
||||
if (!workflowId) throw new Error('workflowId is required')
|
||||
|
||||
logger.info('Executing edit_workflow', {
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import crypto from 'crypto'
|
||||
import {
|
||||
db,
|
||||
webhook,
|
||||
workflow,
|
||||
workflowBlocks,
|
||||
workflowDeploymentVersion,
|
||||
@@ -22,7 +21,6 @@ import { generateLoopBlocks, generateParallelBlocks } from '@/stores/workflows/w
|
||||
const logger = createLogger('WorkflowDBHelpers')
|
||||
|
||||
export type WorkflowDeploymentVersion = InferSelectModel<typeof workflowDeploymentVersion>
|
||||
type WebhookRecord = InferSelectModel<typeof webhook>
|
||||
type SubflowInsert = InferInsertModel<typeof workflowSubflows>
|
||||
|
||||
export interface WorkflowDeploymentVersionResponse {
|
||||
@@ -337,18 +335,6 @@ export async function saveWorkflowToNormalizedTables(
|
||||
|
||||
// Start a transaction
|
||||
await db.transaction(async (tx) => {
|
||||
// Snapshot existing webhooks before deletion to preserve them through the cycle
|
||||
let existingWebhooks: WebhookRecord[] = []
|
||||
try {
|
||||
existingWebhooks = await tx.select().from(webhook).where(eq(webhook.workflowId, workflowId))
|
||||
} catch (webhookError) {
|
||||
// Webhook table might not be available in test environments
|
||||
logger.debug('Could not load webhooks before save, skipping preservation', {
|
||||
error: webhookError instanceof Error ? webhookError.message : String(webhookError),
|
||||
})
|
||||
}
|
||||
|
||||
// Clear existing data for this workflow
|
||||
await Promise.all([
|
||||
tx.delete(workflowBlocks).where(eq(workflowBlocks.workflowId, workflowId)),
|
||||
tx.delete(workflowEdges).where(eq(workflowEdges.workflowId, workflowId)),
|
||||
@@ -419,42 +405,6 @@ export async function saveWorkflowToNormalizedTables(
|
||||
if (subflowInserts.length > 0) {
|
||||
await tx.insert(workflowSubflows).values(subflowInserts)
|
||||
}
|
||||
|
||||
// Re-insert preserved webhooks if any exist and their blocks still exist
|
||||
if (existingWebhooks.length > 0) {
|
||||
try {
|
||||
const webhookInserts = existingWebhooks
|
||||
.filter((wh) => !!state.blocks?.[wh.blockId ?? ''])
|
||||
.map((wh) => ({
|
||||
id: wh.id,
|
||||
workflowId: wh.workflowId,
|
||||
blockId: wh.blockId,
|
||||
path: wh.path,
|
||||
provider: wh.provider,
|
||||
providerConfig: wh.providerConfig,
|
||||
credentialSetId: wh.credentialSetId,
|
||||
isActive: wh.isActive,
|
||||
createdAt: wh.createdAt,
|
||||
updatedAt: new Date(),
|
||||
}))
|
||||
|
||||
if (webhookInserts.length > 0) {
|
||||
await tx.insert(webhook).values(webhookInserts)
|
||||
logger.debug(`Preserved ${webhookInserts.length} webhook(s) through workflow save`, {
|
||||
workflowId,
|
||||
})
|
||||
}
|
||||
} catch (webhookInsertError) {
|
||||
// Webhook preservation is optional - don't fail the entire save if it errors
|
||||
logger.warn('Could not preserve webhooks during save', {
|
||||
error:
|
||||
webhookInsertError instanceof Error
|
||||
? webhookInsertError.message
|
||||
: String(webhookInsertError),
|
||||
workflowId,
|
||||
})
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return { success: true }
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import * as schema from '@sim/db'
|
||||
import { webhook, workflow, workflowBlocks, workflowEdges, workflowSubflows } from '@sim/db'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { InferSelectModel } from 'drizzle-orm'
|
||||
import { and, eq, inArray, or, sql } from 'drizzle-orm'
|
||||
import { drizzle } from 'drizzle-orm/postgres-js'
|
||||
import postgres from 'postgres'
|
||||
@@ -1175,14 +1174,6 @@ async function handleWorkflowOperationTx(
|
||||
parallelCount: Object.keys(parallels || {}).length,
|
||||
})
|
||||
|
||||
// Snapshot existing webhooks before deletion to preserve them through the cycle
|
||||
// (workflowBlocks has CASCADE DELETE to webhook table)
|
||||
const existingWebhooks = await tx
|
||||
.select()
|
||||
.from(webhook)
|
||||
.where(eq(webhook.workflowId, workflowId))
|
||||
|
||||
// Delete all existing blocks (this will cascade delete edges and webhooks via ON DELETE CASCADE)
|
||||
await tx.delete(workflowBlocks).where(eq(workflowBlocks.workflowId, workflowId))
|
||||
|
||||
// Delete all existing subflows
|
||||
@@ -1248,32 +1239,6 @@ async function handleWorkflowOperationTx(
|
||||
await tx.insert(workflowSubflows).values(parallelValues)
|
||||
}
|
||||
|
||||
// Re-insert preserved webhooks if any exist and their blocks still exist
|
||||
type WebhookRecord = InferSelectModel<typeof webhook>
|
||||
if (existingWebhooks.length > 0) {
|
||||
const webhookInserts = existingWebhooks
|
||||
.filter((wh: WebhookRecord) => !!blocks?.[wh.blockId ?? ''])
|
||||
.map((wh: WebhookRecord) => ({
|
||||
id: wh.id,
|
||||
workflowId: wh.workflowId,
|
||||
blockId: wh.blockId,
|
||||
path: wh.path,
|
||||
provider: wh.provider,
|
||||
providerConfig: wh.providerConfig,
|
||||
credentialSetId: wh.credentialSetId,
|
||||
isActive: wh.isActive,
|
||||
createdAt: wh.createdAt,
|
||||
updatedAt: new Date(),
|
||||
}))
|
||||
|
||||
if (webhookInserts.length > 0) {
|
||||
await tx.insert(webhook).values(webhookInserts)
|
||||
logger.debug(`Preserved ${webhookInserts.length} webhook(s) through state replacement`, {
|
||||
workflowId,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
logger.info(`Successfully replaced workflow state for ${workflowId}`)
|
||||
break
|
||||
}
|
||||
|
||||
@@ -1,15 +1,9 @@
|
||||
import type { GoogleVaultCreateMattersParams } from '@/tools/google_vault/types'
|
||||
import { enhanceGoogleVaultError } from '@/tools/google_vault/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
export interface GoogleVaultCreateMattersParams {
|
||||
accessToken: string
|
||||
name: string
|
||||
description?: string
|
||||
}
|
||||
|
||||
// matters.create
|
||||
// POST https://vault.googleapis.com/v1/matters
|
||||
export const createMattersTool: ToolConfig<GoogleVaultCreateMattersParams> = {
|
||||
id: 'create_matters',
|
||||
id: 'google_vault_create_matters',
|
||||
name: 'Vault Create Matter',
|
||||
description: 'Create a new matter in Google Vault',
|
||||
version: '1.0',
|
||||
@@ -20,9 +14,24 @@ export const createMattersTool: ToolConfig<GoogleVaultCreateMattersParams> = {
|
||||
},
|
||||
|
||||
params: {
|
||||
accessToken: { type: 'string', required: true, visibility: 'hidden' },
|
||||
name: { type: 'string', required: true, visibility: 'user-only' },
|
||||
description: { type: 'string', required: false, visibility: 'user-only' },
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'hidden',
|
||||
description: 'OAuth access token',
|
||||
},
|
||||
name: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Name for the new matter',
|
||||
},
|
||||
description: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Optional description for the matter',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
@@ -38,7 +47,8 @@ export const createMattersTool: ToolConfig<GoogleVaultCreateMattersParams> = {
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error?.message || 'Failed to create matter')
|
||||
const errorMessage = data.error?.message || 'Failed to create matter'
|
||||
throw new Error(enhanceGoogleVaultError(errorMessage))
|
||||
}
|
||||
return { success: true, output: { matter: data } }
|
||||
},
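
Each `transformResponse` in these tools now routes the raw Google error message through `enhanceGoogleVaultError` from `@/tools/google_vault/utils`. The diff never shows that helper, so the sketch below is only a guess at its shape: a pass-through that prefixes common Vault failure modes with actionable hints.

```ts
// Hypothetical shape of enhanceGoogleVaultError -- NOT the actual implementation,
// which lives in @/tools/google_vault/utils and is not part of this diff.
export function enhanceGoogleVaultError(message: string): string {
  if (/insufficient.*permission|forbidden/i.test(message)) {
    return `${message} (the authenticated account may lack the Vault privileges required for this operation)`
  }
  if (/not found/i.test(message)) {
    return `${message} (check that the matter or export ID exists and has not been deleted)`
  }
  return message
}
```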
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
import type { GoogleVaultCreateMattersExportParams } from '@/tools/google_vault/types'
|
||||
import { enhanceGoogleVaultError } from '@/tools/google_vault/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
// matters.exports.create
|
||||
// POST https://vault.googleapis.com/v1/matters/{matterId}/exports
|
||||
export const createMattersExportTool: ToolConfig<GoogleVaultCreateMattersExportParams> = {
|
||||
id: 'create_matters_export',
|
||||
name: 'Vault Create Export (by Matter)',
|
||||
id: 'google_vault_create_matters_export',
|
||||
name: 'Vault Create Export',
|
||||
description: 'Create an export in a matter',
|
||||
version: '1.0',
|
||||
|
||||
@@ -15,9 +14,24 @@ export const createMattersExportTool: ToolConfig<GoogleVaultCreateMattersExportP
|
||||
},
|
||||
|
||||
params: {
|
||||
accessToken: { type: 'string', required: true, visibility: 'hidden' },
|
||||
matterId: { type: 'string', required: true, visibility: 'user-only' },
|
||||
exportName: { type: 'string', required: true, visibility: 'user-only' },
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'hidden',
|
||||
description: 'OAuth access token',
|
||||
},
|
||||
matterId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'The matter ID',
|
||||
},
|
||||
exportName: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Name for the export (avoid special characters)',
|
||||
},
|
||||
corpus: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
@@ -36,6 +50,24 @@ export const createMattersExportTool: ToolConfig<GoogleVaultCreateMattersExportP
|
||||
visibility: 'user-only',
|
||||
description: 'Organization unit ID to scope export (alternative to emails)',
|
||||
},
|
||||
startTime: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Start time for date filtering (ISO 8601 format, e.g., 2024-01-01T00:00:00Z)',
|
||||
},
|
||||
endTime: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'End time for date filtering (ISO 8601 format, e.g., 2024-12-31T23:59:59Z)',
|
||||
},
|
||||
terms: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Search query terms to filter exported content',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
@@ -46,7 +78,6 @@ export const createMattersExportTool: ToolConfig<GoogleVaultCreateMattersExportP
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
body: (params) => {
|
||||
// Handle accountEmails - can be string (comma-separated) or array
|
||||
let emails: string[] = []
|
||||
if (params.accountEmails) {
|
||||
if (Array.isArray(params.accountEmails)) {
|
||||
@@ -75,7 +106,6 @@ export const createMattersExportTool: ToolConfig<GoogleVaultCreateMattersExportP
|
||||
terms: params.terms || undefined,
|
||||
startTime: params.startTime || undefined,
|
||||
endTime: params.endTime || undefined,
|
||||
timeZone: params.timeZone || undefined,
|
||||
...scope,
|
||||
}
|
||||
|
||||
@@ -89,7 +119,8 @@ export const createMattersExportTool: ToolConfig<GoogleVaultCreateMattersExportP
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error?.message || 'Failed to create export')
|
||||
const errorMessage = data.error?.message || 'Failed to create export'
|
||||
throw new Error(enhanceGoogleVaultError(errorMessage))
|
||||
}
|
||||
return { success: true, output: { export: data } }
|
||||
},
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
import type { GoogleVaultCreateMattersHoldsParams } from '@/tools/google_vault/types'
|
||||
import { enhanceGoogleVaultError } from '@/tools/google_vault/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
// matters.holds.create
|
||||
// POST https://vault.googleapis.com/v1/matters/{matterId}/holds
|
||||
export const createMattersHoldsTool: ToolConfig<GoogleVaultCreateMattersHoldsParams> = {
|
||||
id: 'create_matters_holds',
|
||||
name: 'Vault Create Hold (by Matter)',
|
||||
id: 'google_vault_create_matters_holds',
|
||||
name: 'Vault Create Hold',
|
||||
description: 'Create a hold in a matter',
|
||||
version: '1.0',
|
||||
|
||||
@@ -15,9 +14,24 @@ export const createMattersHoldsTool: ToolConfig<GoogleVaultCreateMattersHoldsPar
|
||||
},
|
||||
|
||||
params: {
|
||||
accessToken: { type: 'string', required: true, visibility: 'hidden' },
|
||||
matterId: { type: 'string', required: true, visibility: 'user-only' },
|
||||
holdName: { type: 'string', required: true, visibility: 'user-only' },
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'hidden',
|
||||
description: 'OAuth access token',
|
||||
},
|
||||
matterId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'The matter ID',
|
||||
},
|
||||
holdName: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'Name for the hold',
|
||||
},
|
||||
corpus: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
@@ -36,6 +50,30 @@ export const createMattersHoldsTool: ToolConfig<GoogleVaultCreateMattersHoldsPar
|
||||
visibility: 'user-only',
|
||||
description: 'Organization unit ID to put on hold (alternative to accounts)',
|
||||
},
|
||||
terms: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Search terms to filter held content (for MAIL and GROUPS corpus)',
|
||||
},
|
||||
startTime: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Start time for date filtering (ISO 8601 format, for MAIL and GROUPS corpus)',
|
||||
},
|
||||
endTime: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'End time for date filtering (ISO 8601 format, for MAIL and GROUPS corpus)',
|
||||
},
|
||||
includeSharedDrives: {
|
||||
type: 'boolean',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Include files in shared drives (for DRIVE corpus)',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
@@ -46,13 +84,11 @@ export const createMattersHoldsTool: ToolConfig<GoogleVaultCreateMattersHoldsPar
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
body: (params) => {
|
||||
// Build Hold body. One of accounts or orgUnit must be provided.
|
||||
const body: any = {
|
||||
name: params.holdName,
|
||||
corpus: params.corpus,
|
||||
}
|
||||
|
||||
// Handle accountEmails - can be string (comma-separated) or array
|
||||
let emails: string[] = []
|
||||
if (params.accountEmails) {
|
||||
if (Array.isArray(params.accountEmails)) {
|
||||
@@ -66,12 +102,29 @@ export const createMattersHoldsTool: ToolConfig<GoogleVaultCreateMattersHoldsPar
|
||||
}
|
||||
|
||||
if (emails.length > 0) {
|
||||
// Google Vault expects HeldAccount objects with 'email' or 'accountId'. Use 'email' here.
|
||||
body.accounts = emails.map((email: string) => ({ email }))
|
||||
} else if (params.orgUnitId) {
|
||||
body.orgUnit = { orgUnitId: params.orgUnitId }
|
||||
}
|
||||
|
||||
if (params.corpus === 'MAIL' || params.corpus === 'GROUPS') {
|
||||
const hasQueryParams = params.terms || params.startTime || params.endTime
|
||||
if (hasQueryParams) {
|
||||
const queryObj: any = {}
|
||||
if (params.terms) queryObj.terms = params.terms
|
||||
if (params.startTime) queryObj.startTime = params.startTime
|
||||
if (params.endTime) queryObj.endTime = params.endTime
|
||||
|
||||
if (params.corpus === 'MAIL') {
|
||||
body.query = { mailQuery: queryObj }
|
||||
} else {
|
||||
body.query = { groupsQuery: queryObj }
|
||||
}
|
||||
}
|
||||
} else if (params.corpus === 'DRIVE' && params.includeSharedDrives) {
|
||||
body.query = { driveQuery: { includeSharedDriveFiles: params.includeSharedDrives } }
|
||||
}
|
||||
|
||||
return body
|
||||
},
|
||||
},
|
||||
@@ -79,7 +132,8 @@ export const createMattersHoldsTool: ToolConfig<GoogleVaultCreateMattersHoldsPar
|
||||
transformResponse: async (response: Response) => {
|
||||
const data = await response.json()
|
||||
if (!response.ok) {
|
||||
throw new Error(data.error?.message || 'Failed to create hold')
|
||||
const errorMessage = data.error?.message || 'Failed to create hold'
|
||||
throw new Error(enhanceGoogleVaultError(errorMessage))
|
||||
}
|
||||
return { success: true, output: { hold: data } }
|
||||
},
|
||||
|
||||
@@ -1,17 +1,8 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import type { GoogleVaultDownloadExportFileParams } from '@/tools/google_vault/types'
|
||||
import { enhanceGoogleVaultError } from '@/tools/google_vault/utils'
|
||||
import type { ToolConfig } from '@/tools/types'
|
||||
|
||||
const logger = createLogger('GoogleVaultDownloadExportFileTool')
|
||||
|
||||
interface DownloadParams {
|
||||
accessToken: string
|
||||
matterId: string
|
||||
bucketName: string
|
||||
objectName: string
|
||||
fileName?: string
|
||||
}
|
||||
|
||||
export const downloadExportFileTool: ToolConfig<DownloadParams> = {
|
||||
export const downloadExportFileTool: ToolConfig<GoogleVaultDownloadExportFileParams> = {
|
||||
id: 'google_vault_download_export_file',
|
||||
name: 'Vault Download Export File',
|
||||
description: 'Download a single file from a Google Vault export (GCS object)',
|
||||
@@ -23,28 +14,51 @@ export const downloadExportFileTool: ToolConfig<DownloadParams> = {
|
||||
},
|
||||
|
||||
params: {
|
||||
accessToken: { type: 'string', required: true, visibility: 'hidden' },
|
||||
matterId: { type: 'string', required: true, visibility: 'user-only' },
|
||||
bucketName: { type: 'string', required: true, visibility: 'user-only' },
|
||||
objectName: { type: 'string', required: true, visibility: 'user-only' },
|
||||
fileName: { type: 'string', required: false, visibility: 'user-only' },
|
||||
accessToken: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'hidden',
|
||||
description: 'OAuth access token',
|
||||
},
|
||||
matterId: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'The matter ID',
|
||||
},
|
||||
bucketName: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'GCS bucket name from cloudStorageSink.files.bucketName',
|
||||
},
|
||||
objectName: {
|
||||
type: 'string',
|
||||
required: true,
|
||||
visibility: 'user-only',
|
||||
description: 'GCS object name from cloudStorageSink.files.objectName',
|
||||
},
|
||||
fileName: {
|
||||
type: 'string',
|
||||
required: false,
|
||||
visibility: 'user-only',
|
||||
description: 'Optional filename override for the downloaded file',
|
||||
},
|
||||
},
|
||||
|
||||
request: {
|
||||
url: (params) => {
|
||||
const bucket = encodeURIComponent(params.bucketName)
|
||||
const object = encodeURIComponent(params.objectName)
|
||||
// Use GCS media endpoint directly; framework will prefetch token and inject accessToken
|
||||
return `https://storage.googleapis.com/storage/v1/b/${bucket}/o/${object}?alt=media`
|
||||
},
|
||||
method: 'GET',
|
||||
headers: (params) => ({
|
||||
// Access token is injected by the tools framework when 'credential' is present
|
||||
Authorization: `Bearer ${params.accessToken}`,
|
||||
}),
|
||||
},
|
||||
|
||||
transformResponse: async (response: Response, params?: DownloadParams) => {
|
||||
transformResponse: async (response: Response, params?: GoogleVaultDownloadExportFileParams) => {
|
||||
if (!response.ok) {
|
||||
let details: any
|
||||
try {
|
||||
@@ -57,10 +71,11 @@ export const downloadExportFileTool: ToolConfig<DownloadParams> = {
|
||||
details = undefined
|
||||
}
|
||||
}
|
||||
throw new Error(details?.error || `Failed to download Vault export file (${response.status})`)
|
||||
const errorMessage =
|
||||
details?.error || `Failed to download Vault export file (${response.status})`
|
||||
throw new Error(enhanceGoogleVaultError(errorMessage))
|
||||
}
|
||||
|
||||
// Since we're just doing a HEAD request to verify access, we need to fetch the actual file
|
||||
if (!params?.accessToken || !params?.bucketName || !params?.objectName) {
|
||||
throw new Error('Missing required parameters for download')
|
||||
}
|
||||
@@ -69,7 +84,6 @@ export const downloadExportFileTool: ToolConfig<DownloadParams> = {
|
||||
const object = encodeURIComponent(params.objectName)
|
||||
const downloadUrl = `https://storage.googleapis.com/storage/v1/b/${bucket}/o/${object}?alt=media`
|
||||
|
||||
// Fetch the actual file content
|
||||
const downloadResponse = await fetch(downloadUrl, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
@@ -79,7 +93,8 @@ export const downloadExportFileTool: ToolConfig<DownloadParams> = {
|
||||
|
||||
if (!downloadResponse.ok) {
|
||||
const errorText = await downloadResponse.text().catch(() => '')
|
||||
throw new Error(`Failed to download file: ${errorText || downloadResponse.statusText}`)
|
||||
const errorMessage = `Failed to download file: ${errorText || downloadResponse.statusText}`
|
||||
throw new Error(enhanceGoogleVaultError(errorMessage))
|
||||
}
|
||||
|
||||
const contentType = downloadResponse.headers.get('content-type') || 'application/octet-stream'
|
||||
@@ -104,7 +119,6 @@ export const downloadExportFileTool: ToolConfig<DownloadParams> = {
|
||||
}
|
||||
}
|
||||
|
||||
// Get the file as an array buffer and convert to Buffer
|
||||
const arrayBuffer = await downloadResponse.arrayBuffer()
|
||||
const buffer = Buffer.from(arrayBuffer)
|
||||
|
||||
|
||||
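Outside the tools framework, the same download boils down to a single authenticated GET against the GCS JSON API media endpoint. A minimal standalone sketch, assuming a caller already holds an OAuth token with read access to the export bucket (the function name and scope note are assumptions, not part of this change):

// Minimal sketch, not the tool itself: fetch one Vault export object from GCS.
async function downloadVaultExportObject(
  accessToken: string, // OAuth token authorized for the export bucket (assumption)
  bucketName: string,
  objectName: string
): Promise<Buffer> {
  const url = `https://storage.googleapis.com/storage/v1/b/${encodeURIComponent(
    bucketName
  )}/o/${encodeURIComponent(objectName)}?alt=media`
  const res = await fetch(url, {
    headers: { Authorization: `Bearer ${accessToken}` },
  })
  if (!res.ok) {
    throw new Error(`Failed to download file (${res.status})`)
  }
  // Convert the response body to a Node Buffer, as the tool does.
  return Buffer.from(await res.arrayBuffer())
}
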
@@ -1,14 +1,9 @@
import type { GoogleVaultListMattersParams } from '@/tools/google_vault/types'
import { enhanceGoogleVaultError } from '@/tools/google_vault/utils'
import type { ToolConfig } from '@/tools/types'

export interface GoogleVaultListMattersParams {
  accessToken: string
  pageSize?: number
  pageToken?: string
  matterId?: string // Optional get for a specific matter
}

export const listMattersTool: ToolConfig<GoogleVaultListMattersParams> = {
  id: 'list_matters',
  id: 'google_vault_list_matters',
  name: 'Vault List Matters',
  description: 'List matters, or get a specific matter if matterId is provided',
  version: '1.0',
@@ -19,10 +14,30 @@ export const listMattersTool: ToolConfig<GoogleVaultListMattersParams> = {
  },

  params: {
    accessToken: { type: 'string', required: true, visibility: 'hidden' },
    pageSize: { type: 'number', required: false, visibility: 'user-only' },
    pageToken: { type: 'string', required: false, visibility: 'hidden' },
    matterId: { type: 'string', required: false, visibility: 'user-only' },
    accessToken: {
      type: 'string',
      required: true,
      visibility: 'hidden',
      description: 'OAuth access token',
    },
    pageSize: {
      type: 'number',
      required: false,
      visibility: 'user-only',
      description: 'Number of matters to return per page',
    },
    pageToken: {
      type: 'string',
      required: false,
      visibility: 'hidden',
      description: 'Token for pagination',
    },
    matterId: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Optional matter ID to fetch a specific matter',
    },
  },

  request: {
@@ -47,7 +62,8 @@ export const listMattersTool: ToolConfig<GoogleVaultListMattersParams> = {
  transformResponse: async (response: Response, params?: GoogleVaultListMattersParams) => {
    const data = await response.json()
    if (!response.ok) {
      throw new Error(data.error?.message || 'Failed to list matters')
      const errorMessage = data.error?.message || 'Failed to list matters'
      throw new Error(enhanceGoogleVaultError(errorMessage))
    }
    if (params?.matterId) {
      return { success: true, output: { matter: data } }

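For context, a hedged sketch of the two payload shapes this transform handles: with matterId the Vault API's matters.get returns a single Matter resource (wrapped as { matter: data } above), while without it matters.list returns a page of matters plus an optional pagination token. The field names below follow the public Vault API; the list branch of the tool's output mapping is not visible in this hunk.

// Illustrative payload shapes only (not the tool's code).
type VaultMatter = { matterId: string; name: string; description?: string; state: string }

// matters.get: a single matter resource.
const getExample: VaultMatter = { matterId: 'matter-123', name: 'Investigation A', state: 'OPEN' }

// matters.list: a page of matters plus an optional nextPageToken.
const listExample: { matters: VaultMatter[]; nextPageToken?: string } = {
  matters: [getExample],
  nextPageToken: 'next-page-token',
}
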
@@ -1,9 +1,10 @@
import type { GoogleVaultListMattersExportParams } from '@/tools/google_vault/types'
import { enhanceGoogleVaultError } from '@/tools/google_vault/utils'
import type { ToolConfig } from '@/tools/types'

export const listMattersExportTool: ToolConfig<GoogleVaultListMattersExportParams> = {
  id: 'list_matters_export',
  name: 'Vault List Exports (by Matter)',
  id: 'google_vault_list_matters_export',
  name: 'Vault List Exports',
  description: 'List exports for a matter',
  version: '1.0',

@@ -13,11 +14,36 @@ export const listMattersExportTool: ToolConfig<GoogleVaultListMattersExportParam
  },

  params: {
    accessToken: { type: 'string', required: true, visibility: 'hidden' },
    matterId: { type: 'string', required: true, visibility: 'user-only' },
    pageSize: { type: 'number', required: false, visibility: 'user-only' },
    pageToken: { type: 'string', required: false, visibility: 'hidden' },
    exportId: { type: 'string', required: false, visibility: 'user-only' },
    accessToken: {
      type: 'string',
      required: true,
      visibility: 'hidden',
      description: 'OAuth access token',
    },
    matterId: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'The matter ID',
    },
    pageSize: {
      type: 'number',
      required: false,
      visibility: 'user-only',
      description: 'Number of exports to return per page',
    },
    pageToken: {
      type: 'string',
      required: false,
      visibility: 'hidden',
      description: 'Token for pagination',
    },
    exportId: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Optional export ID to fetch a specific export',
    },
  },

  request: {
@@ -42,7 +68,8 @@ export const listMattersExportTool: ToolConfig<GoogleVaultListMattersExportParam
  transformResponse: async (response: Response, params?: GoogleVaultListMattersExportParams) => {
    const data = await response.json()
    if (!response.ok) {
      throw new Error(data.error?.message || 'Failed to list exports')
      const errorMessage = data.error?.message || 'Failed to list exports'
      throw new Error(enhanceGoogleVaultError(errorMessage))
    }
    if (params?.exportId) {
      return { success: true, output: { export: data } }

@@ -1,9 +1,10 @@
import type { GoogleVaultListMattersHoldsParams } from '@/tools/google_vault/types'
import { enhanceGoogleVaultError } from '@/tools/google_vault/utils'
import type { ToolConfig } from '@/tools/types'

export const listMattersHoldsTool: ToolConfig<GoogleVaultListMattersHoldsParams> = {
  id: 'list_matters_holds',
  name: 'Vault List Holds (by Matter)',
  id: 'google_vault_list_matters_holds',
  name: 'Vault List Holds',
  description: 'List holds for a matter',
  version: '1.0',

@@ -13,11 +14,36 @@ export const listMattersHoldsTool: ToolConfig<GoogleVaultListMattersHoldsParams>
  },

  params: {
    accessToken: { type: 'string', required: true, visibility: 'hidden' },
    matterId: { type: 'string', required: true, visibility: 'user-only' },
    pageSize: { type: 'number', required: false, visibility: 'user-only' },
    pageToken: { type: 'string', required: false, visibility: 'hidden' },
    holdId: { type: 'string', required: false, visibility: 'user-only' },
    accessToken: {
      type: 'string',
      required: true,
      visibility: 'hidden',
      description: 'OAuth access token',
    },
    matterId: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'The matter ID',
    },
    pageSize: {
      type: 'number',
      required: false,
      visibility: 'user-only',
      description: 'Number of holds to return per page',
    },
    pageToken: {
      type: 'string',
      required: false,
      visibility: 'hidden',
      description: 'Token for pagination',
    },
    holdId: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Optional hold ID to fetch a specific hold',
    },
  },

  request: {
@@ -42,7 +68,8 @@ export const listMattersHoldsTool: ToolConfig<GoogleVaultListMattersHoldsParams>
  transformResponse: async (response: Response, params?: GoogleVaultListMattersHoldsParams) => {
    const data = await response.json()
    if (!response.ok) {
      throw new Error(data.error?.message || 'Failed to list holds')
      const errorMessage = data.error?.message || 'Failed to list holds'
      throw new Error(enhanceGoogleVaultError(errorMessage))
    }
    if (params?.holdId) {
      return { success: true, output: { hold: data } }

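The pageSize and pageToken parameters on these list tools follow the usual Google API pagination pattern. A hedged sketch of a caller draining every page of holds; the runTool helper and the output field names (holds, nextPageToken) are assumptions for illustration, not part of this change:

// Hypothetical helper; illustrates pageToken-based pagination only.
declare function runTool(id: string, params: Record<string, unknown>): Promise<any>

async function listAllHolds(matterId: string): Promise<any[]> {
  const holds: any[] = []
  let pageToken: string | undefined
  do {
    const result = await runTool('google_vault_list_matters_holds', {
      matterId,
      pageSize: 100,
      pageToken,
    })
    holds.push(...(result.output?.holds ?? [])) // field name per Vault holds.list (assumption)
    pageToken = result.output?.nextPageToken
  } while (pageToken)
  return holds
}
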
@@ -5,31 +5,48 @@ export interface GoogleVaultCommonParams {
  matterId: string
}

// Exports
export interface GoogleVaultCreateMattersParams {
  accessToken: string
  name: string
  description?: string
}

export interface GoogleVaultListMattersParams {
  accessToken: string
  pageSize?: number
  pageToken?: string
  matterId?: string
}

export interface GoogleVaultDownloadExportFileParams {
  accessToken: string
  matterId: string
  bucketName: string
  objectName: string
  fileName?: string
}

export interface GoogleVaultCreateMattersExportParams extends GoogleVaultCommonParams {
  exportName: string
  corpus: GoogleVaultCorpus
  accountEmails?: string // Comma-separated list or array handled in the tool
  accountEmails?: string
  orgUnitId?: string
  terms?: string
  startTime?: string
  endTime?: string
  timeZone?: string
  includeSharedDrives?: boolean
}

export interface GoogleVaultListMattersExportParams extends GoogleVaultCommonParams {
  pageSize?: number
  pageToken?: string
  exportId?: string // Short input to fetch a specific export
  exportId?: string
}

export interface GoogleVaultListMattersExportResponse extends ToolResponse {
  output: any
}

// Holds
// Simplified: default to BASIC_HOLD by omission in requests
export type GoogleVaultHoldView = 'BASIC_HOLD' | 'FULL_HOLD'

export type GoogleVaultCorpus = 'MAIL' | 'DRIVE' | 'GROUPS' | 'HANGOUTS_CHAT' | 'VOICE'
@@ -37,14 +54,18 @@ export type GoogleVaultCorpus = 'MAIL' | 'DRIVE' | 'GROUPS' | 'HANGOUTS_CHAT' |
export interface GoogleVaultCreateMattersHoldsParams extends GoogleVaultCommonParams {
  holdName: string
  corpus: GoogleVaultCorpus
  accountEmails?: string // Comma-separated list or array handled in the tool
  accountEmails?: string
  orgUnitId?: string
  terms?: string
  startTime?: string
  endTime?: string
  includeSharedDrives?: boolean
}

export interface GoogleVaultListMattersHoldsParams extends GoogleVaultCommonParams {
  pageSize?: number
  pageToken?: string
  holdId?: string // Short input to fetch a specific hold
  holdId?: string
}

export interface GoogleVaultListMattersHoldsResponse extends ToolResponse {

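For example, a minimal value satisfying GoogleVaultCreateMattersHoldsParams might look like the sketch below. The concrete values are made up, and accessToken is assumed to come from GoogleVaultCommonParams (its declaration sits above the visible hunk) and to be injected by the tools framework at runtime.

import type { GoogleVaultCreateMattersHoldsParams } from '@/tools/google_vault/types'

// Illustrative params object; all values are hypothetical.
const exampleParams: GoogleVaultCreateMattersHoldsParams = {
  accessToken: 'ya29.example-token', // normally injected by the framework (assumption)
  matterId: 'matter-123',
  holdName: 'Q3 Litigation Hold',
  corpus: 'MAIL',
  accountEmails: 'alice@example.com, bob@example.com', // comma-separated; split by the tool
  terms: 'subject:contract',
}
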
apps/sim/tools/google_vault/utils.ts (new file, 41 lines)
@@ -0,0 +1,41 @@
/**
 * Google Vault Error Enhancement Utilities
 *
 * Provides user-friendly error messages for common Google Vault authentication
 * and credential issues, particularly RAPT (reauthentication policy) errors.
 */

/**
 * Detects if an error message indicates a credential/reauthentication issue
 */
function isCredentialRefreshError(errorMessage: string): boolean {
  const lowerMessage = errorMessage.toLowerCase()
  return (
    lowerMessage.includes('invalid_rapt') ||
    lowerMessage.includes('reauth related error') ||
    (lowerMessage.includes('invalid_grant') && lowerMessage.includes('rapt')) ||
    lowerMessage.includes('failed to refresh token') ||
    (lowerMessage.includes('failed to fetch access token') && lowerMessage.includes('401'))
  )
}

/**
 * Enhances Google Vault error messages with actionable guidance
 *
 * For credential/reauthentication errors (RAPT errors), provides specific
 * instructions for resolving the issue through Google Admin Console settings.
 */
export function enhanceGoogleVaultError(errorMessage: string): string {
  if (isCredentialRefreshError(errorMessage)) {
    return (
      `Google Vault authentication failed (likely due to reauthentication policy). ` +
      `To resolve this, try disconnecting and reconnecting your Google Vault credential ` +
      `in the Credentials settings. If the issue persists, ask your Google Workspace ` +
      `administrator to disable "Reauthentication policy" for Sim Studio in the Google ` +
      `Admin Console (Security > Access and data control > Context-Aware Access > ` +
      `Reauthentication policy), or exempt Sim Studio from reauthentication requirements. ` +
      `Learn more: https://support.google.com/a/answer/9368756`
    )
  }
  return errorMessage
}
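A short sketch of how the tools in this change use the helper on a failed response; this mirrors the transformResponse edits above, with the wrapper function name chosen here for illustration only.

import { enhanceGoogleVaultError } from '@/tools/google_vault/utils'

// Illustrative error-handling pattern shared by the Vault tools in this change.
async function checkVaultResponse(response: Response): Promise<any> {
  const data = await response.json()
  if (!response.ok) {
    const errorMessage = data.error?.message || 'Google Vault request failed'
    // Rewrites RAPT/reauthentication failures into actionable guidance; other messages pass through.
    throw new Error(enhanceGoogleVaultError(errorMessage))
  }
  return data
}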