From cf023e4d22332e2047ae95a0e2ff6f558fc666c8 Mon Sep 17 00:00:00 2001 From: Waleed Date: Wed, 5 Nov 2025 13:00:34 -0800 Subject: [PATCH] feat(tools): added download file tool for onedrive, google drive, and slack; added move email tool for gmail and outlook (#1785) * feat(tools): added download file tool for onedrive, google drive, and slack * added gmail & outlook move tools, added missing credentials descriptions to modal * added slack delete/update message, add reaction; added gmail read/unread/label/unarchive; added outlook copy/delete/read/unread * added threads to slack operations * added timestamp for slack webhook trigger since api uses timestamp for updating/reacting/deleting * cleanup * added file info to slack read messages * updated slack desc * fixed downloading for onedrive, slack, and drive * fix type check * fix build failure * cleanup files, fix triggers with attachments, fix integration blocks with include attachment to parse to user files, remove unused code * fix move files tools * fix tests * fix build errors * fix type error * fix tests * remove redundant code and filter out unecessary user file fields * fix lint error * remove fields from tag dropdown * fix file upload via API * fix pdf parse issue --------- Co-authored-by: waleed Co-authored-by: Adam Gough Co-authored-by: Vikhyath Mondreti --- apps/docs/content/docs/en/tools/gmail.mdx | 154 ++++++++++- .../content/docs/en/tools/google_drive.mdx | 18 ++ apps/docs/content/docs/en/tools/onedrive.mdx | 17 ++ apps/docs/content/docs/en/tools/outlook.mdx | 112 +++++++- apps/docs/content/docs/en/tools/slack.mdx | 101 +++++++- apps/sim/app/api/__test-utils__/utils.ts | 13 + apps/sim/app/api/copilot/chat/route.test.ts | 23 +- apps/sim/app/api/files/authorization.ts | 31 ++- apps/sim/app/api/files/delete/route.test.ts | 16 +- .../execution/[executionId]/[fileId]/route.ts | 124 --------- .../app/api/files/presigned/batch/route.ts | 23 +- .../sim/app/api/files/presigned/route.test.ts | 14 +- apps/sim/app/api/files/presigned/route.ts | 20 +- .../api/files/serve/[...path]/route.test.ts | 41 ++- apps/sim/app/api/files/upload/route.test.ts | 74 +++++- apps/sim/app/api/files/upload/route.ts | 105 +++----- apps/sim/app/api/files/utils.ts | 5 +- .../[id]/documents/[documentId]/route.test.ts | 9 +- apps/sim/app/api/logs/cleanup/route.ts | 25 +- apps/sim/app/api/proxy/route.ts | 7 +- apps/sim/app/api/proxy/tts/route.ts | 59 ++++- .../app/api/tools/gmail/add-label/route.ts | 117 +++++++++ apps/sim/app/api/tools/gmail/archive/route.ts | 110 ++++++++ apps/sim/app/api/tools/gmail/delete/route.ts | 107 ++++++++ .../app/api/tools/gmail/mark-read/route.ts | 110 ++++++++ .../app/api/tools/gmail/mark-unread/route.ts | 113 ++++++++ apps/sim/app/api/tools/gmail/move/route.ts | 134 ++++++++++ .../app/api/tools/gmail/remove-label/route.ts | 120 +++++++++ .../app/api/tools/gmail/unarchive/route.ts | 110 ++++++++ .../sim/app/api/tools/onedrive/files/route.ts | 165 ++++++++++++ apps/sim/app/api/tools/outlook/copy/route.ts | 112 ++++++++ .../sim/app/api/tools/outlook/delete/route.ts | 101 ++++++++ .../app/api/tools/outlook/mark-read/route.ts | 111 ++++++++ .../api/tools/outlook/mark-unread/route.ts | 111 ++++++++ apps/sim/app/api/tools/outlook/move/route.ts | 110 ++++++++ .../app/api/tools/slack/add-reaction/route.ts | 116 +++++++++ .../api/tools/slack/delete-message/route.ts | 111 ++++++++ .../app/api/tools/slack/send-message/route.ts | 4 + .../api/tools/slack/update-message/route.ts | 114 +++++++++ .../app/api/workflows/[id]/execute/route.ts | 65 
++++- .../logs/components/sidebar/sidebar.tsx | 30 +-- .../tool-calls/tool-calls-display.tsx | 23 +- .../components/json-view/json-view.tsx | 6 +- .../components/oauth-required-modal.tsx | 29 ++- .../components/microsoft-file-selector.tsx | 27 +- .../file-selector/file-selector-input.tsx | 1 + .../components/folder-selector-input.tsx | 40 ++- .../hooks/use-workflow-execution.ts | 9 +- apps/sim/background/webhook-execution.ts | 41 ++- apps/sim/blocks/blocks/gmail.ts | 242 +++++++++++++++++- apps/sim/blocks/blocks/google_drive.ts | 94 ++++++- apps/sim/blocks/blocks/onedrive.ts | 68 ++++- apps/sim/blocks/blocks/outlook.ts | 181 ++++++++++++- apps/sim/blocks/blocks/slack.ts | 204 ++++++++++++++- apps/sim/components/ui/tag-dropdown.tsx | 3 +- apps/sim/executor/execution/executor.ts | 1 + apps/sim/executor/execution/types.ts | 1 + apps/sim/executor/handlers/api/api-handler.ts | 1 + .../handlers/generic/generic-handler.ts | 1 + apps/sim/executor/types.ts | 3 +- .../sim/executor/utils/file-tool-processor.ts | 94 ++----- apps/sim/executor/utils/start-block.ts | 15 +- apps/sim/lib/auth.ts | 2 + apps/sim/lib/execution/files.ts | 91 ++++++- apps/sim/lib/file-parsers/pdf-parser.ts | 19 +- apps/sim/lib/logs/execution/logger.ts | 18 +- apps/sim/lib/logs/search-suggestions.test.ts | 36 ++- apps/sim/lib/logs/types.ts | 4 - .../execution/execution-file-helpers.ts | 57 ++--- .../execution/execution-file-manager.ts | 131 ++++++---- .../execution/execution-file-server.ts | 135 ---------- .../lib/uploads/contexts/execution/index.ts | 1 - .../workspace/workspace-file-manager.ts | 48 +++- apps/sim/lib/uploads/core/storage-service.ts | 2 +- apps/sim/lib/uploads/shared/types.ts | 2 +- .../lib/uploads/utils/file-utils.server.ts | 19 +- apps/sim/lib/uploads/utils/file-utils.ts | 90 ++++--- apps/sim/lib/utils.ts | 86 +++++++ apps/sim/lib/webhooks/attachment-processor.ts | 52 +--- apps/sim/lib/workflows/block-outputs.ts | 14 +- .../lib/workflows/executor/execution-core.ts | 3 +- apps/sim/package.json | 2 +- apps/sim/tools/elevenlabs/tts.ts | 11 +- apps/sim/tools/gmail/add_label.ts | 85 ++++++ apps/sim/tools/gmail/archive.ts | 78 ++++++ apps/sim/tools/gmail/delete.ts | 78 ++++++ apps/sim/tools/gmail/index.ts | 23 +- apps/sim/tools/gmail/mark_read.ts | 78 ++++++ apps/sim/tools/gmail/mark_unread.ts | 78 ++++++ apps/sim/tools/gmail/move.ts | 92 +++++++ apps/sim/tools/gmail/remove_label.ts | 85 ++++++ apps/sim/tools/gmail/types.ts | 26 +- apps/sim/tools/gmail/unarchive.ts | 78 ++++++ apps/sim/tools/google_drive/download.ts | 171 +++++++++++++ apps/sim/tools/google_drive/index.ts | 2 + apps/sim/tools/google_drive/types.ts | 12 + apps/sim/tools/onedrive/download.ts | 131 ++++++++++ apps/sim/tools/onedrive/index.ts | 2 + apps/sim/tools/onedrive/types.ts | 27 +- apps/sim/tools/outlook/copy.ts | 74 ++++++ apps/sim/tools/outlook/delete.ts | 65 +++++ apps/sim/tools/outlook/index.ts | 17 +- apps/sim/tools/outlook/mark_read.ts | 65 +++++ apps/sim/tools/outlook/mark_unread.ts | 65 +++++ apps/sim/tools/outlook/move.ts | 72 ++++++ apps/sim/tools/outlook/types.ts | 71 ++++- apps/sim/tools/registry.ts | 57 ++++- apps/sim/tools/slack/add_reaction.ts | 108 ++++++++ apps/sim/tools/slack/delete_message.ts | 102 ++++++++ apps/sim/tools/slack/download.ts | 151 +++++++++++ apps/sim/tools/slack/index.ts | 14 +- apps/sim/tools/slack/message.ts | 7 + apps/sim/tools/slack/message_reader.ts | 20 ++ apps/sim/tools/slack/types.ts | 81 +++++- apps/sim/tools/slack/update_message.ts | 111 ++++++++ apps/sim/triggers/slack/webhook.ts | 7 +- bun.lock | 6 +- 
117 files changed, 6221 insertions(+), 887 deletions(-) delete mode 100644 apps/sim/app/api/files/execution/[executionId]/[fileId]/route.ts create mode 100644 apps/sim/app/api/tools/gmail/add-label/route.ts create mode 100644 apps/sim/app/api/tools/gmail/archive/route.ts create mode 100644 apps/sim/app/api/tools/gmail/delete/route.ts create mode 100644 apps/sim/app/api/tools/gmail/mark-read/route.ts create mode 100644 apps/sim/app/api/tools/gmail/mark-unread/route.ts create mode 100644 apps/sim/app/api/tools/gmail/move/route.ts create mode 100644 apps/sim/app/api/tools/gmail/remove-label/route.ts create mode 100644 apps/sim/app/api/tools/gmail/unarchive/route.ts create mode 100644 apps/sim/app/api/tools/onedrive/files/route.ts create mode 100644 apps/sim/app/api/tools/outlook/copy/route.ts create mode 100644 apps/sim/app/api/tools/outlook/delete/route.ts create mode 100644 apps/sim/app/api/tools/outlook/mark-read/route.ts create mode 100644 apps/sim/app/api/tools/outlook/mark-unread/route.ts create mode 100644 apps/sim/app/api/tools/outlook/move/route.ts create mode 100644 apps/sim/app/api/tools/slack/add-reaction/route.ts create mode 100644 apps/sim/app/api/tools/slack/delete-message/route.ts create mode 100644 apps/sim/app/api/tools/slack/update-message/route.ts delete mode 100644 apps/sim/lib/uploads/contexts/execution/execution-file-server.ts create mode 100644 apps/sim/tools/gmail/add_label.ts create mode 100644 apps/sim/tools/gmail/archive.ts create mode 100644 apps/sim/tools/gmail/delete.ts create mode 100644 apps/sim/tools/gmail/mark_read.ts create mode 100644 apps/sim/tools/gmail/mark_unread.ts create mode 100644 apps/sim/tools/gmail/move.ts create mode 100644 apps/sim/tools/gmail/remove_label.ts create mode 100644 apps/sim/tools/gmail/unarchive.ts create mode 100644 apps/sim/tools/google_drive/download.ts create mode 100644 apps/sim/tools/onedrive/download.ts create mode 100644 apps/sim/tools/outlook/copy.ts create mode 100644 apps/sim/tools/outlook/delete.ts create mode 100644 apps/sim/tools/outlook/mark_read.ts create mode 100644 apps/sim/tools/outlook/mark_unread.ts create mode 100644 apps/sim/tools/outlook/move.ts create mode 100644 apps/sim/tools/slack/add_reaction.ts create mode 100644 apps/sim/tools/slack/delete_message.ts create mode 100644 apps/sim/tools/slack/download.ts create mode 100644 apps/sim/tools/slack/update_message.ts diff --git a/apps/docs/content/docs/en/tools/gmail.mdx b/apps/docs/content/docs/en/tools/gmail.mdx index 447f870aa..0d25f0b2f 100644 --- a/apps/docs/content/docs/en/tools/gmail.mdx +++ b/apps/docs/content/docs/en/tools/gmail.mdx @@ -1,6 +1,6 @@ --- title: Gmail -description: Send Gmail or trigger workflows from Gmail events +description: Send, read, search, and move Gmail messages or trigger workflows from Gmail events --- import { BlockInfoCard } from "@/components/ui/block-info-card" @@ -45,13 +45,21 @@ With Gmail, you can: - **Access from anywhere**: Use Gmail across devices with synchronized content and settings - **Integrate with other services**: Connect with Google Calendar, Drive, and other productivity tools -In Sim, the Gmail integration enables your agents to send, read, and search emails programmatically. This allows for powerful automation scenarios such as sending notifications, processing incoming messages, extracting information from emails, and managing communication workflows. 
Your agents can compose and send personalized emails, search for specific messages using Gmail's query syntax, and extract content from emails to use in other parts of your workflow. Coming soon, agents will also be able to listen for new emails in real-time, enabling responsive workflows that can trigger actions based on incoming messages. This integration bridges the gap between your AI workflows and email communications, enabling seamless interaction with one of the world's most widely used communication platforms. +In Sim, the Gmail integration enables your agents to manage email programmatically from end to end. This allows for powerful automation scenarios such as sending notifications, processing incoming messages, extracting information from emails, and managing communication workflows at scale. Your agents can: + +- **Compose and send**: Create personalized emails with attachments and send to recipients +- **Read and search**: Find specific messages using Gmail's query syntax and extract content +- **Organize intelligently**: Mark messages as read/unread, archive or unarchive emails, and manage labels +- **Clean up inbox**: Delete messages, move emails between labels, and maintain inbox zero +- **Trigger workflows**: Listen for new emails in real-time, enabling responsive workflows that react to incoming messages + +This integration bridges the gap between your AI workflows and email communications, enabling seamless interaction with one of the world's most widely used communication platforms. Whether you're automating customer support responses, processing receipts, managing subscriptions, or coordinating team communications, the Gmail integration provides all the tools you need for comprehensive email automation. {/* MANUAL-CONTENT-END */} ## Usage Instructions -Integrate Gmail into the workflow. Can send, read, and search emails. Can be used in trigger mode to trigger a workflow when a new email is received. +Integrate Gmail into the workflow. Can send, read, search, and move emails. Can be used in trigger mode to trigger a workflow when a new email is received. 
@@ -145,6 +153,146 @@ Search emails in Gmail | `content` | string | Search results summary | | `metadata` | object | Search metadata | +### `gmail_move` + +Move emails between Gmail labels/folders + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `messageId` | string | Yes | ID of the message to move | +| `addLabelIds` | string | Yes | Comma-separated label IDs to add \(e.g., INBOX, Label_123\) | +| `removeLabelIds` | string | No | Comma-separated label IDs to remove \(e.g., INBOX, SPAM\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `content` | string | Success message | +| `metadata` | object | Email metadata | + +### `gmail_mark_read` + +Mark a Gmail message as read + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `messageId` | string | Yes | ID of the message to mark as read | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `content` | string | Success message | +| `metadata` | object | Email metadata | + +### `gmail_mark_unread` + +Mark a Gmail message as unread + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `messageId` | string | Yes | ID of the message to mark as unread | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `content` | string | Success message | +| `metadata` | object | Email metadata | + +### `gmail_archive` + +Archive a Gmail message (remove from inbox) + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `messageId` | string | Yes | ID of the message to archive | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `content` | string | Success message | +| `metadata` | object | Email metadata | + +### `gmail_unarchive` + +Unarchive a Gmail message (move back to inbox) + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `messageId` | string | Yes | ID of the message to unarchive | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `content` | string | Success message | +| `metadata` | object | Email metadata | + +### `gmail_delete` + +Delete a Gmail message (move to trash) + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `messageId` | string | Yes | ID of the message to delete | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `content` | string | Success message | +| `metadata` | object | Email metadata | + +### `gmail_add_label` + +Add label(s) to a Gmail message + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `messageId` | string | Yes | ID of the message to add labels to | +| `labelIds` | string | Yes | Comma-separated label IDs to add \(e.g., INBOX, Label_123\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `content` | string | Success message | +| `metadata` | object | Email metadata | + +### `gmail_remove_label` + +Remove label(s) from a Gmail message + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `messageId` | string | Yes | ID of the message to remove labels 
from | +| `labelIds` | string | Yes | Comma-separated label IDs to remove \(e.g., INBOX, Label_123\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `content` | string | Success message | +| `metadata` | object | Email metadata | + ## Notes diff --git a/apps/docs/content/docs/en/tools/google_drive.mdx b/apps/docs/content/docs/en/tools/google_drive.mdx index 5f1b8b600..88593ac5d 100644 --- a/apps/docs/content/docs/en/tools/google_drive.mdx +++ b/apps/docs/content/docs/en/tools/google_drive.mdx @@ -118,6 +118,24 @@ Create a new folder in Google Drive | --------- | ---- | ----------- | | `file` | json | Created folder metadata including ID, name, and parent information | +### `google_drive_download` + +Download a file from Google Drive (exports Google Workspace files automatically) + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `fileId` | string | Yes | The ID of the file to download | +| `mimeType` | string | No | The MIME type to export Google Workspace files to \(optional\) | +| `fileName` | string | No | Optional filename override | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `file` | file | Downloaded file stored in execution files | + ### `google_drive_list` List files and folders in Google Drive diff --git a/apps/docs/content/docs/en/tools/onedrive.mdx b/apps/docs/content/docs/en/tools/onedrive.mdx index 5d5bbf123..5f80ebd95 100644 --- a/apps/docs/content/docs/en/tools/onedrive.mdx +++ b/apps/docs/content/docs/en/tools/onedrive.mdx @@ -98,6 +98,23 @@ Create a new folder in OneDrive | `success` | boolean | Whether the folder was created successfully | | `file` | object | The created folder object with metadata including id, name, webViewLink, and timestamps | +### `onedrive_download` + +Download a file from OneDrive + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `fileId` | string | Yes | The ID of the file to download | +| `fileName` | string | No | Optional filename override | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `file` | file | Downloaded file stored in execution files | + ### `onedrive_list` List files and folders in OneDrive diff --git a/apps/docs/content/docs/en/tools/outlook.mdx b/apps/docs/content/docs/en/tools/outlook.mdx index b1c7bf0a6..0f6ef29dc 100644 --- a/apps/docs/content/docs/en/tools/outlook.mdx +++ b/apps/docs/content/docs/en/tools/outlook.mdx @@ -1,6 +1,6 @@ --- title: Outlook -description: Access Outlook +description: Send, read, draft, forward, and move Outlook email messages --- import { BlockInfoCard } from "@/components/ui/block-info-card" @@ -134,13 +134,21 @@ With Microsoft Outlook, you can: - **Access across devices**: Use Outlook on desktop, web, and mobile with real-time sync - **Maintain privacy and security**: Leverage enterprise-grade encryption and compliance controls -In Sim, the Microsoft Outlook integration enables your agents to interact directly with email and calendar data programmatically. This allows for powerful automation scenarios such as sending custom email updates, parsing incoming messages for workflow triggers, creating calendar events, and managing task reminders. 
By connecting Sim with Microsoft Outlook, you enable intelligent agents to automate communications, streamline scheduling, and maintain visibility into organizational correspondence — all within your workflow ecosystem. +In Sim, the Microsoft Outlook integration enables your agents to interact directly with email and calendar data programmatically and manage email from end to end. This allows for powerful automation scenarios across your entire email workflow. Your agents can: + +- **Send and draft**: Compose professional emails with attachments and save drafts for later +- **Read and forward**: Access inbox messages and forward important communications to team members +- **Organize efficiently**: Mark emails as read or unread, move messages between folders, and copy emails for reference +- **Clean up inbox**: Delete unwanted messages and maintain organized folder structures +- **Trigger workflows**: React to new emails in real-time, enabling responsive automation based on incoming messages + +By connecting Sim with Microsoft Outlook, you enable intelligent agents to automate communications, streamline scheduling, maintain visibility into organizational correspondence, and keep inboxes organized — all within your workflow ecosystem. Whether you're managing customer communications, processing invoices, coordinating team updates, or automating follow-ups, the Outlook integration provides enterprise-grade email automation capabilities. {/* MANUAL-CONTENT-END */} ## Usage Instructions -Integrate Outlook into the workflow. Can read, draft, and send email messages. Can be used in trigger mode to trigger a workflow when a new email is received. +Integrate Outlook into the workflow. Can read, draft, send, forward, and move email messages. Can be used in trigger mode to trigger a workflow when a new email is received. 
@@ -237,6 +245,104 @@ Forward an existing Outlook message to specified recipients | `message` | string | Success or error message | | `results` | object | Delivery result details | +### `outlook_move` + +Move emails between Outlook folders + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `messageId` | string | Yes | ID of the message to move | +| `destinationId` | string | Yes | ID of the destination folder | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Email move success status | +| `message` | string | Success or error message | +| `messageId` | string | ID of the moved message | +| `newFolderId` | string | ID of the destination folder | + +### `outlook_mark_read` + +Mark an Outlook message as read + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `messageId` | string | Yes | ID of the message to mark as read | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Operation success status | +| `message` | string | Success or error message | +| `messageId` | string | ID of the message | +| `isRead` | boolean | Read status of the message | + +### `outlook_mark_unread` + +Mark an Outlook message as unread + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `messageId` | string | Yes | ID of the message to mark as unread | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Operation success status | +| `message` | string | Success or error message | +| `messageId` | string | ID of the message | +| `isRead` | boolean | Read status of the message | + +### `outlook_delete` + +Delete an Outlook message (move to Deleted Items) + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `messageId` | string | Yes | ID of the message to delete | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Operation success status | +| `message` | string | Success or error message | +| `messageId` | string | ID of the deleted message | +| `status` | string | Deletion status | + +### `outlook_copy` + +Copy an Outlook message to another folder + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `messageId` | string | Yes | ID of the message to copy | +| `destinationId` | string | Yes | ID of the destination folder | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `success` | boolean | Email copy success status | +| `message` | string | Success or error message | +| `originalMessageId` | string | ID of the original message | +| `copiedMessageId` | string | ID of the copied message | +| `destinationFolderId` | string | ID of the destination folder | + ## Notes diff --git a/apps/docs/content/docs/en/tools/slack.mdx b/apps/docs/content/docs/en/tools/slack.mdx index 5fd70d7b7..d74ffe1f8 100644 --- a/apps/docs/content/docs/en/tools/slack.mdx +++ b/apps/docs/content/docs/en/tools/slack.mdx @@ -1,6 +1,6 @@ --- title: Slack -description: Send messages to Slack or trigger workflows from Slack events +description: Send, update, delete messages, add reactions in Slack or trigger workflows from Slack events --- import { BlockInfoCard 
} from "@/components/ui/block-info-card" @@ -51,20 +51,25 @@ With Slack, you can: - **Enhance agent workflows**: Integrate Slack messaging into your agents to deliver results, alerts, and status updates - **Create and share Slack canvases**: Programmatically generate collaborative documents (canvases) in Slack channels - **Read messages from channels**: Retrieve and process recent messages from any Slack channel for monitoring or workflow triggers +- **Manage bot messages**: Update or delete messages sent by your bot, and add emoji reactions -In Sim, the Slack integration enables your agents to programmatically interact with Slack in several ways as part of their workflows: +In Sim, the Slack integration enables your agents to interact with Slack programmatically as part of their workflows, with full message management capabilities: -- **Send messages**: Agents can send formatted messages to any Slack channel or user, supporting Slack's mrkdwn syntax for rich formatting. -- **Create canvases**: Agents can create and share Slack canvases (collaborative documents) directly in channels, enabling richer content sharing and documentation. -- **Read messages**: Agents can read recent messages from channels, allowing for monitoring, reporting, or triggering further actions based on channel activity. +- **Send messages**: Agents can send formatted messages to any Slack channel or user, supporting Slack's mrkdwn syntax for rich formatting +- **Update messages**: Edit previously sent bot messages to correct information or provide status updates +- **Delete messages**: Remove bot messages when they're no longer needed or contain errors +- **Add reactions**: Express sentiment or acknowledgment by adding emoji reactions to any message +- **Create canvases**: Create and share Slack canvases (collaborative documents) directly in channels, enabling richer content sharing and documentation +- **Read messages**: Read recent messages from channels, allowing for monitoring, reporting, or triggering further actions based on channel activity +- **Download files**: Retrieve files shared in Slack channels for processing or archival -This allows for powerful automation scenarios such as sending notifications, alerts, updates, and reports directly to your team's communication hub, sharing structured documents, or monitoring conversations for workflow triggers. Your agents can deliver timely information, share results from processes they've completed, create collaborative documents, or alert team members when attention is needed. This integration bridges the gap between your AI workflows and your team's communication, ensuring everyone stays informed without manual intervention. By connecting Sim with Slack, you can create agents that keep your team updated with relevant information at the right time, enhance collaboration by sharing insights automatically, and reduce the need for manual status updates—all while leveraging your existing Slack workspace where your team already communicates. +This allows for powerful automation scenarios such as sending notifications with dynamic updates, managing conversational flows with editable status messages, acknowledging important messages with reactions, and maintaining clean channels by removing outdated bot messages. Your agents can deliver timely information, update messages as workflows progress, create collaborative documents, or alert team members when attention is needed. 
This integration bridges the gap between your AI workflows and your team's communication, ensuring everyone stays informed with accurate, up-to-date information. By connecting Sim with Slack, you can create agents that keep your team updated with relevant information at the right time, enhance collaboration by sharing and updating insights automatically, and reduce the need for manual status updates—all while leveraging your existing Slack workspace where your team already communicates. {/* MANUAL-CONTENT-END */} ## Usage Instructions -Integrate Slack into the workflow. Can send messages, create canvases, and read messages. Requires Bot Token instead of OAuth in advanced mode. Can be used in trigger mode to trigger a workflow when a message is sent to a channel. +Integrate Slack into the workflow. Can send, update, and delete messages, create canvases, read messages, and add reactions. Requires Bot Token instead of OAuth in advanced mode. Can be used in trigger mode to trigger a workflow when a message is sent to a channel. @@ -82,6 +87,7 @@ Send messages to Slack channels or users through the Slack API. Supports Slack m | `botToken` | string | No | Bot token for Custom Bot | | `channel` | string | Yes | Target Slack channel \(e.g., #general\) | | `text` | string | Yes | Message text to send \(supports Slack mrkdwn formatting\) | +| `thread_ts` | string | No | Thread timestamp to reply to \(creates thread reply\) | | `files` | file[] | No | Files to attach to the message | #### Output @@ -135,6 +141,87 @@ Read the latest messages from Slack channels. Retrieve conversation history with | --------- | ---- | ----------- | | `messages` | array | Array of message objects from the channel | +### `slack_download` + +Download a file from Slack + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `authMethod` | string | No | Authentication method: oauth or bot_token | +| `botToken` | string | No | Bot token for Custom Bot | +| `fileId` | string | Yes | The ID of the file to download | +| `fileName` | string | No | Optional filename override | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `file` | file | Downloaded file stored in execution files | + +### `slack_update_message` + +Update a message previously sent by the bot in Slack + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `authMethod` | string | No | Authentication method: oauth or bot_token | +| `botToken` | string | No | Bot token for Custom Bot | +| `channel` | string | Yes | Channel ID where the message was posted \(e.g., C1234567890\) | +| `timestamp` | string | Yes | Timestamp of the message to update \(e.g., 1405894322.002768\) | +| `text` | string | Yes | New message text \(supports Slack mrkdwn formatting\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `content` | string | Success message | +| `metadata` | object | Updated message metadata | + +### `slack_delete_message` + +Delete a message previously sent by the bot in Slack + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `authMethod` | string | No | Authentication method: oauth or bot_token | +| `botToken` | string | No | Bot token for Custom Bot | +| `channel` | string | Yes | Channel ID where the message was posted \(e.g., C1234567890\) | +| `timestamp` | string | Yes | Timestamp of the message to 
delete \(e.g., 1405894322.002768\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `content` | string | Success message | +| `metadata` | object | Deleted message metadata | + +### `slack_add_reaction` + +Add an emoji reaction to a Slack message + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ---- | -------- | ----------- | +| `authMethod` | string | No | Authentication method: oauth or bot_token | +| `botToken` | string | No | Bot token for Custom Bot | +| `channel` | string | Yes | Channel ID where the message was posted \(e.g., C1234567890\) | +| `timestamp` | string | Yes | Timestamp of the message to react to \(e.g., 1405894322.002768\) | +| `name` | string | Yes | Name of the emoji reaction \(without colons, e.g., thumbsup, heart, eyes\) | + +#### Output + +| Parameter | Type | Description | +| --------- | ---- | ----------- | +| `content` | string | Success message | +| `metadata` | object | Reaction metadata | + ## Notes diff --git a/apps/sim/app/api/__test-utils__/utils.ts b/apps/sim/app/api/__test-utils__/utils.ts index 43d8ba2c7..8b76c8c31 100644 --- a/apps/sim/app/api/__test-utils__/utils.ts +++ b/apps/sim/app/api/__test-utils__/utils.ts @@ -1349,6 +1349,19 @@ export function setupFileApiMocks( }), })) + vi.doMock('@/lib/uploads/contexts/workspace', () => ({ + uploadWorkspaceFile: vi.fn().mockResolvedValue({ + id: 'test-file-id', + name: 'test.txt', + url: '/api/files/serve/workspace/test-workspace-id/test-file.txt', + size: 100, + type: 'text/plain', + key: 'workspace/test-workspace-id/1234567890-test.txt', + uploadedAt: new Date().toISOString(), + expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString(), + }), + })) + mockFileSystem({ writeFileSuccess: true, readFileContent: 'test content', diff --git a/apps/sim/app/api/copilot/chat/route.test.ts b/apps/sim/app/api/copilot/chat/route.test.ts index d2c00d47b..e2950cced 100644 --- a/apps/sim/app/api/copilot/chat/route.test.ts +++ b/apps/sim/app/api/copilot/chat/route.test.ts @@ -102,12 +102,25 @@ describe('Copilot Chat API Route', () => { generateRequestId: vi.fn(() => 'test-request-id'), })) + const mockEnvValues = { + SIM_AGENT_API_URL: 'http://localhost:8000', + COPILOT_API_KEY: 'test-sim-agent-key', + BETTER_AUTH_URL: 'http://localhost:3000', + NEXT_PUBLIC_APP_URL: 'http://localhost:3000', + NODE_ENV: 'test', + } as const + vi.doMock('@/lib/env', () => ({ - env: { - SIM_AGENT_API_URL: 'http://localhost:8000', - COPILOT_API_KEY: 'test-sim-agent-key', - BETTER_AUTH_URL: 'http://localhost:3000', - }, + env: mockEnvValues, + getEnv: (variable: string) => mockEnvValues[variable as keyof typeof mockEnvValues], + isTruthy: (value: string | boolean | number | undefined) => + typeof value === 'string' + ? value.toLowerCase() === 'true' || value === '1' + : Boolean(value), + isFalsy: (value: string | boolean | number | undefined) => + typeof value === 'string' + ? 
value.toLowerCase() === 'false' || value === '0' + : value === false, })) global.fetch = vi.fn() diff --git a/apps/sim/app/api/files/authorization.ts b/apps/sim/app/api/files/authorization.ts index 27e671fef..f2cb2673b 100644 --- a/apps/sim/app/api/files/authorization.ts +++ b/apps/sim/app/api/files/authorization.ts @@ -76,15 +76,18 @@ export async function lookupWorkspaceFileByKey( * Pattern: {workspaceId}/{timestamp}-{random}-{filename} */ function extractWorkspaceIdFromKey(key: string): string | null { - // Use inferContextFromKey to check if it's a workspace file const inferredContext = inferContextFromKey(key) if (inferredContext !== 'workspace') { return null } + // Use the proper parsing utility from workspace context module const parts = key.split('/') const workspaceId = parts[0] - if (workspaceId && /^[a-f0-9-]{36}$/.test(workspaceId)) { + + // Validate UUID format + const UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/i + if (workspaceId && UUID_PATTERN.test(workspaceId)) { return workspaceId } @@ -228,7 +231,8 @@ function isExecutionFile(cloudKey: string, bucketType?: string | null): boolean /** * Verify access to execution files - * Execution files: workspace_id/workflow_id/execution_id/filename + * Modern format: execution/workspace_id/workflow_id/execution_id/filename + * Legacy format: workspace_id/workflow_id/execution_id/filename */ async function verifyExecutionFileAccess( cloudKey: string, @@ -236,12 +240,25 @@ async function verifyExecutionFileAccess( customConfig?: StorageConfig ): Promise { const parts = cloudKey.split('/') - if (parts.length < 3) { - logger.warn('Invalid execution file path format', { cloudKey }) - return false + + // Determine if this is modern prefixed or legacy format + let workspaceId: string + if (parts[0] === 'execution') { + // Modern format: execution/workspaceId/workflowId/executionId/filename + if (parts.length < 5) { + logger.warn('Invalid execution file path format (modern)', { cloudKey }) + return false + } + workspaceId = parts[1] + } else { + // Legacy format: workspaceId/workflowId/executionId/filename + if (parts.length < 4) { + logger.warn('Invalid execution file path format (legacy)', { cloudKey }) + return false + } + workspaceId = parts[0] } - const workspaceId = parts[0] if (!workspaceId) { logger.warn('Could not extract workspaceId from execution file path', { cloudKey }) return false diff --git a/apps/sim/app/api/files/delete/route.test.ts b/apps/sim/app/api/files/delete/route.test.ts index 73af7def6..b9cb3297f 100644 --- a/apps/sim/app/api/files/delete/route.test.ts +++ b/apps/sim/app/api/files/delete/route.test.ts @@ -18,7 +18,7 @@ describe('File Delete API Route', () => { }) const req = createMockRequest('POST', { - filePath: '/api/files/serve/test-file.txt', + filePath: '/api/files/serve/workspace/test-workspace-id/test-file.txt', }) const { POST } = await import('@/app/api/files/delete/route') @@ -39,7 +39,7 @@ describe('File Delete API Route', () => { }) const req = createMockRequest('POST', { - filePath: '/api/files/serve/nonexistent.txt', + filePath: '/api/files/serve/workspace/test-workspace-id/nonexistent.txt', }) const { POST } = await import('@/app/api/files/delete/route') @@ -59,7 +59,7 @@ describe('File Delete API Route', () => { }) const req = createMockRequest('POST', { - filePath: '/api/files/serve/s3/1234567890-test-file.txt', + filePath: '/api/files/serve/s3/workspace/test-workspace-id/1234567890-test-file.txt', }) const { POST } = await import('@/app/api/files/delete/route') 
@@ -73,8 +73,8 @@ describe('File Delete API Route', () => { const storageService = await import('@/lib/uploads/core/storage-service') expect(storageService.deleteFile).toHaveBeenCalledWith({ - key: '1234567890-test-file.txt', - context: 'general', + key: 'workspace/test-workspace-id/1234567890-test-file.txt', + context: 'workspace', }) }) @@ -85,7 +85,7 @@ describe('File Delete API Route', () => { }) const req = createMockRequest('POST', { - filePath: '/api/files/serve/blob/1234567890-test-document.pdf', + filePath: '/api/files/serve/blob/workspace/test-workspace-id/1234567890-test-document.pdf', }) const { POST } = await import('@/app/api/files/delete/route') @@ -99,8 +99,8 @@ describe('File Delete API Route', () => { const storageService = await import('@/lib/uploads/core/storage-service') expect(storageService.deleteFile).toHaveBeenCalledWith({ - key: '1234567890-test-document.pdf', - context: 'general', + key: 'workspace/test-workspace-id/1234567890-test-document.pdf', + context: 'workspace', }) }) diff --git a/apps/sim/app/api/files/execution/[executionId]/[fileId]/route.ts b/apps/sim/app/api/files/execution/[executionId]/[fileId]/route.ts deleted file mode 100644 index 49dee1c46..000000000 --- a/apps/sim/app/api/files/execution/[executionId]/[fileId]/route.ts +++ /dev/null @@ -1,124 +0,0 @@ -import { db } from '@sim/db' -import { workflow, workflowExecutionLogs } from '@sim/db/schema' -import { eq } from 'drizzle-orm' -import { type NextRequest, NextResponse } from 'next/server' -import { checkHybridAuth } from '@/lib/auth/hybrid' -import { createLogger } from '@/lib/logs/console/logger' -import { getUserEntityPermissions } from '@/lib/permissions/utils' -import { - generateExecutionFileDownloadUrl, - getExecutionFiles, -} from '@/lib/uploads/contexts/execution' -import type { UserFile } from '@/executor/types' - -const logger = createLogger('ExecutionFileDownloadAPI') - -/** - * Generate a short-lived presigned URL for secure execution file download - * GET /api/files/execution/[executionId]/[fileId] - */ -export async function GET( - request: NextRequest, - { params }: { params: Promise<{ executionId: string; fileId: string }> } -) { - try { - const authResult = await checkHybridAuth(request, { requireWorkflowId: false }) - - if (!authResult.success || !authResult.userId) { - logger.warn('Unauthorized execution file download request', { - error: authResult.error || 'Missing userId', - }) - return NextResponse.json({ error: 'Unauthorized' }, { status: 401 }) - } - - const userId = authResult.userId - const { executionId, fileId } = await params - - if (!executionId || !fileId) { - return NextResponse.json({ error: 'Execution ID and File ID are required' }, { status: 400 }) - } - - logger.info(`Generating download URL for file ${fileId} in execution ${executionId}`) - - const [executionLog] = await db - .select({ - workflowId: workflowExecutionLogs.workflowId, - }) - .from(workflowExecutionLogs) - .where(eq(workflowExecutionLogs.executionId, executionId)) - .limit(1) - - if (!executionLog) { - return NextResponse.json({ error: 'Execution not found' }, { status: 404 }) - } - - const [workflowData] = await db - .select({ - workspaceId: workflow.workspaceId, - }) - .from(workflow) - .where(eq(workflow.id, executionLog.workflowId)) - .limit(1) - - if (!workflowData) { - return NextResponse.json({ error: 'Workflow not found' }, { status: 404 }) - } - - if (!workflowData.workspaceId) { - logger.warn('Workflow missing workspaceId', { - workflowId: executionLog.workflowId, - executionId, - 
}) - return NextResponse.json({ error: 'Workflow not found' }, { status: 404 }) - } - - const permission = await getUserEntityPermissions(userId, 'workspace', workflowData.workspaceId) - if (permission === null) { - logger.warn('User does not have workspace access for execution file', { - userId, - workspaceId: workflowData.workspaceId, - executionId, - fileId, - }) - return NextResponse.json({ error: 'File not found' }, { status: 404 }) - } - - const executionFiles = await getExecutionFiles(executionId) - - if (executionFiles.length === 0) { - return NextResponse.json({ error: 'No files found for this execution' }, { status: 404 }) - } - - const file = executionFiles.find((f) => f.id === fileId) - if (!file) { - return NextResponse.json({ error: 'File not found in this execution' }, { status: 404 }) - } - - if (new Date(file.expiresAt) < new Date()) { - return NextResponse.json({ error: 'File has expired' }, { status: 410 }) - } - - const userFile: UserFile = file - - const downloadUrl = await generateExecutionFileDownloadUrl(userFile) - - logger.info(`Generated download URL for file ${file.name} (execution: ${executionId})`) - - const response = NextResponse.json({ - downloadUrl, - fileName: file.name, - fileSize: file.size, - fileType: file.type, - expiresIn: 300, // 5 minutes - }) - - response.headers.set('Cache-Control', 'no-cache, no-store, must-revalidate') - response.headers.set('Pragma', 'no-cache') - response.headers.set('Expires', '0') - - return response - } catch (error) { - logger.error('Error generating execution file download URL:', error) - return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) - } -} diff --git a/apps/sim/app/api/files/presigned/batch/route.ts b/apps/sim/app/api/files/presigned/batch/route.ts index 8d5956b68..4f52f334d 100644 --- a/apps/sim/app/api/files/presigned/batch/route.ts +++ b/apps/sim/app/api/files/presigned/batch/route.ts @@ -53,16 +53,19 @@ export async function POST(request: NextRequest) { } const uploadTypeParam = request.nextUrl.searchParams.get('type') - const uploadType: StorageContext = - uploadTypeParam === 'knowledge-base' - ? 'knowledge-base' - : uploadTypeParam === 'chat' - ? 'chat' - : uploadTypeParam === 'copilot' - ? 'copilot' - : uploadTypeParam === 'profile-pictures' - ? 'profile-pictures' - : 'general' + if (!uploadTypeParam) { + return NextResponse.json({ error: 'type query parameter is required' }, { status: 400 }) + } + + const validTypes: StorageContext[] = ['knowledge-base', 'chat', 'copilot', 'profile-pictures'] + if (!validTypes.includes(uploadTypeParam as StorageContext)) { + return NextResponse.json( + { error: `Invalid type parameter. 
Must be one of: ${validTypes.join(', ')}` }, + { status: 400 } + ) + } + + const uploadType = uploadTypeParam as StorageContext const MAX_FILE_SIZE = 100 * 1024 * 1024 for (const file of files) { diff --git a/apps/sim/app/api/files/presigned/route.test.ts b/apps/sim/app/api/files/presigned/route.test.ts index 412b4f8f2..6dcac5c62 100644 --- a/apps/sim/app/api/files/presigned/route.test.ts +++ b/apps/sim/app/api/files/presigned/route.test.ts @@ -33,7 +33,7 @@ describe('/api/files/presigned', () => { const { POST } = await import('@/app/api/files/presigned/route') - const request = new NextRequest('http://localhost:3000/api/files/presigned', { + const request = new NextRequest('http://localhost:3000/api/files/presigned?type=chat', { method: 'POST', body: JSON.stringify({ fileName: 'test.txt', @@ -162,7 +162,7 @@ describe('/api/files/presigned', () => { const { POST } = await import('@/app/api/files/presigned/route') - const request = new NextRequest('http://localhost:3000/api/files/presigned', { + const request = new NextRequest('http://localhost:3000/api/files/presigned?type=chat', { method: 'POST', body: JSON.stringify({ fileName: 'test document.txt', @@ -177,7 +177,7 @@ describe('/api/files/presigned', () => { expect(response.status).toBe(200) expect(data.presignedUrl).toBe('https://example.com/presigned-url') expect(data.fileInfo).toMatchObject({ - path: expect.stringMatching(/\/api\/files\/serve\/s3\/.+\?context=general$/), // general uploads use serve path + path: expect.stringMatching(/\/api\/files\/serve\/s3\/.+\?context=chat$/), key: expect.stringMatching(/.*test.document\.txt$/), name: 'test document.txt', size: 1024, @@ -249,7 +249,7 @@ describe('/api/files/presigned', () => { const { POST } = await import('@/app/api/files/presigned/route') - const request = new NextRequest('http://localhost:3000/api/files/presigned', { + const request = new NextRequest('http://localhost:3000/api/files/presigned?type=chat', { method: 'POST', body: JSON.stringify({ fileName: 'test document.txt', @@ -315,7 +315,7 @@ describe('/api/files/presigned', () => { const { POST } = await import('@/app/api/files/presigned/route') - const request = new NextRequest('http://localhost:3000/api/files/presigned', { + const request = new NextRequest('http://localhost:3000/api/files/presigned?type=chat', { method: 'POST', body: JSON.stringify({ fileName: 'test.txt', @@ -345,7 +345,7 @@ describe('/api/files/presigned', () => { const { POST } = await import('@/app/api/files/presigned/route') - const request = new NextRequest('http://localhost:3000/api/files/presigned', { + const request = new NextRequest('http://localhost:3000/api/files/presigned?type=chat', { method: 'POST', body: JSON.stringify({ fileName: 'test.txt', @@ -377,7 +377,7 @@ describe('/api/files/presigned', () => { const { POST } = await import('@/app/api/files/presigned/route') - const request = new NextRequest('http://localhost:3000/api/files/presigned', { + const request = new NextRequest('http://localhost:3000/api/files/presigned?type=chat', { method: 'POST', body: JSON.stringify({ fileName: 'test.txt', diff --git a/apps/sim/app/api/files/presigned/route.ts b/apps/sim/app/api/files/presigned/route.ts index e38fa309c..adbd43970 100644 --- a/apps/sim/app/api/files/presigned/route.ts +++ b/apps/sim/app/api/files/presigned/route.ts @@ -69,16 +69,16 @@ export async function POST(request: NextRequest) { } const uploadTypeParam = request.nextUrl.searchParams.get('type') - const uploadType: StorageContext = - uploadTypeParam === 'knowledge-base' - ? 
'knowledge-base' - : uploadTypeParam === 'chat' - ? 'chat' - : uploadTypeParam === 'copilot' - ? 'copilot' - : uploadTypeParam === 'profile-pictures' - ? 'profile-pictures' - : 'general' + if (!uploadTypeParam) { + throw new ValidationError('type query parameter is required') + } + + const validTypes: StorageContext[] = ['knowledge-base', 'chat', 'copilot', 'profile-pictures'] + if (!validTypes.includes(uploadTypeParam as StorageContext)) { + throw new ValidationError(`Invalid type parameter. Must be one of: ${validTypes.join(', ')}`) + } + + const uploadType = uploadTypeParam as StorageContext if (uploadType === 'knowledge-base') { const fileValidationError = validateFileType(fileName, contentType) diff --git a/apps/sim/app/api/files/serve/[...path]/route.test.ts b/apps/sim/app/api/files/serve/[...path]/route.test.ts index c18f525a1..0a774fc34 100644 --- a/apps/sim/app/api/files/serve/[...path]/route.test.ts +++ b/apps/sim/app/api/files/serve/[...path]/route.test.ts @@ -69,18 +69,23 @@ describe('File Serve API Route', () => { }) it('should serve local file successfully', async () => { - const req = new NextRequest('http://localhost:3000/api/files/serve/test-file.txt') - const params = { path: ['test-file.txt'] } + const req = new NextRequest( + 'http://localhost:3000/api/files/serve/workspace/test-workspace-id/test-file.txt' + ) + const params = { path: ['workspace', 'test-workspace-id', 'test-file.txt'] } const { GET } = await import('@/app/api/files/serve/[...path]/route') const response = await GET(req, { params: Promise.resolve(params) }) expect(response.status).toBe(200) expect(response.headers.get('Content-Type')).toBe('text/plain') - expect(response.headers.get('Content-Disposition')).toBe('inline; filename="test-file.txt"') + const disposition = response.headers.get('Content-Disposition') + expect(disposition).toContain('inline') + expect(disposition).toContain('filename=') + expect(disposition).toContain('test-file.txt') const fs = await import('fs/promises') - expect(fs.readFile).toHaveBeenCalledWith('/test/uploads/test-file.txt') + expect(fs.readFile).toHaveBeenCalled() }) it('should handle nested paths correctly', async () => { @@ -125,8 +130,10 @@ describe('File Serve API Route', () => { verifyFileAccess: vi.fn().mockResolvedValue(true), })) - const req = new NextRequest('http://localhost:3000/api/files/serve/nested/path/file.txt') - const params = { path: ['nested', 'path', 'file.txt'] } + const req = new NextRequest( + 'http://localhost:3000/api/files/serve/workspace/test-workspace-id/nested-path-file.txt' + ) + const params = { path: ['workspace', 'test-workspace-id', 'nested-path-file.txt'] } const { GET } = await import('@/app/api/files/serve/[...path]/route') const response = await GET(req, { params: Promise.resolve(params) }) @@ -203,8 +210,10 @@ describe('File Serve API Route', () => { findLocalFile: vi.fn().mockReturnValue('/test/uploads/test-file.txt'), })) - const req = new NextRequest('http://localhost:3000/api/files/serve/s3/1234567890-image.png') - const params = { path: ['s3', '1234567890-image.png'] } + const req = new NextRequest( + 'http://localhost:3000/api/files/serve/s3/workspace/test-workspace-id/1234567890-image.png' + ) + const params = { path: ['s3', 'workspace', 'test-workspace-id', '1234567890-image.png'] } const { GET } = await import('@/app/api/files/serve/[...path]/route') const response = await GET(req, { params: Promise.resolve(params) }) @@ -213,8 +222,8 @@ describe('File Serve API Route', () => { 
expect(response.headers.get('Content-Type')).toBe('image/png') expect(downloadFileMock).toHaveBeenCalledWith({ - key: '1234567890-image.png', - context: 'general', + key: 'workspace/test-workspace-id/1234567890-image.png', + context: 'workspace', }) }) @@ -260,8 +269,10 @@ describe('File Serve API Route', () => { findLocalFile: vi.fn().mockReturnValue(null), })) - const req = new NextRequest('http://localhost:3000/api/files/serve/nonexistent.txt') - const params = { path: ['nonexistent.txt'] } + const req = new NextRequest( + 'http://localhost:3000/api/files/serve/workspace/test-workspace-id/nonexistent.txt' + ) + const params = { path: ['workspace', 'test-workspace-id', 'nonexistent.txt'] } const { GET } = await import('@/app/api/files/serve/[...path]/route') const response = await GET(req, { params: Promise.resolve(params) }) @@ -318,8 +329,10 @@ describe('File Serve API Route', () => { createErrorResponse: () => new Response(null, { status: 404 }), })) - const req = new NextRequest(`http://localhost:3000/api/files/serve/file.${test.ext}`) - const params = { path: [`file.${test.ext}`] } + const req = new NextRequest( + `http://localhost:3000/api/files/serve/workspace/test-workspace-id/file.${test.ext}` + ) + const params = { path: ['workspace', 'test-workspace-id', `file.${test.ext}`] } const { GET } = await import('@/app/api/files/serve/[...path]/route') const response = await GET(req, { params: Promise.resolve(params) }) diff --git a/apps/sim/app/api/files/upload/route.test.ts b/apps/sim/app/api/files/upload/route.test.ts index 34aa1f6c3..9b4b38b4a 100644 --- a/apps/sim/app/api/files/upload/route.test.ts +++ b/apps/sim/app/api/files/upload/route.test.ts @@ -8,8 +8,10 @@ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' import { setupFileApiMocks } from '@/app/api/__test-utils__/utils' describe('File Upload API Route', () => { - const createMockFormData = (files: File[]): FormData => { + const createMockFormData = (files: File[], context = 'workspace'): FormData => { const formData = new FormData() + formData.append('context', context) + formData.append('workspaceId', 'test-workspace-id') files.forEach((file) => { formData.append('file', file) }) @@ -60,14 +62,15 @@ describe('File Upload API Route', () => { } expect(response.status).toBe(200) - expect(data).toHaveProperty('path') - expect(data.path).toMatch(/\/api\/files\/serve\/.*\.txt$/) + expect(data).toHaveProperty('url') + expect(data.url).toMatch(/\/api\/files\/serve\/.*\.txt$/) expect(data).toHaveProperty('name', 'test.txt') expect(data).toHaveProperty('size') expect(data).toHaveProperty('type', 'text/plain') + expect(data).toHaveProperty('key') - const { StorageService } = await import('@/lib/uploads') - expect(StorageService.uploadFile).toHaveBeenCalled() + const { uploadWorkspaceFile } = await import('@/lib/uploads/contexts/workspace') + expect(uploadWorkspaceFile).toHaveBeenCalled() }) it('should upload a file to S3 when in S3 mode', async () => { @@ -90,14 +93,15 @@ describe('File Upload API Route', () => { const data = await response.json() expect(response.status).toBe(200) - expect(data).toHaveProperty('path') - expect(data.path).toContain('/api/files/serve/') + expect(data).toHaveProperty('url') + expect(data.url).toContain('/api/files/serve/') expect(data).toHaveProperty('name', 'test.txt') expect(data).toHaveProperty('size') expect(data).toHaveProperty('type', 'text/plain') + expect(data).toHaveProperty('key') - const uploads = await import('@/lib/uploads') - 
expect(uploads.StorageService.uploadFile).toHaveBeenCalled() + const { uploadWorkspaceFile } = await import('@/lib/uploads/contexts/workspace') + expect(uploadWorkspaceFile).toHaveBeenCalled() }) it('should handle multiple file uploads', async () => { @@ -146,14 +150,15 @@ describe('File Upload API Route', () => { }) it('should handle S3 upload errors', async () => { + vi.resetModules() + setupFileApiMocks({ cloudEnabled: true, storageProvider: 's3', }) - vi.doMock('@/lib/uploads/core/storage-service', () => ({ - uploadFile: vi.fn().mockRejectedValue(new Error('Upload failed')), - hasCloudStorage: vi.fn().mockReturnValue(true), + vi.doMock('@/lib/uploads/contexts/workspace', () => ({ + uploadWorkspaceFile: vi.fn().mockRejectedValue(new Error('Storage limit exceeded')), })) const mockFile = createMockFile() @@ -169,9 +174,11 @@ describe('File Upload API Route', () => { const response = await POST(req) const data = await response.json() - expect(response.status).toBe(500) + expect(response.status).toBe(413) expect(data).toHaveProperty('error') expect(typeof data.error).toBe('string') + + vi.resetModules() }) it('should handle CORS preflight requests', async () => { @@ -223,6 +230,14 @@ describe('File Upload Security Tests', () => { }) describe('File Extension Validation', () => { + beforeEach(() => { + vi.resetModules() + setupFileApiMocks({ + cloudEnabled: false, + storageProvider: 'local', + }) + }) + it('should accept allowed file types', async () => { const allowedTypes = [ 'pdf', @@ -243,6 +258,8 @@ describe('File Upload Security Tests', () => { const formData = new FormData() const file = new File(['test content'], `test.${ext}`, { type: 'application/octet-stream' }) formData.append('file', file) + formData.append('context', 'workspace') + formData.append('workspaceId', 'test-workspace-id') const req = new Request('http://localhost/api/files/upload', { method: 'POST', @@ -261,6 +278,29 @@ describe('File Upload Security Tests', () => { const maliciousContent = '' const file = new File([maliciousContent], 'malicious.html', { type: 'text/html' }) formData.append('file', file) + formData.append('context', 'workspace') + formData.append('workspaceId', 'test-workspace-id') + + const req = new Request('http://localhost/api/files/upload', { + method: 'POST', + body: formData, + }) + + const { POST } = await import('@/app/api/files/upload/route') + const response = await POST(req as any) + + expect(response.status).toBe(400) + const data = await response.json() + expect(data.message).toContain("File type 'html' is not allowed") + }) + + it('should reject HTML files to prevent XSS', async () => { + const formData = new FormData() + const maliciousContent = '' + const file = new File([maliciousContent], 'malicious.html', { type: 'text/html' }) + formData.append('file', file) + formData.append('context', 'workspace') + formData.append('workspaceId', 'test-workspace-id') const req = new Request('http://localhost/api/files/upload', { method: 'POST', @@ -280,6 +320,8 @@ describe('File Upload Security Tests', () => { const maliciousSvg = '' const file = new File([maliciousSvg], 'malicious.svg', { type: 'image/svg+xml' }) formData.append('file', file) + formData.append('context', 'workspace') + formData.append('workspaceId', 'test-workspace-id') const req = new Request('http://localhost/api/files/upload', { method: 'POST', @@ -299,6 +341,8 @@ describe('File Upload Security Tests', () => { const maliciousJs = 'alert("XSS")' const file = new File([maliciousJs], 'malicious.js', { type: 
'application/javascript' }) formData.append('file', file) + formData.append('context', 'workspace') + formData.append('workspaceId', 'test-workspace-id') const req = new Request('http://localhost/api/files/upload', { method: 'POST', @@ -317,6 +361,8 @@ describe('File Upload Security Tests', () => { const formData = new FormData() const file = new File(['test content'], 'noextension', { type: 'application/octet-stream' }) formData.append('file', file) + formData.append('context', 'workspace') + formData.append('workspaceId', 'test-workspace-id') const req = new Request('http://localhost/api/files/upload', { method: 'POST', @@ -341,6 +387,8 @@ describe('File Upload Security Tests', () => { type: 'text/html', }) formData.append('file', invalidFile) + formData.append('context', 'workspace') + formData.append('workspaceId', 'test-workspace-id') const req = new Request('http://localhost/api/files/upload', { method: 'POST', diff --git a/apps/sim/app/api/files/upload/route.ts b/apps/sim/app/api/files/upload/route.ts index 02274bb47..663164ebb 100644 --- a/apps/sim/app/api/files/upload/route.ts +++ b/apps/sim/app/api/files/upload/route.ts @@ -60,25 +60,19 @@ export async function POST(request: NextRequest) { const workspaceId = formData.get('workspaceId') as string | null const contextParam = formData.get('context') as string | null - // Determine context: explicit > workspace > execution > general - const context: StorageContext = - (contextParam as StorageContext) || - (workspaceId ? 'workspace' : workflowId && executionId ? 'execution' : 'general') + // Context must be explicitly provided + if (!contextParam) { + throw new InvalidRequestError( + 'Upload requires explicit context parameter (knowledge-base, workspace, execution, copilot, chat, or profile-pictures)' + ) + } + + const context = contextParam as StorageContext const storageService = await import('@/lib/uploads/core/storage-service') const usingCloudStorage = storageService.hasCloudStorage() logger.info(`Using storage mode: ${usingCloudStorage ? 
'Cloud' : 'Local'} for file upload`) - if (workflowId && executionId) { - logger.info( - `Uploading files for execution-scoped storage: workflow=${workflowId}, execution=${executionId}` - ) - } else if (workspaceId) { - logger.info(`Uploading files for workspace-scoped storage: workspace=${workspaceId}`) - } else if (contextParam) { - logger.info(`Uploading files for ${contextParam} context`) - } - const uploadResults = [] for (const file of files) { @@ -94,8 +88,14 @@ export async function POST(request: NextRequest) { const bytes = await file.arrayBuffer() const buffer = Buffer.from(bytes) - // Priority 1: Execution-scoped storage (temporary, 5 min expiry) - if (workflowId && executionId) { + // Handle execution context + if (context === 'execution') { + if (!workflowId || !executionId) { + throw new InvalidRequestError( + 'Execution context requires workflowId and executionId parameters' + ) + } + const { uploadExecutionFile } = await import('@/lib/uploads/contexts/execution') const userFile = await uploadExecutionFile( { @@ -106,14 +106,14 @@ export async function POST(request: NextRequest) { buffer, originalName, file.type, - session.user.id // userId available from session + session.user.id ) uploadResults.push(userFile) continue } - // Priority 2: Knowledge-base files (must check BEFORE workspace to avoid duplicate file check) + // Handle knowledge-base context if (context === 'knowledge-base') { // Validate file type for knowledge base const validationError = validateFileType(originalName, file.type) @@ -178,9 +178,12 @@ export async function POST(request: NextRequest) { continue } - // Priority 3: Workspace-scoped storage (persistent, no expiry) - // Only if context is NOT explicitly set to something else - if (workspaceId && !contextParam) { + // Handle workspace context + if (context === 'workspace') { + if (!workspaceId) { + throw new InvalidRequestError('Workspace context requires workspaceId parameter') + } + try { const { uploadWorkspaceFile } = await import('@/lib/uploads/contexts/workspace') const userFile = await uploadWorkspaceFile( @@ -218,7 +221,7 @@ export async function POST(request: NextRequest) { } } - // Priority 4: Context-specific uploads (copilot, chat, profile-pictures) + // Handle image-only contexts (copilot, chat, profile-pictures) if (context === 'copilot' || context === 'chat' || context === 'profile-pictures') { if (!isImageFileType(file.type)) { throw new InvalidRequestError( @@ -281,60 +284,10 @@ export async function POST(request: NextRequest) { continue } - // Priority 5: General uploads (fallback) - try { - logger.info(`Uploading file (general context): ${originalName}`) - - const metadata: Record = { - originalName: originalName, - uploadedAt: new Date().toISOString(), - purpose: 'general', - userId: session.user.id, - } - - if (workspaceId) { - metadata.workspaceId = workspaceId - } - - const fileInfo = await storageService.uploadFile({ - file: buffer, - fileName: originalName, - contentType: file.type, - context: 'general', - metadata, - }) - - let downloadUrl: string | undefined - if (storageService.hasCloudStorage()) { - try { - downloadUrl = await storageService.generatePresignedDownloadUrl( - fileInfo.key, - 'general', - 24 * 60 * 60 // 24 hours - ) - } catch (error) { - logger.warn(`Failed to generate presigned URL for ${originalName}:`, error) - } - } - - const uploadResult = { - name: originalName, - size: buffer.length, - type: file.type, - key: fileInfo.key, - path: fileInfo.path, - url: downloadUrl || fileInfo.path, - uploadedAt: new 
Date().toISOString(), - expiresAt: new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString(), // 24 hours - context: 'general', - } - - logger.info(`Successfully uploaded: ${fileInfo.key}`) - uploadResults.push(uploadResult) - } catch (error) { - logger.error(`Error uploading ${originalName}:`, error) - throw error - } + // Unknown context + throw new InvalidRequestError( + `Unsupported context: ${context}. Use knowledge-base, workspace, execution, copilot, chat, or profile-pictures` + ) } if (uploadResults.length === 1) { diff --git a/apps/sim/app/api/files/utils.ts b/apps/sim/app/api/files/utils.ts index eb73df982..9007047de 100644 --- a/apps/sim/app/api/files/utils.ts +++ b/apps/sim/app/api/files/utils.ts @@ -232,7 +232,10 @@ function getSecureFileHeaders(filename: string, originalContentType: string) { } } -function encodeFilenameForHeader(filename: string): string { +function encodeFilenameForHeader(storageKey: string): string { + // Extract just the filename from the storage key (last segment after /) + const filename = storageKey.split('/').pop() || storageKey + const hasNonAscii = /[^\x00-\x7F]/.test(filename) if (!hasNonAscii) { diff --git a/apps/sim/app/api/knowledge/[id]/documents/[documentId]/route.test.ts b/apps/sim/app/api/knowledge/[id]/documents/[documentId]/route.test.ts index 22c37b90f..07f9b9846 100644 --- a/apps/sim/app/api/knowledge/[id]/documents/[documentId]/route.test.ts +++ b/apps/sim/app/api/knowledge/[id]/documents/[documentId]/route.test.ts @@ -59,12 +59,19 @@ describe('Document By ID API Route', () => { chunkCount: 5, tokenCount: 100, characterCount: 500, - processingStatus: 'completed', + processingStatus: 'completed' as const, processingStartedAt: new Date('2023-01-01T10:00:00Z'), processingCompletedAt: new Date('2023-01-01T10:05:00Z'), processingError: null, enabled: true, uploadedAt: new Date('2023-01-01T09:00:00Z'), + tag1: null, + tag2: null, + tag3: null, + tag4: null, + tag5: null, + tag6: null, + tag7: null, deletedAt: null, } diff --git a/apps/sim/app/api/logs/cleanup/route.ts b/apps/sim/app/api/logs/cleanup/route.ts index 9c182613e..fc4c00a5f 100644 --- a/apps/sim/app/api/logs/cleanup/route.ts +++ b/apps/sim/app/api/logs/cleanup/route.ts @@ -110,7 +110,7 @@ export async function GET(request: NextRequest) { for (const log of oldEnhancedLogs) { const today = new Date().toISOString().split('T')[0] - const enhancedLogKey = `archived-enhanced-logs/${today}/${log.id}.json` + const enhancedLogKey = `logs/archived/${today}/${log.id}.json` const enhancedLogData = JSON.stringify({ ...log, archivedAt: new Date().toISOString(), @@ -122,7 +122,7 @@ export async function GET(request: NextRequest) { file: Buffer.from(enhancedLogData), fileName: enhancedLogKey, contentType: 'application/json', - context: 'general', + context: 'logs', metadata: { logId: String(log.id), workflowId: String(log.workflowId), @@ -141,10 +141,15 @@ export async function GET(request: NextRequest) { try { await StorageService.deleteFile({ key: file.key, - context: 'general', + context: 'execution', }) results.files.deleted++ - logger.info(`Deleted file: ${file.key}`) + + // Also delete from workspace_files table + const { deleteFileMetadata } = await import('@/lib/uploads/server/metadata') + await deleteFileMetadata(file.key) + + logger.info(`Deleted execution file: ${file.key}`) } catch (fileError) { results.files.deleteFailed++ logger.error(`Failed to delete file ${file.key}:`, { fileError }) @@ -163,26 +168,22 @@ export async function GET(request: NextRequest) { 
results.enhancedLogs.deleted++ } else { results.enhancedLogs.deleteFailed++ - logger.warn( - `Failed to delete enhanced log ${log.id} after archiving: No rows deleted` - ) + logger.warn(`Failed to delete log ${log.id} after archiving: No rows deleted`) } } catch (deleteError) { results.enhancedLogs.deleteFailed++ - logger.error(`Error deleting enhanced log ${log.id} after archiving:`, { deleteError }) + logger.error(`Error deleting log ${log.id} after archiving:`, { deleteError }) } } catch (archiveError) { results.enhancedLogs.archiveFailed++ - logger.error(`Failed to archive enhanced log ${log.id}:`, { archiveError }) + logger.error(`Failed to archive log ${log.id}:`, { archiveError }) } } batchesProcessed++ hasMoreLogs = oldEnhancedLogs.length === BATCH_SIZE - logger.info( - `Processed enhanced logs batch ${batchesProcessed}: ${oldEnhancedLogs.length} logs` - ) + logger.info(`Processed logs batch ${batchesProcessed}: ${oldEnhancedLogs.length} logs`) } try { diff --git a/apps/sim/app/api/proxy/route.ts b/apps/sim/app/api/proxy/route.ts index 8848ed6cd..9dae0324c 100644 --- a/apps/sim/app/api/proxy/route.ts +++ b/apps/sim/app/api/proxy/route.ts @@ -306,12 +306,17 @@ export async function POST(request: NextRequest) { (output) => output.type === 'file' || output.type === 'file[]' ) + // Add userId to execution context for file uploads + const contextWithUser = executionContext + ? { ...executionContext, userId: authResult.userId } + : undefined + const result = await executeTool( toolId, params, true, // skipProxy (we're already in the proxy) !hasFileOutputs, // skipPostProcess (don't skip if tool has file outputs) - executionContext // pass execution context for file processing + contextWithUser // pass execution context with userId for file processing ) if (!result.success) { diff --git a/apps/sim/app/api/proxy/tts/route.ts b/apps/sim/app/api/proxy/tts/route.ts index 2a8a869c2..ee92b9acd 100644 --- a/apps/sim/app/api/proxy/tts/route.ts +++ b/apps/sim/app/api/proxy/tts/route.ts @@ -17,7 +17,15 @@ export async function POST(request: NextRequest) { } const body = await request.json() - const { text, voiceId, apiKey, modelId = 'eleven_monolingual_v1' } = body + const { + text, + voiceId, + apiKey, + modelId = 'eleven_monolingual_v1', + workspaceId, + workflowId, + executionId, + } = body if (!text || !voiceId || !apiKey) { return NextResponse.json({ error: 'Missing required parameters' }, { status: 400 }) @@ -29,7 +37,15 @@ export async function POST(request: NextRequest) { return NextResponse.json({ error: voiceIdValidation.error }, { status: 400 }) } - logger.info('Proxying TTS request for voice:', voiceId) + // Check if this is an execution context (from workflow tool execution) + const hasExecutionContext = workspaceId && workflowId && executionId + logger.info('Proxying TTS request for voice:', { + voiceId, + hasExecutionContext, + workspaceId, + workflowId, + executionId, + }) const endpoint = `https://api.elevenlabs.io/v1/text-to-speech/${voiceId}` @@ -64,17 +80,52 @@ export async function POST(request: NextRequest) { const audioBuffer = Buffer.from(await audioBlob.arrayBuffer()) const timestamp = Date.now() - const fileName = `elevenlabs-tts-${timestamp}.mp3` + // Use execution storage for workflow tool calls, copilot for chat UI + if (hasExecutionContext) { + const { uploadExecutionFile } = await import('@/lib/uploads/contexts/execution') + const fileName = `tts-${timestamp}.mp3` + + const userFile = await uploadExecutionFile( + { + workspaceId, + workflowId, + executionId, + }, + 
audioBuffer, + fileName, + 'audio/mpeg', + authResult.userId + ) + + logger.info('TTS audio stored in execution context:', { + executionId, + fileName, + size: userFile.size, + }) + + return NextResponse.json({ + audioFile: userFile, + audioUrl: userFile.url, + }) + } + + // Chat UI usage - no execution context, use copilot context + const fileName = `tts-${timestamp}.mp3` const fileInfo = await StorageService.uploadFile({ file: audioBuffer, fileName, contentType: 'audio/mpeg', - context: 'general', + context: 'copilot', }) const audioUrl = `${getBaseUrl()}${fileInfo.path}` + logger.info('TTS audio stored in copilot context (chat UI):', { + fileName, + size: fileInfo.size, + }) + return NextResponse.json({ audioUrl, size: fileInfo.size, diff --git a/apps/sim/app/api/tools/gmail/add-label/route.ts b/apps/sim/app/api/tools/gmail/add-label/route.ts new file mode 100644 index 000000000..2cfb8c6de --- /dev/null +++ b/apps/sim/app/api/tools/gmail/add-label/route.ts @@ -0,0 +1,117 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkHybridAuth } from '@/lib/auth/hybrid' +import { createLogger } from '@/lib/logs/console/logger' +import { generateRequestId } from '@/lib/utils' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('GmailAddLabelAPI') + +const GMAIL_API_BASE = 'https://gmail.googleapis.com/gmail/v1/users/me' + +const GmailAddLabelSchema = z.object({ + accessToken: z.string().min(1, 'Access token is required'), + messageId: z.string().min(1, 'Message ID is required'), + labelIds: z.string().min(1, 'At least one label ID is required'), +}) + +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkHybridAuth(request, { requireWorkflowId: false }) + + if (!authResult.success) { + logger.warn(`[${requestId}] Unauthorized Gmail add label attempt: ${authResult.error}`) + return NextResponse.json( + { + success: false, + error: authResult.error || 'Authentication required', + }, + { status: 401 } + ) + } + + logger.info(`[${requestId}] Authenticated Gmail add label request via ${authResult.authType}`, { + userId: authResult.userId, + }) + + const body = await request.json() + const validatedData = GmailAddLabelSchema.parse(body) + + logger.info(`[${requestId}] Adding label(s) to Gmail email`, { + messageId: validatedData.messageId, + labelIds: validatedData.labelIds, + }) + + const labelIds = validatedData.labelIds + .split(',') + .map((id) => id.trim()) + .filter((id) => id.length > 0) + + const gmailResponse = await fetch( + `${GMAIL_API_BASE}/messages/${validatedData.messageId}/modify`, + { + method: 'POST', + headers: { + Authorization: `Bearer ${validatedData.accessToken}`, + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + addLabelIds: labelIds, + }), + } + ) + + if (!gmailResponse.ok) { + const errorText = await gmailResponse.text() + logger.error(`[${requestId}] Gmail API error:`, errorText) + return NextResponse.json( + { + success: false, + error: `Gmail API error: ${gmailResponse.statusText}`, + }, + { status: gmailResponse.status } + ) + } + + const data = await gmailResponse.json() + + logger.info(`[${requestId}] Label(s) added successfully`, { messageId: data.id }) + + return NextResponse.json({ + success: true, + output: { + content: `Successfully added ${labelIds.length} label(s) to email`, + metadata: { + id: data.id, + threadId: data.threadId, + labelIds: data.labelIds, + }, + }, + }) + } catch (error) { + if 
(error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { + success: false, + error: 'Invalid request data', + details: error.errors, + }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error adding label to Gmail email:`, error) + + return NextResponse.json( + { + success: false, + error: error instanceof Error ? error.message : 'Internal server error', + }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/gmail/archive/route.ts b/apps/sim/app/api/tools/gmail/archive/route.ts new file mode 100644 index 000000000..79af4ff36 --- /dev/null +++ b/apps/sim/app/api/tools/gmail/archive/route.ts @@ -0,0 +1,110 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkHybridAuth } from '@/lib/auth/hybrid' +import { createLogger } from '@/lib/logs/console/logger' +import { generateRequestId } from '@/lib/utils' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('GmailArchiveAPI') + +const GMAIL_API_BASE = 'https://gmail.googleapis.com/gmail/v1/users/me' + +const GmailArchiveSchema = z.object({ + accessToken: z.string().min(1, 'Access token is required'), + messageId: z.string().min(1, 'Message ID is required'), +}) + +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkHybridAuth(request, { requireWorkflowId: false }) + + if (!authResult.success) { + logger.warn(`[${requestId}] Unauthorized Gmail archive attempt: ${authResult.error}`) + return NextResponse.json( + { + success: false, + error: authResult.error || 'Authentication required', + }, + { status: 401 } + ) + } + + logger.info(`[${requestId}] Authenticated Gmail archive request via ${authResult.authType}`, { + userId: authResult.userId, + }) + + const body = await request.json() + const validatedData = GmailArchiveSchema.parse(body) + + logger.info(`[${requestId}] Archiving Gmail email`, { + messageId: validatedData.messageId, + }) + + const gmailResponse = await fetch( + `${GMAIL_API_BASE}/messages/${validatedData.messageId}/modify`, + { + method: 'POST', + headers: { + Authorization: `Bearer ${validatedData.accessToken}`, + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + removeLabelIds: ['INBOX'], + }), + } + ) + + if (!gmailResponse.ok) { + const errorText = await gmailResponse.text() + logger.error(`[${requestId}] Gmail API error:`, errorText) + return NextResponse.json( + { + success: false, + error: `Gmail API error: ${gmailResponse.statusText}`, + }, + { status: gmailResponse.status } + ) + } + + const data = await gmailResponse.json() + + logger.info(`[${requestId}] Email archived successfully`, { messageId: data.id }) + + return NextResponse.json({ + success: true, + output: { + content: 'Email archived successfully', + metadata: { + id: data.id, + threadId: data.threadId, + labelIds: data.labelIds, + }, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { + success: false, + error: 'Invalid request data', + details: error.errors, + }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error archiving Gmail email:`, error) + + return NextResponse.json( + { + success: false, + error: error instanceof Error ? 
error.message : 'Internal server error', + }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/gmail/delete/route.ts b/apps/sim/app/api/tools/gmail/delete/route.ts new file mode 100644 index 000000000..7d853ee32 --- /dev/null +++ b/apps/sim/app/api/tools/gmail/delete/route.ts @@ -0,0 +1,107 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkHybridAuth } from '@/lib/auth/hybrid' +import { createLogger } from '@/lib/logs/console/logger' +import { generateRequestId } from '@/lib/utils' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('GmailDeleteAPI') + +const GMAIL_API_BASE = 'https://gmail.googleapis.com/gmail/v1/users/me' + +const GmailDeleteSchema = z.object({ + accessToken: z.string().min(1, 'Access token is required'), + messageId: z.string().min(1, 'Message ID is required'), +}) + +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkHybridAuth(request, { requireWorkflowId: false }) + + if (!authResult.success) { + logger.warn(`[${requestId}] Unauthorized Gmail delete attempt: ${authResult.error}`) + return NextResponse.json( + { + success: false, + error: authResult.error || 'Authentication required', + }, + { status: 401 } + ) + } + + logger.info(`[${requestId}] Authenticated Gmail delete request via ${authResult.authType}`, { + userId: authResult.userId, + }) + + const body = await request.json() + const validatedData = GmailDeleteSchema.parse(body) + + logger.info(`[${requestId}] Deleting Gmail email`, { + messageId: validatedData.messageId, + }) + + const gmailResponse = await fetch( + `${GMAIL_API_BASE}/messages/${validatedData.messageId}/trash`, + { + method: 'POST', + headers: { + Authorization: `Bearer ${validatedData.accessToken}`, + 'Content-Type': 'application/json', + }, + } + ) + + if (!gmailResponse.ok) { + const errorText = await gmailResponse.text() + logger.error(`[${requestId}] Gmail API error:`, errorText) + return NextResponse.json( + { + success: false, + error: `Gmail API error: ${gmailResponse.statusText}`, + }, + { status: gmailResponse.status } + ) + } + + const data = await gmailResponse.json() + + logger.info(`[${requestId}] Email deleted successfully`, { messageId: data.id }) + + return NextResponse.json({ + success: true, + output: { + content: 'Email moved to trash successfully', + metadata: { + id: data.id, + threadId: data.threadId, + labelIds: data.labelIds, + }, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { + success: false, + error: 'Invalid request data', + details: error.errors, + }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error deleting Gmail email:`, error) + + return NextResponse.json( + { + success: false, + error: error instanceof Error ? 
error.message : 'Internal server error', + }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/gmail/mark-read/route.ts b/apps/sim/app/api/tools/gmail/mark-read/route.ts new file mode 100644 index 000000000..1ef8926ea --- /dev/null +++ b/apps/sim/app/api/tools/gmail/mark-read/route.ts @@ -0,0 +1,110 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkHybridAuth } from '@/lib/auth/hybrid' +import { createLogger } from '@/lib/logs/console/logger' +import { generateRequestId } from '@/lib/utils' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('GmailMarkReadAPI') + +const GMAIL_API_BASE = 'https://gmail.googleapis.com/gmail/v1/users/me' + +const GmailMarkReadSchema = z.object({ + accessToken: z.string().min(1, 'Access token is required'), + messageId: z.string().min(1, 'Message ID is required'), +}) + +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkHybridAuth(request, { requireWorkflowId: false }) + + if (!authResult.success) { + logger.warn(`[${requestId}] Unauthorized Gmail mark read attempt: ${authResult.error}`) + return NextResponse.json( + { + success: false, + error: authResult.error || 'Authentication required', + }, + { status: 401 } + ) + } + + logger.info(`[${requestId}] Authenticated Gmail mark read request via ${authResult.authType}`, { + userId: authResult.userId, + }) + + const body = await request.json() + const validatedData = GmailMarkReadSchema.parse(body) + + logger.info(`[${requestId}] Marking Gmail email as read`, { + messageId: validatedData.messageId, + }) + + const gmailResponse = await fetch( + `${GMAIL_API_BASE}/messages/${validatedData.messageId}/modify`, + { + method: 'POST', + headers: { + Authorization: `Bearer ${validatedData.accessToken}`, + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + removeLabelIds: ['UNREAD'], + }), + } + ) + + if (!gmailResponse.ok) { + const errorText = await gmailResponse.text() + logger.error(`[${requestId}] Gmail API error:`, errorText) + return NextResponse.json( + { + success: false, + error: `Gmail API error: ${gmailResponse.statusText}`, + }, + { status: gmailResponse.status } + ) + } + + const data = await gmailResponse.json() + + logger.info(`[${requestId}] Email marked as read successfully`, { messageId: data.id }) + + return NextResponse.json({ + success: true, + output: { + content: 'Email marked as read successfully', + metadata: { + id: data.id, + threadId: data.threadId, + labelIds: data.labelIds, + }, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { + success: false, + error: 'Invalid request data', + details: error.errors, + }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error marking Gmail email as read:`, error) + + return NextResponse.json( + { + success: false, + error: error instanceof Error ? 
error.message : 'Internal server error', + }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/gmail/mark-unread/route.ts b/apps/sim/app/api/tools/gmail/mark-unread/route.ts new file mode 100644 index 000000000..7c3d1f081 --- /dev/null +++ b/apps/sim/app/api/tools/gmail/mark-unread/route.ts @@ -0,0 +1,113 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkHybridAuth } from '@/lib/auth/hybrid' +import { createLogger } from '@/lib/logs/console/logger' +import { generateRequestId } from '@/lib/utils' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('GmailMarkUnreadAPI') + +const GMAIL_API_BASE = 'https://gmail.googleapis.com/gmail/v1/users/me' + +const GmailMarkUnreadSchema = z.object({ + accessToken: z.string().min(1, 'Access token is required'), + messageId: z.string().min(1, 'Message ID is required'), +}) + +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkHybridAuth(request, { requireWorkflowId: false }) + + if (!authResult.success) { + logger.warn(`[${requestId}] Unauthorized Gmail mark unread attempt: ${authResult.error}`) + return NextResponse.json( + { + success: false, + error: authResult.error || 'Authentication required', + }, + { status: 401 } + ) + } + + logger.info( + `[${requestId}] Authenticated Gmail mark unread request via ${authResult.authType}`, + { + userId: authResult.userId, + } + ) + + const body = await request.json() + const validatedData = GmailMarkUnreadSchema.parse(body) + + logger.info(`[${requestId}] Marking Gmail email as unread`, { + messageId: validatedData.messageId, + }) + + const gmailResponse = await fetch( + `${GMAIL_API_BASE}/messages/${validatedData.messageId}/modify`, + { + method: 'POST', + headers: { + Authorization: `Bearer ${validatedData.accessToken}`, + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + addLabelIds: ['UNREAD'], + }), + } + ) + + if (!gmailResponse.ok) { + const errorText = await gmailResponse.text() + logger.error(`[${requestId}] Gmail API error:`, errorText) + return NextResponse.json( + { + success: false, + error: `Gmail API error: ${gmailResponse.statusText}`, + }, + { status: gmailResponse.status } + ) + } + + const data = await gmailResponse.json() + + logger.info(`[${requestId}] Email marked as unread successfully`, { messageId: data.id }) + + return NextResponse.json({ + success: true, + output: { + content: 'Email marked as unread successfully', + metadata: { + id: data.id, + threadId: data.threadId, + labelIds: data.labelIds, + }, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { + success: false, + error: 'Invalid request data', + details: error.errors, + }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error marking Gmail email as unread:`, error) + + return NextResponse.json( + { + success: false, + error: error instanceof Error ? 
error.message : 'Internal server error', + }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/gmail/move/route.ts b/apps/sim/app/api/tools/gmail/move/route.ts new file mode 100644 index 000000000..57a8369ba --- /dev/null +++ b/apps/sim/app/api/tools/gmail/move/route.ts @@ -0,0 +1,134 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkHybridAuth } from '@/lib/auth/hybrid' +import { createLogger } from '@/lib/logs/console/logger' +import { generateRequestId } from '@/lib/utils' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('GmailMoveAPI') + +const GMAIL_API_BASE = 'https://gmail.googleapis.com/gmail/v1/users/me' + +const GmailMoveSchema = z.object({ + accessToken: z.string().min(1, 'Access token is required'), + messageId: z.string().min(1, 'Message ID is required'), + addLabelIds: z.string().min(1, 'At least one label to add is required'), + removeLabelIds: z.string().optional().nullable(), +}) + +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkHybridAuth(request, { requireWorkflowId: false }) + + if (!authResult.success) { + logger.warn(`[${requestId}] Unauthorized Gmail move attempt: ${authResult.error}`) + return NextResponse.json( + { + success: false, + error: authResult.error || 'Authentication required', + }, + { status: 401 } + ) + } + + logger.info(`[${requestId}] Authenticated Gmail move request via ${authResult.authType}`, { + userId: authResult.userId, + }) + + const body = await request.json() + const validatedData = GmailMoveSchema.parse(body) + + logger.info(`[${requestId}] Moving Gmail email`, { + messageId: validatedData.messageId, + addLabelIds: validatedData.addLabelIds, + removeLabelIds: validatedData.removeLabelIds, + }) + + const addLabelIds = validatedData.addLabelIds + .split(',') + .map((id) => id.trim()) + .filter((id) => id.length > 0) + + const removeLabelIds = validatedData.removeLabelIds + ? 
validatedData.removeLabelIds + .split(',') + .map((id) => id.trim()) + .filter((id) => id.length > 0) + : [] + + const modifyBody: { addLabelIds?: string[]; removeLabelIds?: string[] } = {} + + if (addLabelIds.length > 0) { + modifyBody.addLabelIds = addLabelIds + } + + if (removeLabelIds.length > 0) { + modifyBody.removeLabelIds = removeLabelIds + } + + const gmailResponse = await fetch( + `${GMAIL_API_BASE}/messages/${validatedData.messageId}/modify`, + { + method: 'POST', + headers: { + Authorization: `Bearer ${validatedData.accessToken}`, + 'Content-Type': 'application/json', + }, + body: JSON.stringify(modifyBody), + } + ) + + if (!gmailResponse.ok) { + const errorText = await gmailResponse.text() + logger.error(`[${requestId}] Gmail API error:`, errorText) + return NextResponse.json( + { + success: false, + error: `Gmail API error: ${gmailResponse.statusText}`, + }, + { status: gmailResponse.status } + ) + } + + const data = await gmailResponse.json() + + logger.info(`[${requestId}] Email moved successfully`, { messageId: data.id }) + + return NextResponse.json({ + success: true, + output: { + content: 'Email moved successfully', + metadata: { + id: data.id, + threadId: data.threadId, + labelIds: data.labelIds, + }, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { + success: false, + error: 'Invalid request data', + details: error.errors, + }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error moving Gmail email:`, error) + + return NextResponse.json( + { + success: false, + error: error instanceof Error ? error.message : 'Internal server error', + }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/gmail/remove-label/route.ts b/apps/sim/app/api/tools/gmail/remove-label/route.ts new file mode 100644 index 000000000..8978e7982 --- /dev/null +++ b/apps/sim/app/api/tools/gmail/remove-label/route.ts @@ -0,0 +1,120 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkHybridAuth } from '@/lib/auth/hybrid' +import { createLogger } from '@/lib/logs/console/logger' +import { generateRequestId } from '@/lib/utils' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('GmailRemoveLabelAPI') + +const GMAIL_API_BASE = 'https://gmail.googleapis.com/gmail/v1/users/me' + +const GmailRemoveLabelSchema = z.object({ + accessToken: z.string().min(1, 'Access token is required'), + messageId: z.string().min(1, 'Message ID is required'), + labelIds: z.string().min(1, 'At least one label ID is required'), +}) + +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkHybridAuth(request, { requireWorkflowId: false }) + + if (!authResult.success) { + logger.warn(`[${requestId}] Unauthorized Gmail remove label attempt: ${authResult.error}`) + return NextResponse.json( + { + success: false, + error: authResult.error || 'Authentication required', + }, + { status: 401 } + ) + } + + logger.info( + `[${requestId}] Authenticated Gmail remove label request via ${authResult.authType}`, + { + userId: authResult.userId, + } + ) + + const body = await request.json() + const validatedData = GmailRemoveLabelSchema.parse(body) + + logger.info(`[${requestId}] Removing label(s) from Gmail email`, { + messageId: validatedData.messageId, + labelIds: validatedData.labelIds, + }) + + const labelIds = validatedData.labelIds + 
.split(',') + .map((id) => id.trim()) + .filter((id) => id.length > 0) + + const gmailResponse = await fetch( + `${GMAIL_API_BASE}/messages/${validatedData.messageId}/modify`, + { + method: 'POST', + headers: { + Authorization: `Bearer ${validatedData.accessToken}`, + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + removeLabelIds: labelIds, + }), + } + ) + + if (!gmailResponse.ok) { + const errorText = await gmailResponse.text() + logger.error(`[${requestId}] Gmail API error:`, errorText) + return NextResponse.json( + { + success: false, + error: `Gmail API error: ${gmailResponse.statusText}`, + }, + { status: gmailResponse.status } + ) + } + + const data = await gmailResponse.json() + + logger.info(`[${requestId}] Label(s) removed successfully`, { messageId: data.id }) + + return NextResponse.json({ + success: true, + output: { + content: `Successfully removed ${labelIds.length} label(s) from email`, + metadata: { + id: data.id, + threadId: data.threadId, + labelIds: data.labelIds, + }, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { + success: false, + error: 'Invalid request data', + details: error.errors, + }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error removing label from Gmail email:`, error) + + return NextResponse.json( + { + success: false, + error: error instanceof Error ? error.message : 'Internal server error', + }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/gmail/unarchive/route.ts b/apps/sim/app/api/tools/gmail/unarchive/route.ts new file mode 100644 index 000000000..4e0768657 --- /dev/null +++ b/apps/sim/app/api/tools/gmail/unarchive/route.ts @@ -0,0 +1,110 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkHybridAuth } from '@/lib/auth/hybrid' +import { createLogger } from '@/lib/logs/console/logger' +import { generateRequestId } from '@/lib/utils' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('GmailUnarchiveAPI') + +const GMAIL_API_BASE = 'https://gmail.googleapis.com/gmail/v1/users/me' + +const GmailUnarchiveSchema = z.object({ + accessToken: z.string().min(1, 'Access token is required'), + messageId: z.string().min(1, 'Message ID is required'), +}) + +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkHybridAuth(request, { requireWorkflowId: false }) + + if (!authResult.success) { + logger.warn(`[${requestId}] Unauthorized Gmail unarchive attempt: ${authResult.error}`) + return NextResponse.json( + { + success: false, + error: authResult.error || 'Authentication required', + }, + { status: 401 } + ) + } + + logger.info(`[${requestId}] Authenticated Gmail unarchive request via ${authResult.authType}`, { + userId: authResult.userId, + }) + + const body = await request.json() + const validatedData = GmailUnarchiveSchema.parse(body) + + logger.info(`[${requestId}] Unarchiving Gmail email`, { + messageId: validatedData.messageId, + }) + + const gmailResponse = await fetch( + `${GMAIL_API_BASE}/messages/${validatedData.messageId}/modify`, + { + method: 'POST', + headers: { + Authorization: `Bearer ${validatedData.accessToken}`, + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + addLabelIds: ['INBOX'], + }), + } + ) + + if (!gmailResponse.ok) { + const errorText = await gmailResponse.text() + 
logger.error(`[${requestId}] Gmail API error:`, errorText) + return NextResponse.json( + { + success: false, + error: `Gmail API error: ${gmailResponse.statusText}`, + }, + { status: gmailResponse.status } + ) + } + + const data = await gmailResponse.json() + + logger.info(`[${requestId}] Email unarchived successfully`, { messageId: data.id }) + + return NextResponse.json({ + success: true, + output: { + content: 'Email moved back to inbox successfully', + metadata: { + id: data.id, + threadId: data.threadId, + labelIds: data.labelIds, + }, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { + success: false, + error: 'Invalid request data', + details: error.errors, + }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error unarchiving Gmail email:`, error) + + return NextResponse.json( + { + success: false, + error: error instanceof Error ? error.message : 'Internal server error', + }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/onedrive/files/route.ts b/apps/sim/app/api/tools/onedrive/files/route.ts new file mode 100644 index 000000000..0d916410f --- /dev/null +++ b/apps/sim/app/api/tools/onedrive/files/route.ts @@ -0,0 +1,165 @@ +import { randomUUID } from 'crypto' +import { db } from '@sim/db' +import { account } from '@sim/db/schema' +import { eq } from 'drizzle-orm' +import { type NextRequest, NextResponse } from 'next/server' +import { getSession } from '@/lib/auth' +import { createLogger } from '@/lib/logs/console/logger' +import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('OneDriveFilesAPI') + +import type { MicrosoftGraphDriveItem } from '@/tools/onedrive/types' + +/** + * Get files (not folders) from Microsoft OneDrive + */ +export async function GET(request: NextRequest) { + const requestId = randomUUID().slice(0, 8) + logger.info(`[${requestId}] OneDrive files request received`) + + try { + const session = await getSession() + if (!session?.user?.id) { + logger.warn(`[${requestId}] Unauthenticated request rejected`) + return NextResponse.json({ error: 'User not authenticated' }, { status: 401 }) + } + + const { searchParams } = new URL(request.url) + const credentialId = searchParams.get('credentialId') + const query = searchParams.get('query') || '' + + if (!credentialId) { + logger.warn(`[${requestId}] Missing credential ID`) + return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 }) + } + + logger.info(`[${requestId}] Fetching credential`, { credentialId }) + + const credentials = await db.select().from(account).where(eq(account.id, credentialId)).limit(1) + if (!credentials.length) { + logger.warn(`[${requestId}] Credential not found`, { credentialId }) + return NextResponse.json({ error: 'Credential not found' }, { status: 404 }) + } + + const credential = credentials[0] + if (credential.userId !== session.user.id) { + logger.warn(`[${requestId}] Unauthorized credential access attempt`, { + credentialUserId: credential.userId, + requestUserId: session.user.id, + }) + return NextResponse.json({ error: 'Unauthorized' }, { status: 403 }) + } + + const accessToken = await refreshAccessTokenIfNeeded(credentialId, session.user.id, requestId) + if (!accessToken) { + logger.error(`[${requestId}] Failed to obtain valid access token`) + return NextResponse.json({ error: 'Failed to obtain valid access token' }, 
{ status: 401 }) + } + + // Use search endpoint if query provided, otherwise list root children + // Microsoft Graph API doesn't support $filter on file/folder properties for /children endpoint + let url: string + if (query) { + // Use search endpoint with query + const searchParams_new = new URLSearchParams() + searchParams_new.append( + '$select', + 'id,name,file,webUrl,size,createdDateTime,lastModifiedDateTime,createdBy,thumbnails' + ) + searchParams_new.append('$top', '50') + url = `https://graph.microsoft.com/v1.0/me/drive/root/search(q='${encodeURIComponent(query)}')?${searchParams_new.toString()}` + } else { + // List all children (files and folders) from root + const searchParams_new = new URLSearchParams() + searchParams_new.append( + '$select', + 'id,name,file,folder,webUrl,size,createdDateTime,lastModifiedDateTime,createdBy,thumbnails' + ) + searchParams_new.append('$top', '50') + url = `https://graph.microsoft.com/v1.0/me/drive/root/children?${searchParams_new.toString()}` + } + + logger.info(`[${requestId}] Fetching files from Microsoft Graph`, { url }) + + const response = await fetch(url, { + headers: { + Authorization: `Bearer ${accessToken}`, + }, + }) + + if (!response.ok) { + const errorData = await response.json().catch(() => ({ error: { message: 'Unknown error' } })) + logger.error(`[${requestId}] Microsoft Graph API error`, { + status: response.status, + error: errorData.error?.message || 'Failed to fetch files from OneDrive', + }) + return NextResponse.json( + { error: errorData.error?.message || 'Failed to fetch files from OneDrive' }, + { status: response.status } + ) + } + + const data = await response.json() + logger.info(`[${requestId}] Received ${data.value?.length || 0} items from Microsoft Graph`) + + // Log what we received to debug filtering + const itemBreakdown = (data.value || []).reduce( + (acc: any, item: MicrosoftGraphDriveItem) => { + if (item.file) acc.files++ + if (item.folder) acc.folders++ + return acc + }, + { files: 0, folders: 0 } + ) + logger.info(`[${requestId}] Item breakdown`, itemBreakdown) + + const files = (data.value || []) + .filter((item: MicrosoftGraphDriveItem) => { + const isFile = !!item.file && !item.folder + if (!isFile) { + logger.debug( + `[${requestId}] Filtering out item: ${item.name} (isFolder: ${!!item.folder})` + ) + } + return isFile + }) + .map((file: MicrosoftGraphDriveItem) => ({ + id: file.id, + name: file.name, + mimeType: file.file?.mimeType || 'application/octet-stream', + iconLink: file.thumbnails?.[0]?.small?.url, + webViewLink: file.webUrl, + thumbnailLink: file.thumbnails?.[0]?.medium?.url, + createdTime: file.createdDateTime, + modifiedTime: file.lastModifiedDateTime, + size: file.size?.toString(), + owners: file.createdBy + ? 
[ + { + displayName: file.createdBy.user?.displayName || 'Unknown', + emailAddress: file.createdBy.user?.email || '', + }, + ] + : [], + })) + + logger.info( + `[${requestId}] Returning ${files.length} files (filtered from ${data.value?.length || 0} items)` + ) + + // Log the file IDs we're returning + if (files.length > 0) { + logger.info(`[${requestId}] File IDs being returned:`, { + fileIds: files.slice(0, 5).map((f: any) => ({ id: f.id, name: f.name })), + }) + } + + return NextResponse.json({ files }, { status: 200 }) + } catch (error) { + logger.error(`[${requestId}] Error fetching files from OneDrive`, error) + return NextResponse.json({ error: 'Internal server error' }, { status: 500 }) + } +} diff --git a/apps/sim/app/api/tools/outlook/copy/route.ts b/apps/sim/app/api/tools/outlook/copy/route.ts new file mode 100644 index 000000000..b805e6d4e --- /dev/null +++ b/apps/sim/app/api/tools/outlook/copy/route.ts @@ -0,0 +1,112 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkHybridAuth } from '@/lib/auth/hybrid' +import { createLogger } from '@/lib/logs/console/logger' +import { generateRequestId } from '@/lib/utils' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('OutlookCopyAPI') + +const OutlookCopySchema = z.object({ + accessToken: z.string().min(1, 'Access token is required'), + messageId: z.string().min(1, 'Message ID is required'), + destinationId: z.string().min(1, 'Destination folder ID is required'), +}) + +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkHybridAuth(request, { requireWorkflowId: false }) + + if (!authResult.success) { + logger.warn(`[${requestId}] Unauthorized Outlook copy attempt: ${authResult.error}`) + return NextResponse.json( + { + success: false, + error: authResult.error || 'Authentication required', + }, + { status: 401 } + ) + } + + logger.info(`[${requestId}] Authenticated Outlook copy request via ${authResult.authType}`, { + userId: authResult.userId, + }) + + const body = await request.json() + const validatedData = OutlookCopySchema.parse(body) + + logger.info(`[${requestId}] Copying Outlook email`, { + messageId: validatedData.messageId, + destinationId: validatedData.destinationId, + }) + + const graphEndpoint = `https://graph.microsoft.com/v1.0/me/messages/${validatedData.messageId}/copy` + + logger.info(`[${requestId}] Sending to Microsoft Graph API: ${graphEndpoint}`) + + const graphResponse = await fetch(graphEndpoint, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${validatedData.accessToken}`, + }, + body: JSON.stringify({ + destinationId: validatedData.destinationId, + }), + }) + + if (!graphResponse.ok) { + const errorData = await graphResponse.json().catch(() => ({})) + logger.error(`[${requestId}] Microsoft Graph API error:`, errorData) + return NextResponse.json( + { + success: false, + error: errorData.error?.message || 'Failed to copy email', + }, + { status: graphResponse.status } + ) + } + + const responseData = await graphResponse.json() + + logger.info(`[${requestId}] Email copied successfully`, { + originalMessageId: validatedData.messageId, + copiedMessageId: responseData.id, + destinationFolderId: responseData.parentFolderId, + }) + + return NextResponse.json({ + success: true, + output: { + message: 'Email copied successfully', + originalMessageId: validatedData.messageId, + copiedMessageId: responseData.id, + 
destinationFolderId: responseData.parentFolderId, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { + success: false, + error: 'Invalid request data', + details: error.errors, + }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error copying Outlook email:`, error) + return NextResponse.json( + { + success: false, + error: error instanceof Error ? error.message : 'Unknown error occurred', + }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/outlook/delete/route.ts b/apps/sim/app/api/tools/outlook/delete/route.ts new file mode 100644 index 000000000..495a49bde --- /dev/null +++ b/apps/sim/app/api/tools/outlook/delete/route.ts @@ -0,0 +1,101 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkHybridAuth } from '@/lib/auth/hybrid' +import { createLogger } from '@/lib/logs/console/logger' +import { generateRequestId } from '@/lib/utils' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('OutlookDeleteAPI') + +const OutlookDeleteSchema = z.object({ + accessToken: z.string().min(1, 'Access token is required'), + messageId: z.string().min(1, 'Message ID is required'), +}) + +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkHybridAuth(request, { requireWorkflowId: false }) + + if (!authResult.success) { + logger.warn(`[${requestId}] Unauthorized Outlook delete attempt: ${authResult.error}`) + return NextResponse.json( + { + success: false, + error: authResult.error || 'Authentication required', + }, + { status: 401 } + ) + } + + logger.info(`[${requestId}] Authenticated Outlook delete request via ${authResult.authType}`, { + userId: authResult.userId, + }) + + const body = await request.json() + const validatedData = OutlookDeleteSchema.parse(body) + + logger.info(`[${requestId}] Deleting Outlook email`, { + messageId: validatedData.messageId, + }) + + const graphEndpoint = `https://graph.microsoft.com/v1.0/me/messages/${validatedData.messageId}` + + logger.info(`[${requestId}] Sending to Microsoft Graph API: ${graphEndpoint}`) + + const graphResponse = await fetch(graphEndpoint, { + method: 'DELETE', + headers: { + Authorization: `Bearer ${validatedData.accessToken}`, + }, + }) + + if (!graphResponse.ok) { + const errorData = await graphResponse.json().catch(() => ({})) + logger.error(`[${requestId}] Microsoft Graph API error:`, errorData) + return NextResponse.json( + { + success: false, + error: errorData.error?.message || 'Failed to delete email', + }, + { status: graphResponse.status } + ) + } + + logger.info(`[${requestId}] Email deleted successfully`, { + messageId: validatedData.messageId, + }) + + return NextResponse.json({ + success: true, + output: { + message: 'Email moved to Deleted Items successfully', + messageId: validatedData.messageId, + status: 'deleted', + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { + success: false, + error: 'Invalid request data', + details: error.errors, + }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error deleting Outlook email:`, error) + return NextResponse.json( + { + success: false, + error: error instanceof Error ? 
error.message : 'Unknown error occurred', + }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/outlook/mark-read/route.ts b/apps/sim/app/api/tools/outlook/mark-read/route.ts new file mode 100644 index 000000000..8186e2774 --- /dev/null +++ b/apps/sim/app/api/tools/outlook/mark-read/route.ts @@ -0,0 +1,111 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkHybridAuth } from '@/lib/auth/hybrid' +import { createLogger } from '@/lib/logs/console/logger' +import { generateRequestId } from '@/lib/utils' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('OutlookMarkReadAPI') + +const OutlookMarkReadSchema = z.object({ + accessToken: z.string().min(1, 'Access token is required'), + messageId: z.string().min(1, 'Message ID is required'), +}) + +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkHybridAuth(request, { requireWorkflowId: false }) + + if (!authResult.success) { + logger.warn(`[${requestId}] Unauthorized Outlook mark read attempt: ${authResult.error}`) + return NextResponse.json( + { + success: false, + error: authResult.error || 'Authentication required', + }, + { status: 401 } + ) + } + + logger.info( + `[${requestId}] Authenticated Outlook mark read request via ${authResult.authType}`, + { + userId: authResult.userId, + } + ) + + const body = await request.json() + const validatedData = OutlookMarkReadSchema.parse(body) + + logger.info(`[${requestId}] Marking Outlook email as read`, { + messageId: validatedData.messageId, + }) + + const graphEndpoint = `https://graph.microsoft.com/v1.0/me/messages/${validatedData.messageId}` + + logger.info(`[${requestId}] Sending to Microsoft Graph API: ${graphEndpoint}`) + + const graphResponse = await fetch(graphEndpoint, { + method: 'PATCH', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${validatedData.accessToken}`, + }, + body: JSON.stringify({ + isRead: true, + }), + }) + + if (!graphResponse.ok) { + const errorData = await graphResponse.json().catch(() => ({})) + logger.error(`[${requestId}] Microsoft Graph API error:`, errorData) + return NextResponse.json( + { + success: false, + error: errorData.error?.message || 'Failed to mark email as read', + }, + { status: graphResponse.status } + ) + } + + const responseData = await graphResponse.json() + + logger.info(`[${requestId}] Email marked as read successfully`, { + messageId: responseData.id, + isRead: responseData.isRead, + }) + + return NextResponse.json({ + success: true, + output: { + message: 'Email marked as read successfully', + messageId: responseData.id, + isRead: responseData.isRead, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { + success: false, + error: 'Invalid request data', + details: error.errors, + }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error marking Outlook email as read:`, error) + return NextResponse.json( + { + success: false, + error: error instanceof Error ? 
error.message : 'Unknown error occurred', + }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/outlook/mark-unread/route.ts b/apps/sim/app/api/tools/outlook/mark-unread/route.ts new file mode 100644 index 000000000..c4921f2df --- /dev/null +++ b/apps/sim/app/api/tools/outlook/mark-unread/route.ts @@ -0,0 +1,111 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkHybridAuth } from '@/lib/auth/hybrid' +import { createLogger } from '@/lib/logs/console/logger' +import { generateRequestId } from '@/lib/utils' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('OutlookMarkUnreadAPI') + +const OutlookMarkUnreadSchema = z.object({ + accessToken: z.string().min(1, 'Access token is required'), + messageId: z.string().min(1, 'Message ID is required'), +}) + +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkHybridAuth(request, { requireWorkflowId: false }) + + if (!authResult.success) { + logger.warn(`[${requestId}] Unauthorized Outlook mark unread attempt: ${authResult.error}`) + return NextResponse.json( + { + success: false, + error: authResult.error || 'Authentication required', + }, + { status: 401 } + ) + } + + logger.info( + `[${requestId}] Authenticated Outlook mark unread request via ${authResult.authType}`, + { + userId: authResult.userId, + } + ) + + const body = await request.json() + const validatedData = OutlookMarkUnreadSchema.parse(body) + + logger.info(`[${requestId}] Marking Outlook email as unread`, { + messageId: validatedData.messageId, + }) + + const graphEndpoint = `https://graph.microsoft.com/v1.0/me/messages/${validatedData.messageId}` + + logger.info(`[${requestId}] Sending to Microsoft Graph API: ${graphEndpoint}`) + + const graphResponse = await fetch(graphEndpoint, { + method: 'PATCH', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${validatedData.accessToken}`, + }, + body: JSON.stringify({ + isRead: false, + }), + }) + + if (!graphResponse.ok) { + const errorData = await graphResponse.json().catch(() => ({})) + logger.error(`[${requestId}] Microsoft Graph API error:`, errorData) + return NextResponse.json( + { + success: false, + error: errorData.error?.message || 'Failed to mark email as unread', + }, + { status: graphResponse.status } + ) + } + + const responseData = await graphResponse.json() + + logger.info(`[${requestId}] Email marked as unread successfully`, { + messageId: responseData.id, + isRead: responseData.isRead, + }) + + return NextResponse.json({ + success: true, + output: { + message: 'Email marked as unread successfully', + messageId: responseData.id, + isRead: responseData.isRead, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { + success: false, + error: 'Invalid request data', + details: error.errors, + }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error marking Outlook email as unread:`, error) + return NextResponse.json( + { + success: false, + error: error instanceof Error ? 
error.message : 'Unknown error occurred', + }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/outlook/move/route.ts b/apps/sim/app/api/tools/outlook/move/route.ts new file mode 100644 index 000000000..f5bb63335 --- /dev/null +++ b/apps/sim/app/api/tools/outlook/move/route.ts @@ -0,0 +1,110 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkHybridAuth } from '@/lib/auth/hybrid' +import { createLogger } from '@/lib/logs/console/logger' +import { generateRequestId } from '@/lib/utils' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('OutlookMoveAPI') + +const OutlookMoveSchema = z.object({ + accessToken: z.string().min(1, 'Access token is required'), + messageId: z.string().min(1, 'Message ID is required'), + destinationId: z.string().min(1, 'Destination folder ID is required'), +}) + +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkHybridAuth(request, { requireWorkflowId: false }) + + if (!authResult.success) { + logger.warn(`[${requestId}] Unauthorized Outlook move attempt: ${authResult.error}`) + return NextResponse.json( + { + success: false, + error: authResult.error || 'Authentication required', + }, + { status: 401 } + ) + } + + logger.info(`[${requestId}] Authenticated Outlook move request via ${authResult.authType}`, { + userId: authResult.userId, + }) + + const body = await request.json() + const validatedData = OutlookMoveSchema.parse(body) + + logger.info(`[${requestId}] Moving Outlook email`, { + messageId: validatedData.messageId, + destinationId: validatedData.destinationId, + }) + + const graphEndpoint = `https://graph.microsoft.com/v1.0/me/messages/${validatedData.messageId}/move` + + logger.info(`[${requestId}] Sending to Microsoft Graph API: ${graphEndpoint}`) + + const graphResponse = await fetch(graphEndpoint, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${validatedData.accessToken}`, + }, + body: JSON.stringify({ + destinationId: validatedData.destinationId, + }), + }) + + if (!graphResponse.ok) { + const errorData = await graphResponse.json().catch(() => ({})) + logger.error(`[${requestId}] Microsoft Graph API error:`, errorData) + return NextResponse.json( + { + success: false, + error: errorData.error?.message || 'Failed to move email', + }, + { status: graphResponse.status } + ) + } + + const responseData = await graphResponse.json() + + logger.info(`[${requestId}] Email moved successfully`, { + messageId: responseData.id, + parentFolderId: responseData.parentFolderId, + }) + + return NextResponse.json({ + success: true, + output: { + message: 'Email moved successfully', + messageId: responseData.id, + newFolderId: responseData.parentFolderId, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { + success: false, + error: 'Invalid request data', + details: error.errors, + }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error moving Outlook email:`, error) + return NextResponse.json( + { + success: false, + error: error instanceof Error ? 
error.message : 'Unknown error occurred', + }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/slack/add-reaction/route.ts b/apps/sim/app/api/tools/slack/add-reaction/route.ts new file mode 100644 index 000000000..00de3e243 --- /dev/null +++ b/apps/sim/app/api/tools/slack/add-reaction/route.ts @@ -0,0 +1,116 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkHybridAuth } from '@/lib/auth/hybrid' +import { createLogger } from '@/lib/logs/console/logger' +import { generateRequestId } from '@/lib/utils' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('SlackAddReactionAPI') + +const SlackAddReactionSchema = z.object({ + accessToken: z.string().min(1, 'Access token is required'), + channel: z.string().min(1, 'Channel ID is required'), + timestamp: z.string().min(1, 'Message timestamp is required'), + name: z.string().min(1, 'Emoji name is required'), +}) + +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkHybridAuth(request, { requireWorkflowId: false }) + + if (!authResult.success) { + logger.warn(`[${requestId}] Unauthorized Slack add reaction attempt: ${authResult.error}`) + return NextResponse.json( + { + success: false, + error: authResult.error || 'Authentication required', + }, + { status: 401 } + ) + } + + logger.info( + `[${requestId}] Authenticated Slack add reaction request via ${authResult.authType}`, + { + userId: authResult.userId, + } + ) + + const body = await request.json() + const validatedData = SlackAddReactionSchema.parse(body) + + logger.info(`[${requestId}] Adding Slack reaction`, { + channel: validatedData.channel, + timestamp: validatedData.timestamp, + emoji: validatedData.name, + }) + + const slackResponse = await fetch('https://slack.com/api/reactions.add', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${validatedData.accessToken}`, + }, + body: JSON.stringify({ + channel: validatedData.channel, + timestamp: validatedData.timestamp, + name: validatedData.name, + }), + }) + + const data = await slackResponse.json() + + if (!data.ok) { + logger.error(`[${requestId}] Slack API error:`, data) + return NextResponse.json( + { + success: false, + error: data.error || 'Failed to add reaction', + }, + { status: slackResponse.status } + ) + } + + logger.info(`[${requestId}] Reaction added successfully`, { + channel: validatedData.channel, + timestamp: validatedData.timestamp, + reaction: validatedData.name, + }) + + return NextResponse.json({ + success: true, + output: { + content: `Successfully added :${validatedData.name}: reaction`, + metadata: { + channel: validatedData.channel, + timestamp: validatedData.timestamp, + reaction: validatedData.name, + }, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { + success: false, + error: 'Invalid request data', + details: error.errors, + }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error adding Slack reaction:`, error) + return NextResponse.json( + { + success: false, + error: error instanceof Error ? 
error.message : 'Unknown error occurred', + }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/slack/delete-message/route.ts b/apps/sim/app/api/tools/slack/delete-message/route.ts new file mode 100644 index 000000000..eb1c202f2 --- /dev/null +++ b/apps/sim/app/api/tools/slack/delete-message/route.ts @@ -0,0 +1,111 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkHybridAuth } from '@/lib/auth/hybrid' +import { createLogger } from '@/lib/logs/console/logger' +import { generateRequestId } from '@/lib/utils' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('SlackDeleteMessageAPI') + +const SlackDeleteMessageSchema = z.object({ + accessToken: z.string().min(1, 'Access token is required'), + channel: z.string().min(1, 'Channel ID is required'), + timestamp: z.string().min(1, 'Message timestamp is required'), +}) + +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkHybridAuth(request, { requireWorkflowId: false }) + + if (!authResult.success) { + logger.warn(`[${requestId}] Unauthorized Slack delete message attempt: ${authResult.error}`) + return NextResponse.json( + { + success: false, + error: authResult.error || 'Authentication required', + }, + { status: 401 } + ) + } + + logger.info( + `[${requestId}] Authenticated Slack delete message request via ${authResult.authType}`, + { + userId: authResult.userId, + } + ) + + const body = await request.json() + const validatedData = SlackDeleteMessageSchema.parse(body) + + logger.info(`[${requestId}] Deleting Slack message`, { + channel: validatedData.channel, + timestamp: validatedData.timestamp, + }) + + const slackResponse = await fetch('https://slack.com/api/chat.delete', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${validatedData.accessToken}`, + }, + body: JSON.stringify({ + channel: validatedData.channel, + ts: validatedData.timestamp, + }), + }) + + const data = await slackResponse.json() + + if (!data.ok) { + logger.error(`[${requestId}] Slack API error:`, data) + return NextResponse.json( + { + success: false, + error: data.error || 'Failed to delete message', + }, + { status: slackResponse.status } + ) + } + + logger.info(`[${requestId}] Message deleted successfully`, { + channel: data.channel, + timestamp: data.ts, + }) + + return NextResponse.json({ + success: true, + output: { + content: 'Message deleted successfully', + metadata: { + channel: data.channel, + timestamp: data.ts, + }, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { + success: false, + error: 'Invalid request data', + details: error.errors, + }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error deleting Slack message:`, error) + return NextResponse.json( + { + success: false, + error: error instanceof Error ? 
error.message : 'Unknown error occurred', + }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/tools/slack/send-message/route.ts b/apps/sim/app/api/tools/slack/send-message/route.ts index b5b65cb9e..2470a685e 100644 --- a/apps/sim/app/api/tools/slack/send-message/route.ts +++ b/apps/sim/app/api/tools/slack/send-message/route.ts @@ -14,6 +14,7 @@ const SlackSendMessageSchema = z.object({ accessToken: z.string().min(1, 'Access token is required'), channel: z.string().min(1, 'Channel is required'), text: z.string().min(1, 'Message text is required'), + thread_ts: z.string().optional().nullable(), files: z.array(z.any()).optional().nullable(), }) @@ -59,6 +60,7 @@ export async function POST(request: NextRequest) { body: JSON.stringify({ channel: validatedData.channel, text: validatedData.text, + ...(validatedData.thread_ts && { thread_ts: validatedData.thread_ts }), }), }) @@ -100,6 +102,7 @@ export async function POST(request: NextRequest) { body: JSON.stringify({ channel: validatedData.channel, text: validatedData.text, + ...(validatedData.thread_ts && { thread_ts: validatedData.thread_ts }), }), }) @@ -166,6 +169,7 @@ export async function POST(request: NextRequest) { body: JSON.stringify({ channel: validatedData.channel, text: validatedData.text, + ...(validatedData.thread_ts && { thread_ts: validatedData.thread_ts }), }), }) diff --git a/apps/sim/app/api/tools/slack/update-message/route.ts b/apps/sim/app/api/tools/slack/update-message/route.ts new file mode 100644 index 000000000..205611394 --- /dev/null +++ b/apps/sim/app/api/tools/slack/update-message/route.ts @@ -0,0 +1,114 @@ +import { type NextRequest, NextResponse } from 'next/server' +import { z } from 'zod' +import { checkHybridAuth } from '@/lib/auth/hybrid' +import { createLogger } from '@/lib/logs/console/logger' +import { generateRequestId } from '@/lib/utils' + +export const dynamic = 'force-dynamic' + +const logger = createLogger('SlackUpdateMessageAPI') + +const SlackUpdateMessageSchema = z.object({ + accessToken: z.string().min(1, 'Access token is required'), + channel: z.string().min(1, 'Channel ID is required'), + timestamp: z.string().min(1, 'Message timestamp is required'), + text: z.string().min(1, 'Message text is required'), +}) + +export async function POST(request: NextRequest) { + const requestId = generateRequestId() + + try { + const authResult = await checkHybridAuth(request, { requireWorkflowId: false }) + + if (!authResult.success) { + logger.warn(`[${requestId}] Unauthorized Slack update message attempt: ${authResult.error}`) + return NextResponse.json( + { + success: false, + error: authResult.error || 'Authentication required', + }, + { status: 401 } + ) + } + + logger.info( + `[${requestId}] Authenticated Slack update message request via ${authResult.authType}`, + { + userId: authResult.userId, + } + ) + + const body = await request.json() + const validatedData = SlackUpdateMessageSchema.parse(body) + + logger.info(`[${requestId}] Updating Slack message`, { + channel: validatedData.channel, + timestamp: validatedData.timestamp, + }) + + const slackResponse = await fetch('https://slack.com/api/chat.update', { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${validatedData.accessToken}`, + }, + body: JSON.stringify({ + channel: validatedData.channel, + ts: validatedData.timestamp, + text: validatedData.text, + }), + }) + + const data = await slackResponse.json() + + if (!data.ok) { + logger.error(`[${requestId}] Slack API error:`, data) + return 
NextResponse.json( + { + success: false, + error: data.error || 'Failed to update message', + }, + { status: slackResponse.status } + ) + } + + logger.info(`[${requestId}] Message updated successfully`, { + channel: data.channel, + timestamp: data.ts, + }) + + return NextResponse.json({ + success: true, + output: { + content: 'Message updated successfully', + metadata: { + channel: data.channel, + timestamp: data.ts, + text: data.text, + }, + }, + }) + } catch (error) { + if (error instanceof z.ZodError) { + logger.warn(`[${requestId}] Invalid request data`, { errors: error.errors }) + return NextResponse.json( + { + success: false, + error: 'Invalid request data', + details: error.errors, + }, + { status: 400 } + ) + } + + logger.error(`[${requestId}] Error updating Slack message:`, error) + return NextResponse.json( + { + success: false, + error: error instanceof Error ? error.message : 'Unknown error occurred', + }, + { status: 500 } + ) + } +} diff --git a/apps/sim/app/api/workflows/[id]/execute/route.ts b/apps/sim/app/api/workflows/[id]/execute/route.ts index e80cbd6ec..aa7da7b37 100644 --- a/apps/sim/app/api/workflows/[id]/execute/route.ts +++ b/apps/sim/app/api/workflows/[id]/execute/route.ts @@ -2,14 +2,20 @@ import { type NextRequest, NextResponse } from 'next/server' import { v4 as uuidv4 } from 'uuid' import { checkHybridAuth } from '@/lib/auth/hybrid' import { checkServerSideUsageLimits } from '@/lib/billing' +import { processInputFileFields } from '@/lib/execution/files' import { createLogger } from '@/lib/logs/console/logger' import { LoggingSession } from '@/lib/logs/execution/logging-session' import { generateRequestId, SSE_HEADERS } from '@/lib/utils' +import { + loadDeployedWorkflowState, + loadWorkflowFromNormalizedTables, +} from '@/lib/workflows/db-helpers' import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core' import { type ExecutionEvent, encodeSSEEvent } from '@/lib/workflows/executor/execution-events' import { validateWorkflowAccess } from '@/app/api/workflows/middleware' import { type ExecutionMetadata, ExecutionSnapshot } from '@/executor/execution/snapshot' import type { StreamingExecution } from '@/executor/types' +import { Serializer } from '@/serializer' import type { SubflowType } from '@/stores/workflows/workflow/types' const logger = createLogger('WorkflowExecuteAPI') @@ -279,6 +285,61 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: ) } + // Process file fields in workflow input (base64/URL to UserFile conversion) + let processedInput = input + try { + const workflowData = shouldUseDraftState + ? await loadWorkflowFromNormalizedTables(workflowId) + : await loadDeployedWorkflowState(workflowId) + + if (workflowData) { + const serializedWorkflow = new Serializer().serializeWorkflow( + workflowData.blocks, + workflowData.edges, + workflowData.loops, + workflowData.parallels, + false + ) + + const executionContext = { + workspaceId: workflow.workspaceId || '', + workflowId, + executionId, + } + + processedInput = await processInputFileFields( + input, + serializedWorkflow.blocks, + executionContext, + requestId, + userId + ) + } + } catch (fileError) { + logger.error(`[${requestId}] Failed to process input file fields:`, fileError) + + await loggingSession.safeStart({ + userId, + workspaceId: workflow.workspaceId || '', + variables: {}, + }) + + await loggingSession.safeCompleteWithError({ + error: { + message: `File processing failed: ${fileError instanceof Error ? 
fileError.message : 'Unable to process input files'}`, + stackTrace: fileError instanceof Error ? fileError.stack : undefined, + }, + traceSpans: [], + }) + + return NextResponse.json( + { + error: `File processing failed: ${fileError instanceof Error ? fileError.message : 'Unable to process input files'}`, + }, + { status: 400 } + ) + } + if (!enableSSE) { logger.info(`[${requestId}] Using non-SSE execution (direct JSON response)`) try { @@ -296,7 +357,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: const snapshot = new ExecutionSnapshot( metadata, workflow, - input, + processedInput, {}, workflow.variables || {}, selectedOutputs @@ -525,7 +586,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id: const snapshot = new ExecutionSnapshot( metadata, workflow, - input, + processedInput, {}, workflow.variables || {}, selectedOutputs diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/sidebar/sidebar.tsx b/apps/sim/app/workspace/[workspaceId]/logs/components/sidebar/sidebar.tsx index 8ed71f21b..6de19f44e 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/components/sidebar/sidebar.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/components/sidebar/sidebar.tsx @@ -7,7 +7,6 @@ import { CopyButton } from '@/components/ui/copy-button' import { ScrollArea } from '@/components/ui/scroll-area' import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip' import { BASE_EXECUTION_CHARGE } from '@/lib/billing/constants' -import { redactApiKeys } from '@/lib/utils' import { FrozenCanvasModal } from '@/app/workspace/[workspaceId]/logs/components/frozen-canvas/frozen-canvas-modal' import { FileDownload } from '@/app/workspace/[workspaceId]/logs/components/sidebar/components/file-download' import LogMarkdownRenderer from '@/app/workspace/[workspaceId]/logs/components/sidebar/components/markdown-renderer' @@ -99,17 +98,17 @@ const BlockContentDisplay = ({ }) => { const [activeTab, setActiveTab] = useState<'output' | 'input'>(blockInput ? 'output' : 'output') - const redactedBlockInput = useMemo(() => { - return blockInput ? redactApiKeys(blockInput) : undefined + const blockInputString = useMemo(() => { + if (!blockInput) return undefined + return JSON.stringify(blockInput, null, 2) }, [blockInput]) - const redactedOutput = useMemo(() => { + const outputString = useMemo(() => { if (!isJson) return formatted try { const parsedOutput = JSON.parse(formatted) - const redactedJson = redactApiKeys(parsedOutput) - return JSON.stringify(redactedJson, null, 2) + return JSON.stringify(parsedOutput, null, 2) } catch (_e) { return formatted } @@ -120,7 +119,7 @@ const BlockContentDisplay = ({
{systemComment}
{/* Tabs for switching between output and input */} - {redactedBlockInput && ( + {blockInputString && (
) diff --git a/apps/sim/app/workspace/[workspaceId]/logs/components/tool-calls/tool-calls-display.tsx b/apps/sim/app/workspace/[workspaceId]/logs/components/tool-calls/tool-calls-display.tsx index 0696b1f5f..83151c99a 100644 --- a/apps/sim/app/workspace/[workspaceId]/logs/components/tool-calls/tool-calls-display.tsx +++ b/apps/sim/app/workspace/[workspaceId]/logs/components/tool-calls/tool-calls-display.tsx @@ -1,6 +1,6 @@ 'use client' -import { useState } from 'react' +import { useMemo, useState } from 'react' import { AlertCircle, CheckCircle2, ChevronDown, ChevronRight, Clock } from 'lucide-react' import { CopyButton } from '@/components/ui/copy-button' import { cn } from '@/lib/utils' @@ -34,6 +34,15 @@ interface ToolCallItemProps { function ToolCallItem({ toolCall, index }: ToolCallItemProps) { const [expanded, setExpanded] = useState(false) + const inputString = useMemo( + () => (toolCall.input ? JSON.stringify(toolCall.input, null, 2) : null), + [toolCall.input] + ) + const outputString = useMemo( + () => (toolCall.output ? JSON.stringify(toolCall.output, null, 2) : null), + [toolCall.output] + ) + // Always show exact milliseconds for duration const formattedDuration = toolCall.duration ? `${toolCall.duration}ms` : 'N/A' @@ -108,23 +117,23 @@ function ToolCallItem({ toolCall, index }: ToolCallItemProps) { {/* Input */} - {toolCall.input && ( + {inputString && (
Input
-                  
-                  {JSON.stringify(toolCall.input, null, 2)}
+                  
+                  {inputString}
                 
)} {/* Output or Error */} - {toolCall.status === 'success' && toolCall.output && ( + {toolCall.status === 'success' && outputString && (
Output
-                  
-                  {JSON.stringify(toolCall.output, null, 2)}
+                  
+                  {outputString}
                 
)} diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/console/components/json-view/json-view.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/console/components/json-view/json-view.tsx index 7cfc80294..fc282e123 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/console/components/json-view/json-view.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/console/components/json-view/json-view.tsx @@ -1,6 +1,6 @@ import { useEffect, useState } from 'react' import { Button } from '@/components/ui/button' -import { redactApiKeys } from '@/lib/utils' +import { filterForDisplay, redactApiKeys } from '@/lib/utils' interface JSONViewProps { data: any @@ -155,8 +155,8 @@ export const JSONView = ({ data }: JSONViewProps) => { y: number } | null>(null) - // Apply redaction to the data before displaying - const redactedData = redactApiKeys(data) + const filteredData = filterForDisplay(data) + const redactedData = redactApiKeys(filteredData) const handleContextMenu = (e: React.MouseEvent) => { e.preventDefault() diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/credential-selector/components/oauth-required-modal.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/credential-selector/components/oauth-required-modal.tsx index 087761253..267b9817e 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/credential-selector/components/oauth-required-modal.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/credential-selector/components/oauth-required-modal.tsx @@ -31,20 +31,19 @@ export interface OAuthRequiredModalProps { serviceId?: string } -// Map of OAuth scopes to user-friendly descriptions const SCOPE_DESCRIPTIONS: Record = { 'https://www.googleapis.com/auth/gmail.send': 'Send emails on your behalf', 'https://www.googleapis.com/auth/gmail.labels': 'View and manage your email labels', 'https://www.googleapis.com/auth/gmail.modify': 'View and manage your email messages', - // 'https://www.googleapis.com/auth/gmail.readonly': 'View and read your email messages', - // 'https://www.googleapis.com/auth/drive': 'View and manage your Google Drive files', + 'https://www.googleapis.com/auth/gmail.readonly': 'View and read your email messages', 'https://www.googleapis.com/auth/drive.readonly': 'View and read your Google Drive files', 'https://www.googleapis.com/auth/drive.file': 'View and manage your Google Drive files', - // 'https://www.googleapis.com/auth/documents': 'View and manage your Google Docs', 'https://www.googleapis.com/auth/calendar': 'View and manage your calendar', 'https://www.googleapis.com/auth/userinfo.email': 'View your email address', 'https://www.googleapis.com/auth/userinfo.profile': 'View your basic profile info', 'https://www.googleapis.com/auth/forms.responses.readonly': 'View responses to your Google Forms', + 'https://www.googleapis.com/auth/ediscovery': 'Access Google Vault for eDiscovery', + 'https://www.googleapis.com/auth/devstorage.read_only': 'Read files from Google Cloud Storage', 'read:page:confluence': 'Read Confluence pages', 'write:page:confluence': 'Write Confluence pages', 'read:me': 'Read your profile information', @@ -101,11 +100,24 @@ const 
SCOPE_DESCRIPTIONS: Record<string, string> = { 'Mail.ReadBasic': 'Read your Microsoft emails', 'Mail.Read': 'Read your Microsoft emails', 'Mail.Send': 'Send emails on your behalf', + 'Files.Read': 'Read your OneDrive files', + 'Files.ReadWrite': 'Read and write your OneDrive files', + 'ChannelMember.Read.All': 'Read team channel members', + 'Tasks.ReadWrite': 'Read and manage your Planner tasks', + 'Sites.Read.All': 'Read SharePoint sites', + 'Sites.ReadWrite.All': 'Read and write SharePoint sites', + 'Sites.Manage.All': 'Manage SharePoint sites', + openid: 'Standard authentication', + profile: 'Access your profile information', + email: 'Access your email address', identify: 'Read your Discord user', bot: 'Read your Discord bot', 'messages.read': 'Read your Discord messages', guilds: 'Read your Discord guilds', 'guilds.members.read': 'Read your Discord guild members', + identity: 'Access your Reddit identity', + login: 'Access your Wealthbox account', + data: 'Access your Wealthbox data', read: 'Read access to your workspace', write: 'Write access to your Linear workspace', 'channels:read': 'View public channels', @@ -116,14 +128,15 @@ const SCOPE_DESCRIPTIONS: Record<string, string> = { 'chat:write.public': 'Post to public channels', 'users:read': 'View workspace users', 'files:write': 'Upload files', + 'files:read': 'Download and read files', 'canvases:write': 'Create canvas documents', + 'reactions:write': 'Add emoji reactions to messages', 'sites:read': 'View your Webflow sites', 'sites:write': 'Manage webhooks and site settings', 'cms:read': 'View your CMS content', 'cms:write': 'Manage your CMS content', } -// Convert OAuth scope to user-friendly description function getScopeDescription(scope: string): string { return SCOPE_DESCRIPTIONS[scope] || scope } @@ -136,16 +149,13 @@ export function OAuthRequiredModal({ requiredScopes = [], serviceId, }: OAuthRequiredModalProps) { - // Get provider configuration and service const effectiveServiceId = serviceId || getServiceIdFromScopes(provider, requiredScopes) const { baseProvider } = parseProvider(provider) const baseProviderConfig = OAUTH_PROVIDERS[baseProvider] - // Default to base provider name and icon let providerName = baseProviderConfig?.name || provider let ProviderIcon = baseProviderConfig?.icon || (() => null) - // Try to find the specific service if (baseProviderConfig) { for (const service of Object.values(baseProviderConfig.services)) { if (service.id === effectiveServiceId || service.providerId === provider) { @@ -156,17 +166,14 @@ export function OAuthRequiredModal({ } } - // Filter out userinfo scopes as they're not relevant to show to users const displayScopes = requiredScopes.filter( (scope) => !scope.includes('userinfo.email') && !scope.includes('userinfo.profile') ) const handleConnectDirectly = async () => { try { - // Determine the appropriate serviceId and providerId const providerId = getProviderIdFromServiceId(effectiveServiceId) - // Close the modal onClose() logger.info('Linking OAuth2:', { diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/file-selector/components/microsoft-file-selector.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/file-selector/components/microsoft-file-selector.tsx index 220ccfb23..06e696e2f 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/file-selector/components/microsoft-file-selector.tsx +++ 
b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/file-selector/components/microsoft-file-selector.tsx @@ -52,6 +52,7 @@ interface MicrosoftFileSelectorProps { label?: string disabled?: boolean serviceId?: string + mimeType?: string // Filter type: 'file' for files only, 'application/vnd.microsoft.graph.folder' for folders only showPreview?: boolean onFileInfoChange?: (fileInfo: MicrosoftFileInfo | null) => void planId?: string @@ -68,6 +69,7 @@ export function MicrosoftFileSelector({ label = 'Select file', disabled = false, serviceId, + mimeType, showPreview = true, onFileInfoChange, planId, @@ -157,10 +159,15 @@ export function MicrosoftFileSelector({ queryParams.append('query', searchQuery.trim()) } - // Route to correct endpoint based on service + // Route to correct endpoint based on service and mimeType let endpoint: string if (serviceId === 'onedrive') { - endpoint = `/api/tools/onedrive/folders?${queryParams.toString()}` + // Use files endpoint if mimeType is 'file', otherwise use folders endpoint + if (mimeType === 'file') { + endpoint = `/api/tools/onedrive/files?${queryParams.toString()}` + } else { + endpoint = `/api/tools/onedrive/folders?${queryParams.toString()}` + } } else if (serviceId === 'sharepoint') { endpoint = `/api/tools/sharepoint/sites?${queryParams.toString()}` } else { @@ -188,7 +195,7 @@ export function MicrosoftFileSelector({ } finally { setIsLoadingFiles(false) } - }, [selectedCredentialId, searchQuery, serviceId, isForeignCredential]) + }, [selectedCredentialId, searchQuery, serviceId, mimeType, isForeignCredential]) // Fetch a single file by ID when we have a selectedFileId but no metadata const fetchFileById = useCallback( @@ -692,14 +699,18 @@ export function MicrosoftFileSelector({ } const getFileTypeTitleCase = () => { - if (serviceId === 'onedrive') return 'Folders' + if (serviceId === 'onedrive') { + return mimeType === 'file' ? 'Files' : 'Folders' + } if (serviceId === 'sharepoint') return 'Sites' if (serviceId === 'microsoft-planner') return 'Tasks' return 'Excel Files' } const getSearchPlaceholder = () => { - if (serviceId === 'onedrive') return 'Search OneDrive folders...' + if (serviceId === 'onedrive') { + return mimeType === 'file' ? 'Search OneDrive files...' : 'Search OneDrive folders...' + } if (serviceId === 'sharepoint') return 'Search SharePoint sites...' if (serviceId === 'microsoft-planner') return 'Search tasks...' return 'Search Excel files...' 
@@ -707,6 +718,12 @@ export function MicrosoftFileSelector({ const getEmptyStateText = () => { if (serviceId === 'onedrive') { + if (mimeType === 'file') { + return { + title: 'No files found.', + description: 'No files were found in your OneDrive.', + } + } return { title: 'No folders found.', description: 'No folders were found in your OneDrive.', diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/file-selector/file-selector-input.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/file-selector/file-selector-input.tsx index 6c5107a33..941b742e6 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/file-selector/file-selector-input.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/file-selector/file-selector-input.tsx @@ -286,6 +286,7 @@ export function FileSelectorInput({ provider='microsoft' requiredScopes={subBlock.requiredScopes || []} serviceId={subBlock.serviceId} + mimeType={subBlock.mimeType} label={subBlock.placeholder || 'Select OneDrive folder'} disabled={finalDisabled} showPreview={true} diff --git a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/folder-selector/components/folder-selector-input.tsx b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/folder-selector/components/folder-selector-input.tsx index f3449b60d..4529771a9 100644 --- a/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/folder-selector/components/folder-selector-input.tsx +++ b/apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/folder-selector/components/folder-selector-input.tsx @@ -1,6 +1,6 @@ 'use client' -import { useEffect, useState } from 'react' +import { useCallback, useEffect, useState } from 'react' import { type FolderInfo, FolderSelector, @@ -33,6 +33,11 @@ export function FolderSelectorInput({ const { activeWorkflowId } = useWorkflowRegistry() const [selectedFolderId, setSelectedFolderId] = useState('') const [_folderInfo, setFolderInfo] = useState(null) + const provider = (subBlock.provider || subBlock.serviceId || 'google-email').toLowerCase() + const isCopyDestinationSelector = + subBlock.canonicalParamId === 'copyDestinationId' || + subBlock.id === 'copyDestinationFolder' || + subBlock.id === 'manualCopyDestinationFolder' const { isForeignCredential } = useForeignCredential( subBlock.provider || subBlock.serviceId || 'outlook', (connectedCredential as string) || '' @@ -54,11 +59,15 @@ export function FolderSelectorInput({ setSelectedFolderId(current) return } - // Set default INBOX if empty - const defaultValue = 'INBOX' - setSelectedFolderId(defaultValue) - if (!isPreview) { - collaborativeSetSubblockValue(blockId, subBlock.id, defaultValue) + const shouldDefaultInbox = provider !== 'outlook' && !isCopyDestinationSelector + if (shouldDefaultInbox) { + const defaultValue = 'INBOX' + setSelectedFolderId(defaultValue) + if (!isPreview) { + collaborativeSetSubblockValue(blockId, subBlock.id, defaultValue) + } + } else { + setSelectedFolderId('') } }, [ blockId, @@ -71,19 +80,22 @@ export function FolderSelectorInput({ ]) // Handle folder selection - const handleFolderChange = 
(folderId: string, info?: FolderInfo) => { - setSelectedFolderId(folderId) - setFolderInfo(info || null) - if (!isPreview) { - collaborativeSetSubblockValue(blockId, subBlock.id, folderId) - } - } + const handleFolderChange = useCallback( + (folderId: string, info?: FolderInfo) => { + setSelectedFolderId(folderId) + setFolderInfo(info || null) + if (!isPreview) { + collaborativeSetSubblockValue(blockId, subBlock.id, folderId) + } + }, + [blockId, subBlock.id, collaborativeSetSubblockValue, isPreview] + ) return ( => { const testInput: Record = {} + if (Array.isArray(inputFormatValue)) { inputFormatValue.forEach((field: any) => { if (field && typeof field === 'object' && field.name && field.value !== undefined) { @@ -784,6 +786,7 @@ export function useWorkflowExecution() { } }) } + return testInput } diff --git a/apps/sim/background/webhook-execution.ts b/apps/sim/background/webhook-execution.ts index 2dc7a905f..5c2080f4f 100644 --- a/apps/sim/background/webhook-execution.ts +++ b/apps/sim/background/webhook-execution.ts @@ -38,6 +38,7 @@ async function processTriggerFileOutputs( workflowId: string executionId: string requestId: string + userId?: string }, path = '' ): Promise { @@ -178,21 +179,6 @@ async function executeWebhookJobInternal( // Merge subblock states (matching workflow-execution pattern) const mergedStates = mergeSubblockState(blocks, {}) - // Process block states for execution - const processedBlockStates = Object.entries(mergedStates).reduce( - (acc, [blockId, blockState]) => { - acc[blockId] = Object.entries(blockState.subBlocks).reduce( - (subAcc, [key, subBlock]) => { - subAcc[key] = subBlock.value - return subAcc - }, - {} as Record - ) - return acc - }, - {} as Record> - ) - // Create serialized workflow const serializer = new Serializer() const serializedWorkflow = serializer.serializeWorkflow( @@ -263,8 +249,8 @@ async function executeWebhookJobInternal( metadata, workflow, airtableInput, - decryptedEnvVars, - workflow.variables || {}, + {}, + workflowVariables, [] ) @@ -355,21 +341,30 @@ async function executeWebhookJobInternal( if (input && payload.blockId && blocks[payload.blockId]) { try { const triggerBlock = blocks[payload.blockId] - const triggerId = triggerBlock?.subBlocks?.triggerId?.value + const rawSelectedTriggerId = triggerBlock?.subBlocks?.selectedTriggerId?.value + const rawTriggerId = triggerBlock?.subBlocks?.triggerId?.value - if (triggerId && typeof triggerId === 'string' && isTriggerValid(triggerId)) { - const triggerConfig = getTrigger(triggerId) + const resolvedTriggerId = [rawSelectedTriggerId, rawTriggerId].find( + (candidate): candidate is string => + typeof candidate === 'string' && isTriggerValid(candidate) + ) + + if (resolvedTriggerId) { + const triggerConfig = getTrigger(resolvedTriggerId) if (triggerConfig.outputs) { - logger.debug(`[${requestId}] Processing trigger ${triggerId} file outputs`) + logger.debug(`[${requestId}] Processing trigger ${resolvedTriggerId} file outputs`) const processedInput = await processTriggerFileOutputs(input, triggerConfig.outputs, { workspaceId: workspaceId || '', workflowId: payload.workflowId, executionId, requestId, + userId: payload.userId, }) Object.assign(input, processedInput) } + } else { + logger.debug(`[${requestId}] No valid triggerId found for block ${payload.blockId}`) } } catch (error) { logger.error(`[${requestId}] Error processing trigger file outputs:`, error) @@ -449,8 +444,8 @@ async function executeWebhookJobInternal( metadata, workflow, input || {}, - decryptedEnvVars, - 
workflow.variables || {}, + {}, + workflowVariables, [] ) diff --git a/apps/sim/blocks/blocks/gmail.ts b/apps/sim/blocks/blocks/gmail.ts index 82e42df33..d19694c68 100644 --- a/apps/sim/blocks/blocks/gmail.ts +++ b/apps/sim/blocks/blocks/gmail.ts @@ -7,10 +7,10 @@ import { getTrigger } from '@/triggers' export const GmailBlock: BlockConfig = { type: 'gmail', name: 'Gmail', - description: 'Send Gmail or trigger workflows from Gmail events', + description: 'Send, read, search, and move Gmail messages or trigger workflows from Gmail events', authMode: AuthMode.OAuth, longDescription: - 'Integrate Gmail into the workflow. Can send, read, and search emails. Can be used in trigger mode to trigger a workflow when a new email is received.', + 'Integrate Gmail into the workflow. Can send, read, search, and move emails. Can be used in trigger mode to trigger a workflow when a new email is received.', docsLink: 'https://docs.sim.ai/tools/gmail', category: 'tools', bgColor: '#E0E0E0', @@ -28,6 +28,14 @@ export const GmailBlock: BlockConfig = { { label: 'Read Email', id: 'read_gmail' }, { label: 'Draft Email', id: 'draft_gmail' }, { label: 'Search Email', id: 'search_gmail' }, + { label: 'Move Email', id: 'move_gmail' }, + { label: 'Mark as Read', id: 'mark_read_gmail' }, + { label: 'Mark as Unread', id: 'mark_unread_gmail' }, + { label: 'Archive Email', id: 'archive_gmail' }, + { label: 'Unarchive Email', id: 'unarchive_gmail' }, + { label: 'Delete Email', id: 'delete_gmail' }, + { label: 'Add Label', id: 'add_label_gmail' }, + { label: 'Remove Label', id: 'remove_label_gmail' }, ], value: () => 'send_gmail', }, @@ -219,10 +227,155 @@ export const GmailBlock: BlockConfig = { placeholder: 'Maximum number of results (default: 10)', condition: { field: 'operation', value: ['search_gmail', 'read_gmail'] }, }, + // Move Email Fields + { + id: 'moveMessageId', + title: 'Message ID', + type: 'short-input', + layout: 'full', + placeholder: 'ID of the email to move', + condition: { field: 'operation', value: 'move_gmail' }, + required: true, + }, + // Destination label selector (basic mode) + { + id: 'destinationLabel', + title: 'Move To Label', + type: 'folder-selector', + layout: 'full', + canonicalParamId: 'addLabelIds', + provider: 'google-email', + serviceId: 'gmail', + requiredScopes: [ + 'https://www.googleapis.com/auth/gmail.readonly', + 'https://www.googleapis.com/auth/gmail.labels', + ], + placeholder: 'Select destination label', + dependsOn: ['credential'], + mode: 'basic', + condition: { field: 'operation', value: 'move_gmail' }, + required: true, + }, + // Manual destination label input (advanced mode) + { + id: 'manualDestinationLabel', + title: 'Move To Label', + type: 'short-input', + layout: 'full', + canonicalParamId: 'addLabelIds', + placeholder: 'Enter label ID (e.g., INBOX, Label_123)', + mode: 'advanced', + condition: { field: 'operation', value: 'move_gmail' }, + required: true, + }, + // Source label selector (basic mode) + { + id: 'sourceLabel', + title: 'Remove From Label (Optional)', + type: 'folder-selector', + layout: 'full', + canonicalParamId: 'removeLabelIds', + provider: 'google-email', + serviceId: 'gmail', + requiredScopes: [ + 'https://www.googleapis.com/auth/gmail.readonly', + 'https://www.googleapis.com/auth/gmail.labels', + ], + placeholder: 'Select label to remove', + dependsOn: ['credential'], + mode: 'basic', + condition: { field: 'operation', value: 'move_gmail' }, + required: false, + }, + // Manual source label input (advanced mode) + { + id: 'manualSourceLabel', + 
title: 'Remove From Label (Optional)', + type: 'short-input', + layout: 'full', + canonicalParamId: 'removeLabelIds', + placeholder: 'Enter label ID to remove (e.g., INBOX)', + mode: 'advanced', + condition: { field: 'operation', value: 'move_gmail' }, + required: false, + }, + // Mark as Read/Unread, Archive/Unarchive, Delete - Message ID field + { + id: 'actionMessageId', + title: 'Message ID', + type: 'short-input', + layout: 'full', + placeholder: 'ID of the email', + condition: { + field: 'operation', + value: [ + 'mark_read_gmail', + 'mark_unread_gmail', + 'archive_gmail', + 'unarchive_gmail', + 'delete_gmail', + ], + }, + required: true, + }, + // Add/Remove Label - Message ID field + { + id: 'labelActionMessageId', + title: 'Message ID', + type: 'short-input', + layout: 'full', + placeholder: 'ID of the email', + condition: { field: 'operation', value: ['add_label_gmail', 'remove_label_gmail'] }, + required: true, + }, + // Add/Remove Label - Label selector (basic mode) + { + id: 'labelManagement', + title: 'Label', + type: 'folder-selector', + layout: 'full', + canonicalParamId: 'labelIds', + provider: 'google-email', + serviceId: 'gmail', + requiredScopes: [ + 'https://www.googleapis.com/auth/gmail.readonly', + 'https://www.googleapis.com/auth/gmail.labels', + ], + placeholder: 'Select label', + dependsOn: ['credential'], + mode: 'basic', + condition: { field: 'operation', value: ['add_label_gmail', 'remove_label_gmail'] }, + required: true, + }, + // Add/Remove Label - Manual label input (advanced mode) + { + id: 'manualLabelManagement', + title: 'Label', + type: 'short-input', + layout: 'full', + canonicalParamId: 'labelIds', + placeholder: 'Enter label ID (e.g., INBOX, Label_123)', + mode: 'advanced', + condition: { field: 'operation', value: ['add_label_gmail', 'remove_label_gmail'] }, + required: true, + }, ...getTrigger('gmail_poller').subBlocks, ], tools: { - access: ['gmail_send', 'gmail_draft', 'gmail_read', 'gmail_search'], + access: [ + 'gmail_send', + 'gmail_draft', + 'gmail_read', + 'gmail_search', + 'gmail_move', + 'gmail_mark_read', + 'gmail_mark_unread', + 'gmail_archive', + 'gmail_unarchive', + 'gmail_delete', + 'gmail_add_label', + 'gmail_remove_label', + ], config: { tool: (params) => { switch (params.operation) { @@ -234,12 +387,42 @@ export const GmailBlock: BlockConfig = { return 'gmail_search' case 'read_gmail': return 'gmail_read' + case 'move_gmail': + return 'gmail_move' + case 'mark_read_gmail': + return 'gmail_mark_read' + case 'mark_unread_gmail': + return 'gmail_mark_unread' + case 'archive_gmail': + return 'gmail_archive' + case 'unarchive_gmail': + return 'gmail_unarchive' + case 'delete_gmail': + return 'gmail_delete' + case 'add_label_gmail': + return 'gmail_add_label' + case 'remove_label_gmail': + return 'gmail_remove_label' default: throw new Error(`Invalid Gmail operation: ${params.operation}`) } }, params: (params) => { - const { credential, folder, manualFolder, ...rest } = params + const { + credential, + folder, + manualFolder, + destinationLabel, + manualDestinationLabel, + sourceLabel, + manualSourceLabel, + moveMessageId, + actionMessageId, + labelActionMessageId, + labelManagement, + manualLabelManagement, + ...rest + } = params // Handle both selector and manual folder input const effectiveFolder = (folder || manualFolder || '').trim() @@ -248,6 +431,43 @@ export const GmailBlock: BlockConfig = { rest.folder = effectiveFolder || 'INBOX' } + // Handle move operation + if (rest.operation === 'move_gmail') { + if (moveMessageId) { + 
rest.messageId = moveMessageId + } + if (!rest.addLabelIds) { + rest.addLabelIds = (destinationLabel || manualDestinationLabel || '').trim() + } + if (!rest.removeLabelIds) { + rest.removeLabelIds = (sourceLabel || manualSourceLabel || '').trim() + } + } + + // Handle simple message ID operations + if ( + [ + 'mark_read_gmail', + 'mark_unread_gmail', + 'archive_gmail', + 'unarchive_gmail', + 'delete_gmail', + ].includes(rest.operation) + ) { + if (actionMessageId) { + rest.messageId = actionMessageId + } + } + + if (['add_label_gmail', 'remove_label_gmail'].includes(rest.operation)) { + if (labelActionMessageId) { + rest.messageId = labelActionMessageId + } + if (!rest.labelIds) { + rest.labelIds = (labelManagement || manualLabelManagement || '').trim() + } + } + return { ...rest, credential, @@ -279,6 +499,20 @@ export const GmailBlock: BlockConfig = { // Search operation inputs query: { type: 'string', description: 'Search query' }, maxResults: { type: 'number', description: 'Maximum results' }, + // Move operation inputs + moveMessageId: { type: 'string', description: 'Message ID to move' }, + destinationLabel: { type: 'string', description: 'Destination label ID' }, + manualDestinationLabel: { type: 'string', description: 'Manual destination label ID' }, + sourceLabel: { type: 'string', description: 'Source label ID to remove' }, + manualSourceLabel: { type: 'string', description: 'Manual source label ID' }, + addLabelIds: { type: 'string', description: 'Label IDs to add' }, + removeLabelIds: { type: 'string', description: 'Label IDs to remove' }, + // Action operation inputs + actionMessageId: { type: 'string', description: 'Message ID for actions' }, + labelActionMessageId: { type: 'string', description: 'Message ID for label actions' }, + labelManagement: { type: 'string', description: 'Label ID for management' }, + manualLabelManagement: { type: 'string', description: 'Manual label ID' }, + labelIds: { type: 'string', description: 'Label IDs for add/remove operations' }, }, outputs: { // Tool outputs diff --git a/apps/sim/blocks/blocks/google_drive.ts b/apps/sim/blocks/blocks/google_drive.ts index 60a6f9708..26665d45b 100644 --- a/apps/sim/blocks/blocks/google_drive.ts +++ b/apps/sim/blocks/blocks/google_drive.ts @@ -24,6 +24,7 @@ export const GoogleDriveBlock: BlockConfig = { { label: 'Create Folder', id: 'create_folder' }, { label: 'Create File', id: 'create_file' }, { label: 'Upload File', id: 'upload' }, + { label: 'Download File', id: 'download' }, { label: 'List Files', id: 'list' }, ], value: () => 'create_folder', @@ -259,9 +260,79 @@ export const GoogleDriveBlock: BlockConfig = { placeholder: 'Number of results (default: 100, max: 1000)', condition: { field: 'operation', value: 'list' }, }, + // Download File Fields - File Selector (basic mode) + { + id: 'fileSelector', + title: 'Select File', + type: 'file-selector', + layout: 'full', + canonicalParamId: 'fileId', + provider: 'google-drive', + serviceId: 'google-drive', + requiredScopes: [ + 'https://www.googleapis.com/auth/drive.readonly', + 'https://www.googleapis.com/auth/drive.file', + ], + placeholder: 'Select a file to download', + mode: 'basic', + dependsOn: ['credential'], + condition: { field: 'operation', value: 'download' }, + }, + // Manual File ID input (advanced mode) + { + id: 'manualFileId', + title: 'File ID', + type: 'short-input', + layout: 'full', + canonicalParamId: 'fileId', + placeholder: 'Enter file ID', + mode: 'advanced', + condition: { field: 'operation', value: 'download' }, + required: true, + 
}, + // Export format for Google Workspace files (download operation) + { + id: 'mimeType', + title: 'Export Format (for Google Workspace files)', + type: 'dropdown', + layout: 'full', + options: [ + { label: 'Plain Text (text/plain)', id: 'text/plain' }, + { label: 'HTML (text/html)', id: 'text/html' }, + { label: 'PDF (application/pdf)', id: 'application/pdf' }, + { + label: 'DOCX (MS Word)', + id: 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', + }, + { + label: 'XLSX (MS Excel)', + id: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', + }, + { + label: 'PPTX (MS PowerPoint)', + id: 'application/vnd.openxmlformats-officedocument.presentationml.presentation', + }, + { label: 'CSV (text/csv)', id: 'text/csv' }, + ], + placeholder: 'Optional: Choose export format for Google Docs/Sheets/Slides', + condition: { field: 'operation', value: 'download' }, + }, + { + id: 'fileName', + title: 'File Name Override', + type: 'short-input', + layout: 'full', + placeholder: 'Optional: Override the filename', + condition: { field: 'operation', value: 'download' }, + }, ], tools: { - access: ['google_drive_upload', 'google_drive_create_folder', 'google_drive_list'], + access: [ + 'google_drive_upload', + 'google_drive_create_folder', + 'google_drive_download', + 'google_drive_list', + ], config: { tool: (params) => { switch (params.operation) { @@ -270,6 +341,8 @@ export const GoogleDriveBlock: BlockConfig = { return 'google_drive_upload' case 'create_folder': return 'google_drive_create_folder' + case 'download': + return 'google_drive_download' case 'list': return 'google_drive_list' default: @@ -277,14 +350,26 @@ export const GoogleDriveBlock: BlockConfig = { } }, params: (params) => { - const { credential, folderSelector, manualFolderId, mimeType, ...rest } = params + const { + credential, + folderSelector, + manualFolderId, + fileSelector, + manualFileId, + mimeType, + ...rest + } = params // Use folderSelector if provided, otherwise use manualFolderId const effectiveFolderId = (folderSelector || manualFolderId || '').trim() + // Use fileSelector if provided, otherwise use manualFileId + const effectiveFileId = (fileSelector || manualFileId || '').trim() + return { credential, folderId: effectiveFolderId || undefined, + fileId: effectiveFileId || undefined, pageSize: rest.pageSize ? 
Number.parseInt(rest.pageSize as string, 10) : undefined, mimeType: mimeType, ...rest, @@ -299,7 +384,10 @@ export const GoogleDriveBlock: BlockConfig = { fileName: { type: 'string', description: 'File or folder name' }, file: { type: 'json', description: 'File to upload (UserFile object)' }, content: { type: 'string', description: 'Text content to upload' }, - mimeType: { type: 'string', description: 'File MIME type' }, + mimeType: { type: 'string', description: 'File MIME type or export format' }, + // Download operation inputs + fileSelector: { type: 'string', description: 'Selected file to download' }, + manualFileId: { type: 'string', description: 'Manual file identifier' }, // List operation inputs folderSelector: { type: 'string', description: 'Selected folder' }, manualFolderId: { type: 'string', description: 'Manual folder identifier' }, diff --git a/apps/sim/blocks/blocks/onedrive.ts b/apps/sim/blocks/blocks/onedrive.ts index bd3268de0..e235df8fe 100644 --- a/apps/sim/blocks/blocks/onedrive.ts +++ b/apps/sim/blocks/blocks/onedrive.ts @@ -1,8 +1,11 @@ import { MicrosoftOneDriveIcon } from '@/components/icons' +import { createLogger } from '@/lib/logs/console/logger' import type { BlockConfig } from '@/blocks/types' import { AuthMode } from '@/blocks/types' import type { OneDriveResponse } from '@/tools/onedrive/types' +const logger = createLogger('OneDriveBlock') + export const OneDriveBlock: BlockConfig = { type: 'onedrive', name: 'OneDrive', @@ -25,6 +28,7 @@ export const OneDriveBlock: BlockConfig = { { label: 'Create Folder', id: 'create_folder' }, { label: 'Create File', id: 'create_file' }, { label: 'Upload File', id: 'upload' }, + { label: 'Download File', id: 'download' }, { label: 'List Files', id: 'list' }, ], }, @@ -259,9 +263,53 @@ export const OneDriveBlock: BlockConfig = { placeholder: 'Number of results (default: 100, max: 1000)', condition: { field: 'operation', value: 'list' }, }, + // Download File Fields - File Selector (basic mode) + { + id: 'fileSelector', + title: 'Select File', + type: 'file-selector', + layout: 'full', + canonicalParamId: 'fileId', + provider: 'microsoft', + serviceId: 'onedrive', + requiredScopes: [ + 'openid', + 'profile', + 'email', + 'Files.Read', + 'Files.ReadWrite', + 'offline_access', + ], + mimeType: 'file', // Exclude folders, show only files + placeholder: 'Select a file to download', + mode: 'basic', + dependsOn: ['credential'], + condition: { field: 'operation', value: 'download' }, + }, + // Manual File ID input (advanced mode) + { + id: 'manualFileId', + title: 'File ID', + type: 'short-input', + layout: 'full', + canonicalParamId: 'fileId', + placeholder: 'Enter file ID', + mode: 'advanced', + condition: { field: 'operation', value: 'download' }, + required: true, + }, + { + id: 'downloadFileName', + title: 'File Name Override', + type: 'short-input', + layout: 'full', + canonicalParamId: 'fileName', + placeholder: 'Optional: Override the filename', + condition: { field: 'operation', value: 'download' }, + }, ], tools: { - access: ['onedrive_upload', 'onedrive_create_folder', 'onedrive_list'], + access: ['onedrive_upload', 'onedrive_create_folder', 'onedrive_download', 'onedrive_list'], config: { tool: (params) => { switch (params.operation) { @@ -270,6 +318,8 @@ export const OneDriveBlock: BlockConfig = { return 'onedrive_upload' case 'create_folder': return 'onedrive_create_folder' + case 'download': + return 'onedrive_download' case 'list': return 'onedrive_list' default: @@ -277,10 +327,7 @@ export const OneDriveBlock: 
BlockConfig = { } }, params: (params) => { - const { credential, folderSelector, manualFolderId, mimeType, values, ...rest } = params - - // Use folderSelector if provided, otherwise use manualFolderId - const effectiveFolderId = (folderSelector || manualFolderId || '').trim() + const { credential, folderId, fileId, mimeType, values, ...rest } = params let parsedValues try { @@ -293,7 +340,8 @@ export const OneDriveBlock: BlockConfig = { credential, ...rest, values: parsedValues, - folderId: effectiveFolderId || undefined, + folderId: folderId || undefined, + fileId: fileId || undefined, pageSize: rest.pageSize ? Number.parseInt(rest.pageSize as string, 10) : undefined, mimeType: mimeType, } @@ -310,11 +358,9 @@ export const OneDriveBlock: BlockConfig = { content: { type: 'string', description: 'Text content to upload' }, mimeType: { type: 'string', description: 'MIME type of file to create' }, values: { type: 'string', description: 'Cell values for new Excel as JSON' }, - // Get Content operation inputs - // fileId: { type: 'string', required: false }, - // List operation inputs - folderSelector: { type: 'string', description: 'Folder selector' }, - manualFolderId: { type: 'string', description: 'Manual folder ID' }, + fileId: { type: 'string', description: 'File ID to download' }, + downloadFileName: { type: 'string', description: 'File name override for download' }, + folderId: { type: 'string', description: 'Folder ID' }, query: { type: 'string', description: 'Search query' }, pageSize: { type: 'number', description: 'Results per page' }, }, diff --git a/apps/sim/blocks/blocks/outlook.ts b/apps/sim/blocks/blocks/outlook.ts index 75c531b92..c82f76915 100644 --- a/apps/sim/blocks/blocks/outlook.ts +++ b/apps/sim/blocks/blocks/outlook.ts @@ -7,10 +7,10 @@ import { getTrigger } from '@/triggers' export const OutlookBlock: BlockConfig = { type: 'outlook', name: 'Outlook', - description: 'Access Outlook', + description: 'Send, read, draft, forward, and move Outlook email messages', authMode: AuthMode.OAuth, longDescription: - 'Integrate Outlook into the workflow. Can read, draft, and send email messages. Can be used in trigger mode to trigger a workflow when a new email is received.', + 'Integrate Outlook into the workflow. Can read, draft, send, forward, and move email messages. 
Can be used in trigger mode to trigger a workflow when a new email is received.', docsLink: 'https://docs.sim.ai/tools/outlook', category: 'tools', triggerAllowed: true, @@ -27,6 +27,11 @@ export const OutlookBlock: BlockConfig = { { label: 'Draft Email', id: 'draft_outlook' }, { label: 'Read Email', id: 'read_outlook' }, { label: 'Forward Email', id: 'forward_outlook' }, + { label: 'Move Email', id: 'move_outlook' }, + { label: 'Mark as Read', id: 'mark_read_outlook' }, + { label: 'Mark as Unread', id: 'mark_unread_outlook' }, + { label: 'Delete Email', id: 'delete_outlook' }, + { label: 'Copy Email', id: 'copy_outlook' }, ], value: () => 'send_outlook', }, @@ -206,10 +211,109 @@ export const OutlookBlock: BlockConfig = { layout: 'full', condition: { field: 'operation', value: 'read_outlook' }, }, + // Move Email Fields + { + id: 'moveMessageId', + title: 'Message ID', + type: 'short-input', + layout: 'full', + placeholder: 'ID of the email to move', + condition: { field: 'operation', value: 'move_outlook' }, + required: true, + }, + // Destination folder selector (basic mode) + { + id: 'destinationFolder', + title: 'Move To Folder', + type: 'folder-selector', + layout: 'full', + canonicalParamId: 'destinationId', + provider: 'outlook', + serviceId: 'outlook', + requiredScopes: ['Mail.ReadWrite', 'Mail.ReadBasic', 'Mail.Read'], + placeholder: 'Select destination folder', + dependsOn: ['credential'], + mode: 'basic', + condition: { field: 'operation', value: 'move_outlook' }, + required: true, + }, + // Manual destination folder input (advanced mode) + { + id: 'manualDestinationFolder', + title: 'Move To Folder', + type: 'short-input', + layout: 'full', + canonicalParamId: 'destinationId', + placeholder: 'Enter folder ID', + mode: 'advanced', + condition: { field: 'operation', value: 'move_outlook' }, + required: true, + }, + // Mark as Read/Unread, Delete - Message ID field + { + id: 'actionMessageId', + title: 'Message ID', + type: 'short-input', + layout: 'full', + placeholder: 'ID of the email', + condition: { + field: 'operation', + value: ['mark_read_outlook', 'mark_unread_outlook', 'delete_outlook'], + }, + required: true, + }, + // Copy Email - Message ID field + { + id: 'copyMessageId', + title: 'Message ID', + type: 'short-input', + layout: 'full', + placeholder: 'ID of the email to copy', + condition: { field: 'operation', value: 'copy_outlook' }, + required: true, + }, + // Copy Email - Destination folder selector (basic mode) + { + id: 'copyDestinationFolder', + title: 'Copy To Folder', + type: 'folder-selector', + layout: 'full', + canonicalParamId: 'copyDestinationId', + provider: 'outlook', + serviceId: 'outlook', + requiredScopes: ['Mail.ReadWrite', 'Mail.ReadBasic', 'Mail.Read'], + placeholder: 'Select destination folder', + dependsOn: ['credential'], + mode: 'basic', + condition: { field: 'operation', value: 'copy_outlook' }, + required: true, + }, + // Copy Email - Manual destination folder input (advanced mode) + { + id: 'manualCopyDestinationFolder', + title: 'Copy To Folder', + type: 'short-input', + layout: 'full', + canonicalParamId: 'copyDestinationId', + placeholder: 'Enter folder ID', + mode: 'advanced', + condition: { field: 'operation', value: 'copy_outlook' }, + required: true, + }, ...getTrigger('outlook_poller').subBlocks, ], tools: { - access: ['outlook_send', 'outlook_draft', 'outlook_read', 'outlook_forward'], + access: [ + 'outlook_send', + 'outlook_draft', + 'outlook_read', + 'outlook_forward', + 'outlook_move', + 'outlook_mark_read', + 
'outlook_mark_unread', + 'outlook_delete', + 'outlook_copy', + ], config: { tool: (params) => { switch (params.operation) { @@ -221,12 +325,34 @@ export const OutlookBlock: BlockConfig = { return 'outlook_draft' case 'forward_outlook': return 'outlook_forward' + case 'move_outlook': + return 'outlook_move' + case 'mark_read_outlook': + return 'outlook_mark_read' + case 'mark_unread_outlook': + return 'outlook_mark_unread' + case 'delete_outlook': + return 'outlook_delete' + case 'copy_outlook': + return 'outlook_copy' default: throw new Error(`Invalid Outlook operation: ${params.operation}`) } }, params: (params) => { - const { credential, folder, manualFolder, ...rest } = params + const { + credential, + folder, + manualFolder, + destinationFolder, + manualDestinationFolder, + moveMessageId, + actionMessageId, + copyMessageId, + copyDestinationFolder, + manualCopyDestinationFolder, + ...rest + } = params // Handle both selector and manual folder input const effectiveFolder = (folder || manualFolder || '').trim() @@ -235,9 +361,40 @@ export const OutlookBlock: BlockConfig = { rest.folder = effectiveFolder || 'INBOX' } + // Handle move operation + if (rest.operation === 'move_outlook') { + if (moveMessageId) { + rest.messageId = moveMessageId + } + if (!rest.destinationId) { + rest.destinationId = (destinationFolder || manualDestinationFolder || '').trim() + } + } + + if ( + ['mark_read_outlook', 'mark_unread_outlook', 'delete_outlook'].includes(rest.operation) + ) { + if (actionMessageId) { + rest.messageId = actionMessageId + } + } + + if (rest.operation === 'copy_outlook') { + if (copyMessageId) { + rest.messageId = copyMessageId + } + // Handle copyDestinationId (from UI canonical param) or destinationId (from trigger) + if (rest.copyDestinationId) { + rest.destinationId = rest.copyDestinationId + rest.copyDestinationId = undefined + } else if (!rest.destinationId) { + rest.destinationId = (copyDestinationFolder || manualCopyDestinationFolder || '').trim() + } + } + return { ...rest, - credential, // Keep the credential parameter + credential, } }, }, @@ -259,6 +416,20 @@ export const OutlookBlock: BlockConfig = { manualFolder: { type: 'string', description: 'Manual folder name' }, maxResults: { type: 'number', description: 'Maximum emails' }, includeAttachments: { type: 'boolean', description: 'Include email attachments' }, + // Move operation inputs + moveMessageId: { type: 'string', description: 'Message ID to move' }, + destinationFolder: { type: 'string', description: 'Destination folder ID' }, + manualDestinationFolder: { type: 'string', description: 'Manual destination folder ID' }, + destinationId: { type: 'string', description: 'Destination folder ID for move' }, + // Action operation inputs + actionMessageId: { type: 'string', description: 'Message ID for actions' }, + copyMessageId: { type: 'string', description: 'Message ID to copy' }, + copyDestinationFolder: { type: 'string', description: 'Copy destination folder ID' }, + manualCopyDestinationFolder: { + type: 'string', + description: 'Manual copy destination folder ID', + }, + copyDestinationId: { type: 'string', description: 'Destination folder ID for copy' }, }, outputs: { // Common outputs diff --git a/apps/sim/blocks/blocks/slack.ts b/apps/sim/blocks/blocks/slack.ts index e57321e70..d48b77ccd 100644 --- a/apps/sim/blocks/blocks/slack.ts +++ b/apps/sim/blocks/blocks/slack.ts @@ -7,10 +7,11 @@ import { getTrigger } from '@/triggers' export const SlackBlock: BlockConfig = { type: 'slack', name: 'Slack', - description: 
'Send messages to Slack or trigger workflows from Slack events', + description: + 'Send, update, delete messages, add reactions in Slack or trigger workflows from Slack events', authMode: AuthMode.OAuth, longDescription: - 'Integrate Slack into the workflow. Can send messages, create canvases, and read messages. Requires Bot Token instead of OAuth in advanced mode. Can be used in trigger mode to trigger a workflow when a message is sent to a channel.', + 'Integrate Slack into the workflow. Can send, update, and delete messages, create canvases, read messages, and add reactions. Requires Bot Token instead of OAuth in advanced mode. Can be used in trigger mode to trigger a workflow when a message is sent to a channel.', docsLink: 'https://docs.sim.ai/tools/slack', category: 'tools', bgColor: '#611f69', @@ -26,6 +27,10 @@ export const SlackBlock: BlockConfig = { { label: 'Send Message', id: 'send' }, { label: 'Create Canvas', id: 'canvas' }, { label: 'Read Messages', id: 'read' }, + { label: 'Download File', id: 'download' }, + { label: 'Update Message', id: 'update' }, + { label: 'Delete Message', id: 'delete' }, + { label: 'Add Reaction', id: 'react' }, ], value: () => 'send', }, @@ -110,6 +115,19 @@ export const SlackBlock: BlockConfig = { }, required: true, }, + { + id: 'threadTs', + title: 'Thread Timestamp (Optional)', + type: 'short-input', + layout: 'full', + canonicalParamId: 'thread_ts', + placeholder: 'Reply to thread (e.g., 1405894322.002768)', + condition: { + field: 'operation', + value: 'send', + }, + required: false, + }, // File upload (basic mode) { id: 'attachmentFiles', @@ -183,10 +201,111 @@ export const SlackBlock: BlockConfig = { value: 'read', }, }, + // Download File specific fields + { + id: 'fileId', + title: 'File ID', + type: 'short-input', + layout: 'full', + placeholder: 'Enter Slack file ID (e.g., F1234567890)', + condition: { + field: 'operation', + value: 'download', + }, + required: true, + }, + { + id: 'downloadFileName', + title: 'File Name Override', + type: 'short-input', + layout: 'full', + canonicalParamId: 'fileName', + placeholder: 'Optional: Override the filename', + condition: { + field: 'operation', + value: 'download', + }, + }, + // Update Message specific fields + { + id: 'updateTimestamp', + title: 'Message Timestamp', + type: 'short-input', + layout: 'full', + canonicalParamId: 'timestamp', + placeholder: 'Message timestamp (e.g., 1405894322.002768)', + condition: { + field: 'operation', + value: 'update', + }, + required: true, + }, + { + id: 'updateText', + title: 'New Message Text', + type: 'long-input', + layout: 'full', + canonicalParamId: 'text', + placeholder: 'Enter new message text (supports Slack mrkdwn)', + condition: { + field: 'operation', + value: 'update', + }, + required: true, + }, + // Delete Message specific fields + { + id: 'deleteTimestamp', + title: 'Message Timestamp', + type: 'short-input', + layout: 'full', + canonicalParamId: 'timestamp', + placeholder: 'Message timestamp (e.g., 1405894322.002768)', + condition: { + field: 'operation', + value: 'delete', + }, + required: true, + }, + // Add Reaction specific fields + { + id: 'reactionTimestamp', + title: 'Message Timestamp', + type: 'short-input', + layout: 'full', + canonicalParamId: 'timestamp', + placeholder: 'Message timestamp (e.g., 1405894322.002768)', + condition: { + field: 'operation', + value: 'react', + }, + required: true, + }, + { + id: 'emojiName', + title: 'Emoji Name', + type: 'short-input', + layout: 'full', + canonicalParamId: 'name', + placeholder: 
'Emoji name without colons (e.g., thumbsup, heart, eyes)', + condition: { + field: 'operation', + value: 'react', + }, + required: true, + }, ...getTrigger('slack_webhook').subBlocks, ], tools: { - access: ['slack_message', 'slack_canvas', 'slack_message_reader'], + access: [ + 'slack_message', + 'slack_canvas', + 'slack_message_reader', + 'slack_download', + 'slack_update_message', + 'slack_delete_message', + 'slack_add_reaction', + ], config: { tool: (params) => { switch (params.operation) { @@ -196,6 +315,14 @@ export const SlackBlock: BlockConfig = { return 'slack_canvas' case 'read': return 'slack_message_reader' + case 'download': + return 'slack_download' + case 'update': + return 'slack_update_message' + case 'delete': + return 'slack_delete_message' + case 'react': + return 'slack_add_reaction' default: throw new Error(`Invalid Slack operation: ${params.operation}`) } @@ -214,6 +341,12 @@ export const SlackBlock: BlockConfig = { oldest, attachmentFiles, files, + threadTs, + updateTimestamp, + updateText, + deleteTimestamp, + reactionTimestamp, + emojiName, ...rest } = params @@ -249,6 +382,10 @@ export const SlackBlock: BlockConfig = { throw new Error('Message text is required for send operation') } baseParams.text = rest.text + // Add thread_ts if provided + if (threadTs) { + baseParams.thread_ts = threadTs + } // Add files if provided const fileParam = attachmentFiles || files if (fileParam) { @@ -276,6 +413,42 @@ export const SlackBlock: BlockConfig = { baseParams.oldest = oldest } break + + case 'download': { + const fileId = (rest as any).fileId + const downloadFileName = (rest as any).downloadFileName + if (!fileId) { + throw new Error('File ID is required for download operation') + } + baseParams.fileId = fileId + if (downloadFileName) { + baseParams.fileName = downloadFileName + } + break + } + + case 'update': + if (!updateTimestamp || !updateText) { + throw new Error('Timestamp and text are required for update operation') + } + baseParams.timestamp = updateTimestamp + baseParams.text = updateText + break + + case 'delete': + if (!deleteTimestamp) { + throw new Error('Timestamp is required for delete operation') + } + baseParams.timestamp = deleteTimestamp + break + + case 'react': + if (!reactionTimestamp || !emojiName) { + throw new Error('Timestamp and emoji name are required for reaction operation') + } + baseParams.timestamp = reactionTimestamp + baseParams.name = emojiName + break } return baseParams @@ -296,6 +469,18 @@ export const SlackBlock: BlockConfig = { content: { type: 'string', description: 'Canvas content' }, limit: { type: 'string', description: 'Message limit' }, oldest: { type: 'string', description: 'Oldest timestamp' }, + fileId: { type: 'string', description: 'File ID to download' }, + downloadFileName: { type: 'string', description: 'File name override for download' }, + // Update/Delete/React operation inputs + updateTimestamp: { type: 'string', description: 'Message timestamp for update' }, + updateText: { type: 'string', description: 'New text for update' }, + deleteTimestamp: { type: 'string', description: 'Message timestamp for delete' }, + reactionTimestamp: { type: 'string', description: 'Message timestamp for reaction' }, + emojiName: { type: 'string', description: 'Emoji name for reaction' }, + timestamp: { type: 'string', description: 'Message timestamp' }, + name: { type: 'string', description: 'Emoji name' }, + threadTs: { type: 'string', description: 'Thread timestamp' }, + thread_ts: { type: 'string', description: 'Thread timestamp 
for reply' }, }, outputs: { // slack_message outputs @@ -309,13 +494,24 @@ export const SlackBlock: BlockConfig = { // slack_message_reader outputs messages: { type: 'json', - description: 'Array of message objects', + description: 'Array of message objects with text, user, timestamp, and file attachments', + }, + + // slack_download outputs + file: { + type: 'json', + description: 'Downloaded file stored in execution files', }, // Trigger outputs (when used as webhook trigger) event_type: { type: 'string', description: 'Type of Slack event that triggered the workflow' }, channel_name: { type: 'string', description: 'Human-readable channel name' }, user_name: { type: 'string', description: 'Username who triggered the event' }, + timestamp: { type: 'string', description: 'Message timestamp from the triggering event' }, + thread_ts: { + type: 'string', + description: 'Parent thread timestamp (if message is in a thread)', + }, team_id: { type: 'string', description: 'Slack workspace/team ID' }, event_id: { type: 'string', description: 'Unique event identifier for the trigger' }, }, diff --git a/apps/sim/components/ui/tag-dropdown.tsx b/apps/sim/components/ui/tag-dropdown.tsx index 5e7c15313..3052c0a8f 100644 --- a/apps/sim/components/ui/tag-dropdown.tsx +++ b/apps/sim/components/ui/tag-dropdown.tsx @@ -1043,7 +1043,8 @@ export const TagDropdown: React.FC = ({ let processedTag = tag // Check if this is a file property and add [0] automatically - const fileProperties = ['url', 'name', 'size', 'type', 'key', 'uploadedAt', 'expiresAt'] + // Only include user-accessible fields (matches UserFile interface) + const fileProperties = ['id', 'name', 'url', 'size', 'type'] const parts = tag.split('.') if (parts.length >= 2 && fileProperties.includes(parts[parts.length - 1])) { const fieldName = parts[parts.length - 2] diff --git a/apps/sim/executor/execution/executor.ts b/apps/sim/executor/execution/executor.ts index e5c69b82e..4f2703f32 100644 --- a/apps/sim/executor/execution/executor.ts +++ b/apps/sim/executor/execution/executor.ts @@ -98,6 +98,7 @@ export class DAGExecutor { workflowId, workspaceId: this.contextExtensions.workspaceId, executionId: this.contextExtensions.executionId, + userId: this.contextExtensions.userId, isDeployedContext: this.contextExtensions.isDeployedContext, blockStates: new Map(), blockLogs: [], diff --git a/apps/sim/executor/execution/types.ts b/apps/sim/executor/execution/types.ts index 10dca934e..8a69507a6 100644 --- a/apps/sim/executor/execution/types.ts +++ b/apps/sim/executor/execution/types.ts @@ -4,6 +4,7 @@ import type { SubflowType } from '@/stores/workflows/workflow/types' export interface ContextExtensions { workspaceId?: string executionId?: string + userId?: string stream?: boolean selectedOutputs?: string[] edges?: Array<{ source: string; target: string }> diff --git a/apps/sim/executor/handlers/api/api-handler.ts b/apps/sim/executor/handlers/api/api-handler.ts index e5a7689e3..832e99c88 100644 --- a/apps/sim/executor/handlers/api/api-handler.ts +++ b/apps/sim/executor/handlers/api/api-handler.ts @@ -89,6 +89,7 @@ export class ApiBlockHandler implements BlockHandler { _context: { workflowId: ctx.workflowId, workspaceId: ctx.workspaceId, + executionId: ctx.executionId, }, }, false, diff --git a/apps/sim/executor/handlers/generic/generic-handler.ts b/apps/sim/executor/handlers/generic/generic-handler.ts index 94dab70bc..1150ec734 100644 --- a/apps/sim/executor/handlers/generic/generic-handler.ts +++ b/apps/sim/executor/handlers/generic/generic-handler.ts @@ 
-66,6 +66,7 @@ export class GenericBlockHandler implements BlockHandler { _context: { workflowId: ctx.workflowId, workspaceId: ctx.workspaceId, + executionId: ctx.executionId, }, }, false, diff --git a/apps/sim/executor/types.ts b/apps/sim/executor/types.ts index 12acc50cd..335afcd18 100644 --- a/apps/sim/executor/types.ts +++ b/apps/sim/executor/types.ts @@ -12,8 +12,6 @@ export interface UserFile { size: number type: string key: string - uploadedAt: string - expiresAt: string context?: string } @@ -107,6 +105,7 @@ export interface ExecutionContext { workflowId: string // Unique identifier for this workflow execution workspaceId?: string // Workspace ID for file storage scoping executionId?: string // Unique execution ID for file storage scoping + userId?: string // User ID for file storage attribution // Whether this execution is running against deployed state (API/webhook/schedule/chat) // Manual executions in the builder should leave this undefined/false isDeployedContext?: boolean diff --git a/apps/sim/executor/utils/file-tool-processor.ts b/apps/sim/executor/utils/file-tool-processor.ts index f910e69f1..d78d308ed 100644 --- a/apps/sim/executor/utils/file-tool-processor.ts +++ b/apps/sim/executor/utils/file-tool-processor.ts @@ -1,5 +1,5 @@ import { createLogger } from '@/lib/logs/console/logger' -import { uploadExecutionFile } from '@/lib/uploads/contexts/execution' +import { uploadExecutionFile, uploadFileFromRawData } from '@/lib/uploads/contexts/execution' import type { ExecutionContext, UserFile } from '@/executor/types' import type { ToolConfig, ToolFileData } from '@/tools/types' @@ -73,7 +73,7 @@ export class FileToolProcessor { if (outputType === 'file[]') { return FileToolProcessor.processFileArray(fileData, outputKey, executionContext) } - return FileToolProcessor.processFileData(fileData, executionContext, outputKey) + return FileToolProcessor.processFileData(fileData, executionContext) } /** @@ -89,9 +89,7 @@ export class FileToolProcessor { } return Promise.all( - fileData.map((file, index) => - FileToolProcessor.processFileData(file, executionContext, `${outputKey}[${index}]`) - ) + fileData.map((file, index) => FileToolProcessor.processFileData(file, executionContext)) ) } @@ -100,49 +98,10 @@ export class FileToolProcessor { */ private static async processFileData( fileData: ToolFileData, - context: ExecutionContext, - outputKey: string + context: ExecutionContext ): Promise { - logger.info(`Processing file data for output '${outputKey}': ${fileData.name}`) try { - // Convert various formats to Buffer - let buffer: Buffer - - if (Buffer.isBuffer(fileData.data)) { - buffer = fileData.data - logger.info(`Using Buffer data for ${fileData.name} (${buffer.length} bytes)`) - } else if ( - fileData.data && - typeof fileData.data === 'object' && - 'type' in fileData.data && - 'data' in fileData.data - ) { - // Handle serialized Buffer objects (from JSON serialization) - const serializedBuffer = fileData.data as { type: string; data: number[] } - if (serializedBuffer.type === 'Buffer' && Array.isArray(serializedBuffer.data)) { - buffer = Buffer.from(serializedBuffer.data) - } else { - throw new Error(`Invalid serialized buffer format for ${fileData.name}`) - } - logger.info( - `Converted serialized Buffer to Buffer for ${fileData.name} (${buffer.length} bytes)` - ) - } else if (typeof fileData.data === 'string' && fileData.data) { - // Assume base64 or base64url - let base64Data = fileData.data - - // Convert base64url to base64 if needed (Gmail API format) - if (base64Data && 
(base64Data.includes('-') || base64Data.includes('_'))) { - base64Data = base64Data.replace(/-/g, '+').replace(/_/g, '/') - } - - buffer = Buffer.from(base64Data, 'base64') - logger.info( - `Converted base64 string to Buffer for ${fileData.name} (${buffer.length} bytes)` - ) - } else if (fileData.url) { - // Download from URL - logger.info(`Downloading file from URL: ${fileData.url}`) + if (fileData.url) { const response = await fetch(fileData.url) if (!response.ok) { @@ -150,35 +109,38 @@ export class FileToolProcessor { } const arrayBuffer = await response.arrayBuffer() - buffer = Buffer.from(arrayBuffer) - logger.info(`Downloaded file from URL for ${fileData.name} (${buffer.length} bytes)`) - } else { - throw new Error( - `File data for '${fileData.name}' must have either 'data' (Buffer/base64) or 'url' property` + const buffer = Buffer.from(arrayBuffer) + + if (buffer.length === 0) { + throw new Error(`File '${fileData.name}' has zero bytes`) + } + + return await uploadExecutionFile( + { + workspaceId: context.workspaceId || '', + workflowId: context.workflowId, + executionId: context.executionId || '', + }, + buffer, + fileData.name, + fileData.mimeType, + context.userId ) } - // Validate buffer - if (buffer.length === 0) { - throw new Error(`File '${fileData.name}' has zero bytes`) - } - - // Store in execution filesystem - const userFile = await uploadExecutionFile( + return uploadFileFromRawData( + { + name: fileData.name, + data: fileData.data, + mimeType: fileData.mimeType, + }, { workspaceId: context.workspaceId || '', workflowId: context.workflowId, executionId: context.executionId || '', }, - buffer, - fileData.name, - fileData.mimeType + context.userId ) - - logger.info( - `Successfully stored file '${fileData.name}' in execution filesystem with key: ${userFile.key}` - ) - return userFile } catch (error) { logger.error(`Error processing file data for '${fileData.name}':`, error) throw error diff --git a/apps/sim/executor/utils/start-block.ts b/apps/sim/executor/utils/start-block.ts index 7865b6a8e..f1b9c3a6e 100644 --- a/apps/sim/executor/utils/start-block.ts +++ b/apps/sim/executor/utils/start-block.ts @@ -1,3 +1,4 @@ +import { isUserFile } from '@/lib/utils' import { classifyStartBlockType, getLegacyStarterMode, @@ -233,20 +234,6 @@ function getRawInputCandidate(workflowInput: unknown): unknown { return workflowInput } -function isUserFile(candidate: unknown): candidate is UserFile { - if (!isPlainObject(candidate)) { - return false - } - - return ( - typeof candidate.id === 'string' && - typeof candidate.name === 'string' && - typeof candidate.url === 'string' && - typeof candidate.size === 'number' && - typeof candidate.type === 'string' - ) -} - function getFilesFromWorkflowInput(workflowInput: unknown): UserFile[] | undefined { if (!isPlainObject(workflowInput)) { return undefined diff --git a/apps/sim/lib/auth.ts b/apps/sim/lib/auth.ts index cb00ea430..a698dc747 100644 --- a/apps/sim/lib/auth.ts +++ b/apps/sim/lib/auth.ts @@ -1177,7 +1177,9 @@ export const auth = betterAuth({ 'chat:write.public', 'users:read', 'files:write', + 'files:read', 'canvases:write', + 'reactions:write', ], responseType: 'code', accessType: 'offline', diff --git a/apps/sim/lib/execution/files.ts b/apps/sim/lib/execution/files.ts index f1a8fa936..9eafe54c4 100644 --- a/apps/sim/lib/execution/files.ts +++ b/apps/sim/lib/execution/files.ts @@ -2,11 +2,17 @@ import { v4 as uuidv4 } from 'uuid' import { createLogger } from '@/lib/logs/console/logger' import { uploadExecutionFile } from 
'@/lib/uploads/contexts/execution' import type { UserFile } from '@/executor/types' +import type { SerializedBlock } from '@/serializer/types' const logger = createLogger('ExecutionFiles') const MAX_FILE_SIZE = 20 * 1024 * 1024 // 20MB +interface InputFormatField { + name: string + type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'files' +} + /** * Process a single file for workflow execution - handles both base64 ('file' type) and URL pass-through ('url' type) */ @@ -64,8 +70,6 @@ export async function processExecutionFile( size: 0, type: file.mime || 'application/octet-stream', key: `url/${file.name}`, - uploadedAt: new Date().toISOString(), - expiresAt: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000).toISOString(), } } @@ -104,3 +108,86 @@ export async function processExecutionFiles( return uploadedFiles } + +/** + * Extract inputFormat fields from a start block or trigger block + */ +function extractInputFormatFromBlock(block: SerializedBlock): InputFormatField[] { + const inputFormatValue = block.config?.params?.inputFormat + + if (!Array.isArray(inputFormatValue) || inputFormatValue.length === 0) { + return [] + } + + return inputFormatValue.filter( + (field): field is InputFormatField => + field && + typeof field === 'object' && + 'name' in field && + 'type' in field && + typeof field.name === 'string' && + typeof field.type === 'string' + ) +} + +/** + * Process file fields in workflow input based on the start block's inputFormat + * This handles base64 and URL file inputs from API calls + */ +export async function processInputFileFields( + input: unknown, + blocks: SerializedBlock[], + executionContext: { workspaceId: string; workflowId: string; executionId: string }, + requestId: string, + userId?: string +): Promise { + if (!input || typeof input !== 'object' || blocks.length === 0) { + return input + } + + const startBlock = blocks.find((block) => { + const blockType = block.metadata?.id + return ( + blockType === 'start_trigger' || + blockType === 'api_trigger' || + blockType === 'input_trigger' || + blockType === 'generic_webhook' || + blockType === 'starter' + ) + }) + + if (!startBlock) { + return input + } + + const inputFormat = extractInputFormatFromBlock(startBlock) + const fileFields = inputFormat.filter((field) => field.type === 'files') + + if (fileFields.length === 0) { + return input + } + + const processedInput = { ...input } as Record + + for (const fileField of fileFields) { + const fieldValue = processedInput[fileField.name] + + if (fieldValue && typeof fieldValue === 'object') { + const uploadedFiles = await processExecutionFiles( + fieldValue, + executionContext, + requestId, + userId + ) + + if (uploadedFiles.length > 0) { + processedInput[fileField.name] = uploadedFiles + logger.info( + `[${requestId}] Successfully processed ${uploadedFiles.length} file(s) for field: ${fileField.name}` + ) + } + } + } + + return processedInput +} diff --git a/apps/sim/lib/file-parsers/pdf-parser.ts b/apps/sim/lib/file-parsers/pdf-parser.ts index ad68f3363..0dde70d7b 100644 --- a/apps/sim/lib/file-parsers/pdf-parser.ts +++ b/apps/sim/lib/file-parsers/pdf-parser.ts @@ -1,5 +1,5 @@ import { readFile } from 'fs/promises' -import pdfParse from 'pdf-parse' +import { PDFParse } from 'pdf-parse' import type { FileParseResult, FileParser } from '@/lib/file-parsers/types' import { createLogger } from '@/lib/logs/console/logger' @@ -29,24 +29,27 @@ export class PdfParser implements FileParser { try { logger.info('Starting to parse buffer, size:', dataBuffer.length) - const 
pdfData = await pdfParse(dataBuffer) + const parser = new PDFParse({ data: dataBuffer }) + const textResult = await parser.getText() + const infoResult = await parser.getInfo() + await parser.destroy() logger.info( 'PDF parsed successfully, pages:', - pdfData.numpages, + textResult.total, 'text length:', - pdfData.text.length + textResult.text.length ) // Remove null bytes from content (PostgreSQL JSONB doesn't allow them) - const cleanContent = pdfData.text.replace(/\u0000/g, '') + const cleanContent = textResult.text.replace(/\u0000/g, '') return { content: cleanContent, metadata: { - pageCount: pdfData.numpages, - info: pdfData.info, - version: pdfData.version, + pageCount: textResult.total, + info: infoResult.info, + version: infoResult.metadata?.get('pdf:PDFVersion'), source: 'pdf-parse', }, } diff --git a/apps/sim/lib/logs/execution/logger.ts b/apps/sim/lib/logs/execution/logger.ts index 90d275d52..eeb904480 100644 --- a/apps/sim/lib/logs/execution/logger.ts +++ b/apps/sim/lib/logs/execution/logger.ts @@ -26,6 +26,7 @@ import type { WorkflowExecutionSnapshot, WorkflowState, } from '@/lib/logs/types' +import { filterForDisplay, redactApiKeys } from '@/lib/utils' export interface ToolCall { name: string @@ -157,6 +158,11 @@ export class ExecutionLogger implements IExecutionLoggerService { // Extract files from trace spans, final output, and workflow input const executionFiles = this.extractFilesFromExecution(traceSpans, finalOutput, workflowInput) + const filteredTraceSpans = filterForDisplay(traceSpans) + const filteredFinalOutput = filterForDisplay(finalOutput) + const redactedTraceSpans = redactApiKeys(filteredTraceSpans) + const redactedFinalOutput = redactApiKeys(filteredFinalOutput) + const [updatedLog] = await db .update(workflowExecutionLogs) .set({ @@ -165,8 +171,8 @@ export class ExecutionLogger implements IExecutionLoggerService { totalDurationMs, files: executionFiles.length > 0 ? 
executionFiles : null, executionData: { - traceSpans, - finalOutput, + traceSpans: redactedTraceSpans, + finalOutput: redactedFinalOutput, tokenBreakdown: { prompt: costSummary.totalPromptTokens, completion: costSummary.totalCompletionTokens, @@ -492,10 +498,6 @@ export class ExecutionLogger implements IExecutionLoggerService { type: file.type, url: file.url, key: file.key, - uploadedAt: file.uploadedAt, - expiresAt: file.expiresAt, - storageProvider: file.storageProvider, - bucketName: file.bucketName, }) } } @@ -515,10 +517,6 @@ export class ExecutionLogger implements IExecutionLoggerService { type: file.type, url: file.url, key: file.key, - uploadedAt: file.uploadedAt, - expiresAt: file.expiresAt, - storageProvider: file.storageProvider, - bucketName: file.bucketName, }) } } diff --git a/apps/sim/lib/logs/search-suggestions.test.ts b/apps/sim/lib/logs/search-suggestions.test.ts index a3ca48135..59e56a456 100644 --- a/apps/sim/lib/logs/search-suggestions.test.ts +++ b/apps/sim/lib/logs/search-suggestions.test.ts @@ -107,19 +107,29 @@ describe('SearchSuggestions', () => { describe('generatePreview', () => { it.concurrent('should generate correct preview for filter keys', () => { - const suggestion = { id: 'test', value: 'level:', label: 'Status', category: 'filters' } + const suggestion = { + id: 'test', + value: 'level:', + label: 'Status', + category: 'filters' as const, + } const preview = engine.generatePreview(suggestion, '', 0) expect(preview).toBe('level:') }) it.concurrent('should generate correct preview for filter values', () => { - const suggestion = { id: 'test', value: 'error', label: 'Error', category: 'level' } + const suggestion = { id: 'test', value: 'error', label: 'Error', category: 'level' as const } const preview = engine.generatePreview(suggestion, 'level:', 6) expect(preview).toBe('level:error') }) it.concurrent('should handle partial replacements correctly', () => { - const suggestion = { id: 'test', value: 'level:', label: 'Status', category: 'filters' } + const suggestion = { + id: 'test', + value: 'level:', + label: 'Status', + category: 'filters' as const, + } const preview = engine.generatePreview(suggestion, 'lev', 3) expect(preview).toBe('level:') }) @@ -129,32 +139,42 @@ describe('SearchSuggestions', () => { id: 'test', value: '"workflow1"', label: 'workflow1', - category: 'workflow', + category: 'workflow' as const, } const preview = engine.generatePreview(suggestion, 'workflow:', 9) expect(preview).toBe('workflow:"workflow1"') }) it.concurrent('should add space when adding filter after completed filter', () => { - const suggestion = { id: 'test', value: 'trigger:', label: 'Trigger', category: 'filters' } + const suggestion = { + id: 'test', + value: 'trigger:', + label: 'Trigger', + category: 'filters' as const, + } const preview = engine.generatePreview(suggestion, 'level:error ', 12) expect(preview).toBe('level:error trigger:') }) it.concurrent('should handle multiple completed filters', () => { - const suggestion = { id: 'test', value: 'cost:', label: 'Cost', category: 'filters' } + const suggestion = { id: 'test', value: 'cost:', label: 'Cost', category: 'filters' as const } const preview = engine.generatePreview(suggestion, 'level:error trigger:api ', 24) expect(preview).toBe('level:error trigger:api cost:') }) it.concurrent('should handle adding same filter type multiple times', () => { - const suggestion = { id: 'test', value: 'level:', label: 'Status', category: 'filters' } + const suggestion = { + id: 'test', + value: 'level:', + label: 'Status', 
+ category: 'filters' as const, + } const preview = engine.generatePreview(suggestion, 'level:error ', 12) expect(preview).toBe('level:error level:') }) it.concurrent('should handle filter value after existing filters', () => { - const suggestion = { id: 'test', value: 'info', label: 'Info', category: 'level' } + const suggestion = { id: 'test', value: 'info', label: 'Info', category: 'level' as const } const preview = engine.generatePreview(suggestion, 'level:error level:', 19) expect(preview).toBe('level:error level:info') }) diff --git a/apps/sim/lib/logs/types.ts b/apps/sim/lib/logs/types.ts index 2218df075..0c6fd2e53 100644 --- a/apps/sim/lib/logs/types.ts +++ b/apps/sim/lib/logs/types.ts @@ -93,10 +93,6 @@ export interface WorkflowExecutionLog { type: string url: string key: string - uploadedAt: string - expiresAt: string - storageProvider?: 's3' | 'blob' | 'local' - bucketName?: string }> // Execution details executionData: { diff --git a/apps/sim/lib/uploads/contexts/execution/execution-file-helpers.ts b/apps/sim/lib/uploads/contexts/execution/execution-file-helpers.ts index c5b674c93..9e36ec72e 100644 --- a/apps/sim/lib/uploads/contexts/execution/execution-file-helpers.ts +++ b/apps/sim/lib/uploads/contexts/execution/execution-file-helpers.ts @@ -10,27 +10,13 @@ export interface ExecutionContext { } /** - * File metadata stored in execution logs - now just uses UserFile directly - */ -export type ExecutionFileMetadata = UserFile - -/** - * Generate execution-scoped storage key - * Format: workspace_id/workflow_id/execution_id/filename + * Generate execution-scoped storage key with explicit prefix + * Format: execution/workspace_id/workflow_id/execution_id/filename */ export function generateExecutionFileKey(context: ExecutionContext, fileName: string): string { const { workspaceId, workflowId, executionId } = context const safeFileName = fileName.replace(/\s+/g, '-').replace(/[^a-zA-Z0-9.-]/g, '_') - return `${workspaceId}/${workflowId}/${executionId}/${safeFileName}` -} - -/** - * Generate execution prefix for cleanup operations - * Format: workspace_id/workflow_id/execution_id/ - */ -export function generateExecutionPrefix(context: ExecutionContext): string { - const { workspaceId, workflowId, executionId } = context - return `${workspaceId}/${workflowId}/${executionId}/` + return `execution/${workspaceId}/${workflowId}/${executionId}/${safeFileName}` } /** @@ -41,31 +27,44 @@ export function generateFileId(): string { } /** - * Check if a user file is expired + * UUID pattern for validating execution context IDs */ -export function isFileExpired(userFile: UserFile): boolean { - return new Date(userFile.expiresAt) < new Date() +const UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/i + +/** + * Check if a string matches UUID pattern + */ +export function isUuid(str: string): boolean { + return UUID_PATTERN.test(str) } /** - * Get file expiration date for execution files (5 minutes from now) + * Check if a key matches execution file pattern + * Execution files have keys in format: execution/workspaceId/workflowId/executionId/filename */ -export function getFileExpirationDate(): string { - return new Date(Date.now() + 5 * 60 * 1000).toISOString() +function matchesExecutionFilePattern(key: string): boolean { + if (!key || key.startsWith('/api/') || key.startsWith('http')) { + return false + } + + const parts = key.split('/') + + if (parts[0] === 'execution' && parts.length >= 5) { + const [, workspaceId, workflowId, executionId] = parts + return 
isUuid(workspaceId) && isUuid(workflowId) && isUuid(executionId) + } + + return false } /** * Check if a file is from execution storage based on its key pattern - * Execution files have keys in format: workspaceId/workflowId/executionId/filename - * Regular files have keys in format: timestamp-random-filename or just filename + * Execution files have keys in format: execution/workspaceId/workflowId/executionId/filename */ export function isExecutionFile(file: UserFile): boolean { if (!file.key) { return false } - // Execution files have at least 3 slashes in their key (4 parts) - // e.g., "workspace123/workflow456/execution789/document.pdf" - const parts = file.key.split('/') - return parts.length >= 4 && !file.key.startsWith('/api/') && !file.key.startsWith('http') + return matchesExecutionFilePattern(file.key) } diff --git a/apps/sim/lib/uploads/contexts/execution/execution-file-manager.ts b/apps/sim/lib/uploads/contexts/execution/execution-file-manager.ts index d51e1a440..bf61be0a7 100644 --- a/apps/sim/lib/uploads/contexts/execution/execution-file-manager.ts +++ b/apps/sim/lib/uploads/contexts/execution/execution-file-manager.ts @@ -1,14 +1,58 @@ import { createLogger } from '@/lib/logs/console/logger' +import { getBaseUrl } from '@/lib/urls/utils' +import { isUserFile } from '@/lib/utils' import type { UserFile } from '@/executor/types' import type { ExecutionContext } from './execution-file-helpers' -import { - generateExecutionFileKey, - generateFileId, - getFileExpirationDate, -} from './execution-file-helpers' +import { generateExecutionFileKey, generateFileId } from './execution-file-helpers' const logger = createLogger('ExecutionFileStorage') +function isSerializedBuffer(value: unknown): value is { type: string; data: number[] } { + return ( + !!value && + typeof value === 'object' && + (value as { type?: unknown }).type === 'Buffer' && + Array.isArray((value as { data?: unknown }).data) + ) +} + +function toBuffer(data: unknown, fileName: string): Buffer { + if (data === undefined || data === null) { + throw new Error(`File '${fileName}' has no data`) + } + + if (Buffer.isBuffer(data)) { + return data + } + + if (isSerializedBuffer(data)) { + return Buffer.from(data.data) + } + + if (data instanceof ArrayBuffer) { + return Buffer.from(data) + } + + if (ArrayBuffer.isView(data)) { + return Buffer.from(data.buffer, data.byteOffset, data.byteLength) + } + + if (Array.isArray(data)) { + return Buffer.from(data) + } + + if (typeof data === 'string') { + const trimmed = data.trim() + if (trimmed.startsWith('data:')) { + const [, base64Data] = trimmed.split(',') + return Buffer.from(base64Data ?? 
'', 'base64') + } + return Buffer.from(trimmed, 'base64') + } + + throw new Error(`File '${fileName}' has unsupported data format: ${typeof data}`) +} + /** * Upload a file to execution-scoped storage */ @@ -57,19 +101,24 @@ export async function uploadExecutionFile( metadata, // Pass metadata for cloud storage and database tracking }) + // Generate full URL for file access (useful for passing to external services) + const baseUrl = getBaseUrl() + const fullUrl = `${baseUrl}/api/files/serve/${fileInfo.key}` + const userFile: UserFile = { id: fileId, name: fileName, size: fileBuffer.length, type: contentType, - url: `/api/files/serve/${fileInfo.key}`, // Always use internal serve path for consistency + url: fullUrl, // Full URL for external access and downstream workflow usage key: fileInfo.key, - uploadedAt: new Date().toISOString(), - expiresAt: getFileExpirationDate(), context: 'execution', // Preserve context in file object } - logger.info(`Successfully uploaded execution file: ${fileName} (${fileBuffer.length} bytes)`) + logger.info(`Successfully uploaded execution file: ${fileName} (${fileBuffer.length} bytes)`, { + url: fullUrl, + key: fileInfo.key, + }) return userFile } catch (error) { logger.error(`Failed to upload execution file ${fileName}:`, error) @@ -105,48 +154,28 @@ export async function downloadExecutionFile(userFile: UserFile): Promise } /** - * Generate a short-lived presigned URL for file download (5 minutes) + * Convert raw file data (from tools/triggers) to UserFile + * Handles all common formats: Buffer, serialized Buffer, base64, data URLs */ -export async function generateExecutionFileDownloadUrl(userFile: UserFile): Promise { - logger.info(`Generating download URL for execution file: ${userFile.name}`) - logger.info(`File key: "${userFile.key}"`) - - try { - const { generatePresignedDownloadUrl } = await import('@/lib/uploads/core/storage-service') - const downloadUrl = await generatePresignedDownloadUrl( - userFile.key, - 'execution', - 5 * 60 // 5 minutes - ) - - logger.info(`Generated download URL for execution file: ${userFile.name}`) - return downloadUrl - } catch (error) { - logger.error(`Failed to generate download URL for ${userFile.name}:`, error) - throw new Error( - `Failed to generate download URL: ${error instanceof Error ? error.message : 'Unknown error'}` - ) - } -} - -/** - * Delete a file from execution-scoped storage - */ -export async function deleteExecutionFile(userFile: UserFile): Promise { - logger.info(`Deleting execution file: ${userFile.name}`) - - try { - const { deleteFile } = await import('@/lib/uploads/core/storage-service') - await deleteFile({ - key: userFile.key, - context: 'execution', - }) - - logger.info(`Successfully deleted execution file: ${userFile.name}`) - } catch (error) { - logger.error(`Failed to delete execution file ${userFile.name}:`, error) - throw new Error( - `Failed to delete file: ${error instanceof Error ? 
error.message : 'Unknown error'}` - ) +export async function uploadFileFromRawData( + rawData: { + name?: string + filename?: string + data?: unknown + mimeType?: string + contentType?: string + size?: number + }, + context: ExecutionContext, + userId?: string +): Promise { + if (isUserFile(rawData)) { + return rawData } + + const fileName = rawData.name || rawData.filename || 'file.bin' + const buffer = toBuffer(rawData.data, fileName) + const contentType = rawData.mimeType || rawData.contentType || 'application/octet-stream' + + return uploadExecutionFile(context, buffer, fileName, contentType, userId) } diff --git a/apps/sim/lib/uploads/contexts/execution/execution-file-server.ts b/apps/sim/lib/uploads/contexts/execution/execution-file-server.ts deleted file mode 100644 index fc206ff2c..000000000 --- a/apps/sim/lib/uploads/contexts/execution/execution-file-server.ts +++ /dev/null @@ -1,135 +0,0 @@ -import { db } from '@sim/db' -import { workflowExecutionLogs } from '@sim/db/schema' -import { eq } from 'drizzle-orm' -import { createLogger } from '@/lib/logs/console/logger' -import type { ExecutionFileMetadata } from './execution-file-helpers' - -const logger = createLogger('ExecutionFilesServer') - -/** - * Retrieve file metadata from execution logs - */ -export async function getExecutionFiles(executionId: string): Promise { - try { - const log = await db - .select() - .from(workflowExecutionLogs) - .where(eq(workflowExecutionLogs.executionId, executionId)) - .limit(1) - - if (log.length === 0) { - return [] - } - - return (log[0].files as ExecutionFileMetadata[]) || [] - } catch (error) { - logger.error(`Failed to retrieve file metadata for execution ${executionId}:`, error) - return [] - } -} - -/** - * Store file metadata in execution logs - */ -export async function storeExecutionFiles( - executionId: string, - files: ExecutionFileMetadata[] -): Promise { - try { - logger.info(`Storing ${files.length} file metadata entries for execution ${executionId}`) - - await db - .update(workflowExecutionLogs) - .set({ files }) - .where(eq(workflowExecutionLogs.executionId, executionId)) - - logger.info(`Successfully stored file metadata for execution ${executionId}`) - } catch (error) { - logger.error(`Failed to store file metadata for execution ${executionId}:`, error) - throw error - } -} - -/** - * Add file metadata to existing execution logs - */ -export async function addExecutionFile( - executionId: string, - fileMetadata: ExecutionFileMetadata -): Promise { - try { - const existingFiles = await getExecutionFiles(executionId) - - const updatedFiles = [...existingFiles, fileMetadata] - - await storeExecutionFiles(executionId, updatedFiles) - - logger.info(`Added file ${fileMetadata.name} to execution ${executionId}`) - } catch (error) { - logger.error(`Failed to add file to execution ${executionId}:`, error) - throw error - } -} - -/** - * Get all expired files across all executions - */ -export async function getExpiredFiles(): Promise { - try { - const now = new Date().toISOString() - - const logs = await db - .select() - .from(workflowExecutionLogs) - .where(eq(workflowExecutionLogs.level, 'info')) - - const expiredFiles: ExecutionFileMetadata[] = [] - - for (const log of logs) { - const files = log.files as ExecutionFileMetadata[] - if (files) { - const expired = files.filter((file) => file.expiresAt < now) - expiredFiles.push(...expired) - } - } - - return expiredFiles - } catch (error) { - logger.error('Failed to get expired files:', error) - return [] - } -} - -/** - * Remove 
expired file metadata from execution logs - */ -export async function cleanupExpiredFileMetadata(): Promise { - try { - const now = new Date().toISOString() - let cleanedCount = 0 - - const logs = await db.select().from(workflowExecutionLogs) - - for (const log of logs) { - const files = log.files as ExecutionFileMetadata[] - if (files && files.length > 0) { - const nonExpiredFiles = files.filter((file) => file.expiresAt >= now) - - if (nonExpiredFiles.length !== files.length) { - await db - .update(workflowExecutionLogs) - .set({ files: nonExpiredFiles.length > 0 ? nonExpiredFiles : null }) - .where(eq(workflowExecutionLogs.id, log.id)) - - cleanedCount += files.length - nonExpiredFiles.length - } - } - } - - logger.info(`Cleaned up ${cleanedCount} expired file metadata entries`) - return cleanedCount - } catch (error) { - logger.error('Failed to cleanup expired file metadata:', error) - return 0 - } -} diff --git a/apps/sim/lib/uploads/contexts/execution/index.ts b/apps/sim/lib/uploads/contexts/execution/index.ts index 3b0da0c96..5cfea0f0f 100644 --- a/apps/sim/lib/uploads/contexts/execution/index.ts +++ b/apps/sim/lib/uploads/contexts/execution/index.ts @@ -1,3 +1,2 @@ export * from './execution-file-helpers' export * from './execution-file-manager' -export * from './execution-file-server' diff --git a/apps/sim/lib/uploads/contexts/workspace/workspace-file-manager.ts b/apps/sim/lib/uploads/contexts/workspace/workspace-file-manager.ts index 5fe913430..fc4cf1f29 100644 --- a/apps/sim/lib/uploads/contexts/workspace/workspace-file-manager.ts +++ b/apps/sim/lib/uploads/contexts/workspace/workspace-file-manager.ts @@ -37,14 +37,54 @@ export interface WorkspaceFileRecord { } /** - * Generate workspace-scoped storage key - * Pattern: {workspaceId}/{timestamp}-{filename} + * UUID pattern for validating workspace IDs + */ +const UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/i + +/** + * Workspace file key pattern: workspace/{workspaceId}/{timestamp}-{random}-{filename} + */ +const WORKSPACE_KEY_PATTERN = /^workspace\/([a-f0-9-]{36})\/(\d+)-([a-z0-9]+)-(.+)$/ + +/** + * Check if a key matches workspace file pattern + * Format: workspace/{workspaceId}/{timestamp}-{random}-{filename} + */ +export function matchesWorkspaceFilePattern(key: string): boolean { + if (!key || key.startsWith('/api/') || key.startsWith('http')) { + return false + } + return WORKSPACE_KEY_PATTERN.test(key) +} + +/** + * Parse workspace file key to extract workspace ID + * Format: workspace/{workspaceId}/{timestamp}-{random}-{filename} + * @returns workspaceId if key matches pattern, null otherwise + */ +export function parseWorkspaceFileKey(key: string): string | null { + if (!matchesWorkspaceFilePattern(key)) { + return null + } + + const match = key.match(WORKSPACE_KEY_PATTERN) + if (!match) { + return null + } + + const workspaceId = match[1] + return UUID_PATTERN.test(workspaceId) ? 
workspaceId : null +} + +/** + * Generate workspace-scoped storage key with explicit prefix + * Format: workspace/{workspaceId}/{timestamp}-{random}-{filename} */ export function generateWorkspaceFileKey(workspaceId: string, fileName: string): string { const timestamp = Date.now() const random = Math.random().toString(36).substring(2, 9) const safeFileName = fileName.replace(/\s+/g, '-').replace(/[^a-zA-Z0-9.-]/g, '_') - return `${workspaceId}/${timestamp}-${random}-${safeFileName}` + return `workspace/${workspaceId}/${timestamp}-${random}-${safeFileName}` } /** @@ -152,8 +192,6 @@ export async function uploadWorkspaceFile( type: contentType, url: serveUrl, // Use authenticated serve URL (enforces context) key: uploadResult.key, - uploadedAt: new Date().toISOString(), - expiresAt: new Date(Date.now() + 100 * 365 * 24 * 60 * 60 * 1000).toISOString(), // Far future date (effectively never expires) context: 'workspace', } } catch (error) { diff --git a/apps/sim/lib/uploads/core/storage-service.ts b/apps/sim/lib/uploads/core/storage-service.ts index 196dd3abb..dd32c095f 100644 --- a/apps/sim/lib/uploads/core/storage-service.ts +++ b/apps/sim/lib/uploads/core/storage-service.ts @@ -245,7 +245,7 @@ export async function generatePresignedUploadUrl( const timestamp = Date.now() const uniqueId = Math.random().toString(36).substring(2, 9) const safeFileName = fileName.replace(/[^a-zA-Z0-9.-]/g, '_') - const key = `${timestamp}-${uniqueId}-${safeFileName}` + const key = `${context}/${timestamp}-${uniqueId}-${safeFileName}` if (USE_S3_STORAGE) { return generateS3PresignedUrl( diff --git a/apps/sim/lib/uploads/shared/types.ts b/apps/sim/lib/uploads/shared/types.ts index 221c32890..07a0272ed 100644 --- a/apps/sim/lib/uploads/shared/types.ts +++ b/apps/sim/lib/uploads/shared/types.ts @@ -1,11 +1,11 @@ export type StorageContext = - | 'general' | 'knowledge-base' | 'chat' | 'copilot' | 'execution' | 'workspace' | 'profile-pictures' + | 'logs' export interface FileInfo { path: string diff --git a/apps/sim/lib/uploads/utils/file-utils.server.ts b/apps/sim/lib/uploads/utils/file-utils.server.ts index 86c666d41..8d04be2f7 100644 --- a/apps/sim/lib/uploads/utils/file-utils.server.ts +++ b/apps/sim/lib/uploads/utils/file-utils.server.ts @@ -2,25 +2,10 @@ import type { Logger } from '@/lib/logs/console/logger' import type { StorageContext } from '@/lib/uploads' +import { isExecutionFile } from '@/lib/uploads/contexts/execution/execution-file-helpers' import type { UserFile } from '@/executor/types' import { inferContextFromKey } from './file-utils' -/** - * Check if a file is from execution storage based on its key pattern - * Execution files have keys in format: workspaceId/workflowId/executionId/filename - * Regular files have keys in format: timestamp-random-filename or just filename - */ -function isExecutionFile(file: UserFile): boolean { - if (!file.key) { - return false - } - - // Execution files have at least 3 slashes in their key (4 parts) - // e.g., "workspace123/workflow456/execution789/document.pdf" - const parts = file.key.split('/') - return parts.length >= 4 && !file.key.startsWith('/api/') && !file.key.startsWith('http') -} - /** * Download a file from a URL (internal or external) * For internal URLs, uses direct storage access (server-side only) @@ -34,7 +19,7 @@ export async function downloadFileFromUrl(fileUrl: string, timeoutMs = 180000): try { if (isInternalFileUrl(fileUrl)) { - const { key, context } = parseInternalFileUrl(fileUrl, 'knowledge-base') + const { key, context } = 
parseInternalFileUrl(fileUrl) const { downloadFile } = await import('@/lib/uploads/core/storage-service') const buffer = await downloadFile({ key, context }) clearTimeout(timeoutId) diff --git a/apps/sim/lib/uploads/utils/file-utils.ts b/apps/sim/lib/uploads/utils/file-utils.ts index c171ba867..cffd1a7ba 100644 --- a/apps/sim/lib/uploads/utils/file-utils.ts +++ b/apps/sim/lib/uploads/utils/file-utils.ts @@ -234,47 +234,33 @@ export function isInternalFileUrl(fileUrl: string): boolean { } /** - * Infer storage context from file key pattern + * Infer storage context from file key using explicit prefixes + * All files must use prefixed keys */ export function inferContextFromKey(key: string): StorageContext { - // KB files always start with 'kb/' prefix - if (key.startsWith('kb/')) { - return 'knowledge-base' + if (!key) { + throw new Error('Cannot infer context from empty key') } - // Execution files: three or more UUID segments (workspace/workflow/execution/...) - // Pattern: {uuid}/{uuid}/{uuid}/{filename} - const segments = key.split('/') - if (segments.length >= 4 && segments[0].match(/^[a-f0-9-]{36}$/)) { - return 'execution' - } + if (key.startsWith('kb/')) return 'knowledge-base' + if (key.startsWith('chat/')) return 'chat' + if (key.startsWith('copilot/')) return 'copilot' + if (key.startsWith('execution/')) return 'execution' + if (key.startsWith('workspace/')) return 'workspace' + if (key.startsWith('profile-pictures/')) return 'profile-pictures' + if (key.startsWith('logs/')) return 'logs' - // Workspace files: UUID-like ID followed by timestamp pattern - // Pattern: {uuid}/{timestamp}-{random}-{filename} - if (key.match(/^[a-f0-9-]{36}\/\d+-[a-z0-9]+-/)) { - return 'workspace' - } - - // Copilot/General files: timestamp-random-filename (no path segments) - // Pattern: {timestamp}-{random}-{filename} - // NOTE: This is ambiguous - prefer explicit context parameter - if (key.match(/^\d+-[a-z0-9]+-/)) { - return 'general' - } - - return 'general' + throw new Error( + `File key must start with a context prefix (kb/, chat/, copilot/, execution/, workspace/, profile-pictures/, or logs/). Got: ${key}` + ) } /** * Extract storage key and context from an internal file URL * @param fileUrl - Internal file URL (e.g., /api/files/serve/key?context=workspace) - * @param defaultContext - Default context if not found in URL params * @returns Object with storage key and context */ -export function parseInternalFileUrl( - fileUrl: string, - defaultContext: StorageContext = 'general' -): { key: string; context: StorageContext } { +export function parseInternalFileUrl(fileUrl: string): { key: string; context: StorageContext } { const key = extractStorageKey(fileUrl) if (!key) { @@ -284,7 +270,7 @@ export function parseInternalFileUrl( const url = new URL(fileUrl.startsWith('http') ? 
fileUrl : `http://localhost${fileUrl}`)
   const contextParam = url.searchParams.get('context')
-  const context = (contextParam as StorageContext) || inferContextFromKey(key) || defaultContext
+  const context = (contextParam as StorageContext) || inferContextFromKey(key)
   return { key, context }
 }
@@ -303,7 +289,23 @@ export interface RawFileInput {
   type?: string
   uploadedAt?: string | Date
   expiresAt?: string | Date
-  [key: string]: unknown // Allow additional properties for flexibility
+  context?: string
+}
+
+/**
+ * Type guard to check if a RawFileInput has all UserFile required properties
+ */
+function isCompleteUserFile(file: RawFileInput): file is UserFile {
+  return (
+    typeof file.id === 'string' &&
+    typeof file.name === 'string' &&
+    typeof file.url === 'string' &&
+    typeof file.size === 'number' &&
+    typeof file.type === 'string' &&
+    typeof file.key === 'string' &&
+    typeof file.uploadedAt === 'string' &&
+    typeof file.expiresAt === 'string'
+  )
 }
 
 /**
@@ -319,8 +321,8 @@ export function processSingleFileToUserFile(
   requestId: string,
   logger: Logger
 ): UserFile {
-  if (file.id && file.key && file.uploadedAt) {
-    return file as UserFile
+  if (isCompleteUserFile(file)) {
+    return file
   }
 
   const storageKey = file.key || (file.path ? extractStorageKey(file.path) : null)
@@ -337,16 +339,6 @@ export function processSingleFileToUserFile(
     size: file.size,
     type: file.type || 'application/octet-stream',
     key: storageKey,
-    uploadedAt: file.uploadedAt
-      ? typeof file.uploadedAt === 'string'
-        ? file.uploadedAt
-        : file.uploadedAt.toISOString()
-      : new Date().toISOString(),
-    expiresAt: file.expiresAt
-      ? typeof file.expiresAt === 'string'
-        ? file.expiresAt
-        : file.expiresAt.toISOString()
-      : new Date(Date.now() + 24 * 60 * 60 * 1000).toISOString(),
   }
 
   logger.info(`[${requestId}] Converted file to UserFile: ${userFile.name} (key: ${userFile.key})`)
@@ -460,18 +452,22 @@ export function extractCleanFilename(urlOrPath: string): string {
 /**
  * Extract workspaceId from execution file key pattern
- * Execution files have format: workspaceId/workflowId/executionId/filename
+ * Format: execution/workspaceId/workflowId/executionId/filename
  * @param key File storage key
  * @returns workspaceId if key matches execution file pattern, null otherwise
  */
 export function extractWorkspaceIdFromExecutionKey(key: string): string | null {
   const segments = key.split('/')
-  if (segments.length >= 4) {
-    const workspaceId = segments[0]
-    if (workspaceId && /^[a-f0-9-]{36}$/.test(workspaceId)) {
+
+  const UUID_PATTERN = /^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$/i
+
+  if (segments[0] === 'execution' && segments.length >= 5) {
+    const workspaceId = segments[1]
+    if (workspaceId && UUID_PATTERN.test(workspaceId)) {
       return workspaceId
     }
   }
+
   return null
 }
diff --git a/apps/sim/lib/utils.ts b/apps/sim/lib/utils.ts
index b596d3093..f84db0d7a 100644
--- a/apps/sim/lib/utils.ts
+++ b/apps/sim/lib/utils.ts
@@ -352,6 +352,92 @@ export const redactApiKeys = (obj: any): any => {
   return result
 }
 
+/**
+ * Type guard to check if an object is a UserFile
+ */
+export function isUserFile(candidate: unknown): candidate is {
+  id: string
+  name: string
+  url: string
+  key: string
+  size: number
+  type: string
+  context?: string
+} {
+  if (!candidate || typeof candidate !== 'object') {
+    return false
+  }
+
+  const value = candidate as Record<string, unknown>
+  return (
+    typeof value.id === 'string' &&
+    typeof value.key === 'string' &&
+    typeof value.url === 'string' &&
+    typeof value.name === 'string'
+  )
+}
+
+/**
+ * Filter function that transforms UserFile objects for display
+ * Removes internal fields: key, context
+ * Keeps user-friendly fields: id, name, url, size, type
+ */
+function filterUserFile(data: any): any {
+  if (isUserFile(data)) {
+    const { id, name, url, size, type } = data
+    return { id, name, url, size, type }
+  }
+  return data
+}
+
+/**
+ * Registry of filter functions to apply to data for cleaner display in logs/console.
+ * Add new filter functions here to handle additional data types.
+ */
+const DISPLAY_FILTERS = [
+  filterUserFile,
+  // Add more filters here as needed
+]
+
+/**
+ * Generic helper to filter internal/technical fields from data for cleaner display in logs and console.
+ * Applies all registered filters recursively to the data structure.
+ *
+ * To add a new filter:
+ * 1. Create a filter function that checks and transforms a specific data type
+ * 2. Add it to the DISPLAY_FILTERS array above
+ *
+ * @param data - Data to filter (objects, arrays, primitives)
+ * @returns Filtered data with internal fields removed
+ */
+export function filterForDisplay(data: any): any {
+  if (!data || typeof data !== 'object') {
+    return data
+  }
+
+  // Apply all registered filters
+  const filtered = data
+  for (const filterFn of DISPLAY_FILTERS) {
+    const result = filterFn(filtered)
+    if (result !== filtered) {
+      // Filter matched and transformed the data
+      return result
+    }
+  }
+
+  // No filters matched - recursively filter nested structures
+  if (Array.isArray(filtered)) {
+    return filtered.map(filterForDisplay)
+  }
+
+  // Recursively filter object properties
+  const result: any = {}
+  for (const [key, value] of Object.entries(filtered)) {
+    result[key] = filterForDisplay(value)
+  }
+  return result
+}
+
 /**
  * Validates a name by removing any characters that could cause issues
  * with variable references or node naming.
diff --git a/apps/sim/lib/webhooks/attachment-processor.ts b/apps/sim/lib/webhooks/attachment-processor.ts
index b12fd28ed..12fbf812d 100644
--- a/apps/sim/lib/webhooks/attachment-processor.ts
+++ b/apps/sim/lib/webhooks/attachment-processor.ts
@@ -1,5 +1,5 @@
 import { createLogger } from '@/lib/logs/console/logger'
-import { uploadExecutionFile } from '@/lib/uploads/contexts/execution'
+import { uploadFileFromRawData } from '@/lib/uploads/contexts/execution'
 import type { UserFile } from '@/executor/types'
 
 const logger = createLogger('WebhookAttachmentProcessor')
@@ -7,7 +7,8 @@ const logger = createLogger('WebhookAttachmentProcessor')
 export interface WebhookAttachment {
   name: string
   data: Buffer
-  contentType: string
+  contentType?: string
+  mimeType?: string
   size: number
 }
 
@@ -27,6 +28,7 @@ export class WebhookAttachmentProcessor {
       workflowId: string
       executionId: string
       requestId: string
+      userId?: string
     }
   ): Promise<UserFile[]> {
     if (!attachments || attachments.length === 0) {
@@ -72,47 +74,17 @@ export class WebhookAttachmentProcessor {
       workflowId: string
      executionId: string
      requestId: string
+      userId?: string
     }
   ): Promise<UserFile> {
-    // Convert data to Buffer (handle both raw and serialized formats)
-    let buffer: Buffer
-    const data = attachment.data as any
-
-    if (Buffer.isBuffer(data)) {
-      // Raw Buffer (e.g., Teams in-memory processing)
-      buffer = data
-    } else if (
-      data &&
-      typeof data === 'object' &&
-      data.type === 'Buffer' &&
-      Array.isArray(data.data)
-    ) {
-      // Serialized Buffer (e.g., Gmail/Outlook after JSON roundtrip)
-      buffer = Buffer.from(data.data)
-    } else {
-      throw new Error(`Attachment '${attachment.name}' data must be a Buffer or serialized Buffer`)
-    }
-
-    if (buffer.length === 0) {
-      throw new Error(`Attachment '${attachment.name}' has zero bytes`)
-    }
-
-    logger.info(
-      `[${executionContext.requestId}] Uploading attachment '${attachment.name}' (${attachment.size} bytes, ${attachment.contentType})`
-    )
-
-    // Upload to execution storage
-    const userFile = await uploadExecutionFile(
+    return uploadFileFromRawData(
+      {
+        name: attachment.name,
+        data: attachment.data,
+        mimeType: attachment.contentType || attachment.mimeType,
+      },
       executionContext,
-      buffer,
-      attachment.name,
-      attachment.contentType
+      executionContext.userId
     )
-
-    logger.info(
-      `[${executionContext.requestId}] Successfully stored attachment '${attachment.name}' with key: ${userFile.key}`
-    )
-
-    return userFile
   }
 }
diff --git a/apps/sim/lib/workflows/block-outputs.ts b/apps/sim/lib/workflows/block-outputs.ts
index 27fb94809..5a9c2f7b6 100644
--- a/apps/sim/lib/workflows/block-outputs.ts
+++ b/apps/sim/lib/workflows/block-outputs.ts
@@ -190,13 +190,12 @@ export function getBlockOutputPaths(
       if (value.type === 'files') {
         // Show properties without [0] for cleaner display
         // The tag dropdown will add [0] automatically when inserting
-        paths.push(`${path}.url`)
+        // Only expose user-accessible fields (matches UserFile interface)
+        paths.push(`${path}.id`)
         paths.push(`${path}.name`)
+        paths.push(`${path}.url`)
         paths.push(`${path}.size`)
         paths.push(`${path}.type`)
-        paths.push(`${path}.key`)
-        paths.push(`${path}.uploadedAt`)
-        paths.push(`${path}.expiresAt`)
       } else {
         paths.push(path)
       }
@@ -232,18 +231,15 @@ export function getBlockOutputType(
   const pathParts = cleanPath.split('.').filter(Boolean)
 
   const filePropertyTypes: Record<string, string> = {
-    url: 'string',
+    id: 'string',
     name: 'string',
+    url: 'string',
     size: 'number',
     type: 'string',
-    key: 'string',
-    uploadedAt: 'string',
-    expiresAt: 'string',
   }
 
   const lastPart = 
pathParts[pathParts.length - 1] if (lastPart && filePropertyTypes[lastPart]) { - const parentPath = pathParts.slice(0, -1).join('.') let current: any = outputs for (const part of pathParts.slice(0, -1)) { if (!current || typeof current !== 'object') break diff --git a/apps/sim/lib/workflows/executor/execution-core.ts b/apps/sim/lib/workflows/executor/execution-core.ts index c5b872251..96706f602 100644 --- a/apps/sim/lib/workflows/executor/execution-core.ts +++ b/apps/sim/lib/workflows/executor/execution-core.ts @@ -109,8 +109,6 @@ export async function executeWorkflowCore( let processedInput = input || {} try { - const startTime = new Date() - let blocks let edges: Edge[] let loops @@ -270,6 +268,7 @@ export async function executeWorkflowCore( selectedOutputs, executionId, workspaceId: providedWorkspaceId, + userId, isDeployedContext: triggerType !== 'manual', onBlockStart, onBlockComplete, diff --git a/apps/sim/package.json b/apps/sim/package.json index c0f7ee640..0fa1e7470 100644 --- a/apps/sim/package.json +++ b/apps/sim/package.json @@ -95,7 +95,7 @@ "officeparser": "^5.2.0", "openai": "^4.91.1", "papaparse": "5.5.3", - "pdf-parse": "1.1.1", + "pdf-parse": "2.4.5", "posthog-js": "1.268.9", "posthog-node": "5.9.2", "prismjs": "^1.30.0", diff --git a/apps/sim/tools/elevenlabs/tts.ts b/apps/sim/tools/elevenlabs/tts.ts index ee132711d..302f069b1 100644 --- a/apps/sim/tools/elevenlabs/tts.ts +++ b/apps/sim/tools/elevenlabs/tts.ts @@ -40,11 +40,18 @@ export const elevenLabsTtsTool: ToolConfig ({ 'Content-Type': 'application/json', }), - body: (params) => ({ + body: ( + params: ElevenLabsTtsParams & { + _context?: { workspaceId?: string; workflowId?: string; executionId?: string } + } + ) => ({ apiKey: params.apiKey, text: params.text, voiceId: params.voiceId, modelId: params.modelId || 'eleven_monolingual_v1', + workspaceId: params._context?.workspaceId, + workflowId: params._context?.workflowId, + executionId: params._context?.executionId, }), }, @@ -65,11 +72,13 @@ export const elevenLabsTtsTool: ToolConfig = { + id: 'gmail_add_label', + name: 'Gmail Add Label', + description: 'Add label(s) to a Gmail message', + version: '1.0.0', + + oauth: { + required: true, + provider: 'google-email', + additionalScopes: ['https://www.googleapis.com/auth/gmail.modify'], + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'Access token for Gmail API', + }, + messageId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'ID of the message to add labels to', + }, + labelIds: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Comma-separated label IDs to add (e.g., INBOX, Label_123)', + }, + }, + + request: { + url: '/api/tools/gmail/add-label', + method: 'POST', + headers: () => ({ + 'Content-Type': 'application/json', + }), + body: (params: GmailLabelParams) => ({ + accessToken: params.accessToken, + messageId: params.messageId, + labelIds: params.labelIds, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + + if (!data.success) { + return { + success: false, + output: { + content: data.error || 'Failed to add label(s)', + metadata: {}, + }, + error: data.error, + } + } + + return { + success: true, + output: { + content: data.output.content, + metadata: data.output.metadata, + }, + } + }, + + outputs: { + content: { type: 'string', description: 'Success message' }, + metadata: { + type: 'object', + description: 'Email metadata', + 
properties: { + id: { type: 'string', description: 'Gmail message ID' }, + threadId: { type: 'string', description: 'Gmail thread ID' }, + labelIds: { type: 'array', items: { type: 'string' }, description: 'Updated email labels' }, + }, + }, + }, +} diff --git a/apps/sim/tools/gmail/archive.ts b/apps/sim/tools/gmail/archive.ts new file mode 100644 index 000000000..313540075 --- /dev/null +++ b/apps/sim/tools/gmail/archive.ts @@ -0,0 +1,78 @@ +import type { GmailMarkReadParams, GmailToolResponse } from '@/tools/gmail/types' +import type { ToolConfig } from '@/tools/types' + +export const gmailArchiveTool: ToolConfig = { + id: 'gmail_archive', + name: 'Gmail Archive', + description: 'Archive a Gmail message (remove from inbox)', + version: '1.0.0', + + oauth: { + required: true, + provider: 'google-email', + additionalScopes: ['https://www.googleapis.com/auth/gmail.modify'], + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'Access token for Gmail API', + }, + messageId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'ID of the message to archive', + }, + }, + + request: { + url: '/api/tools/gmail/archive', + method: 'POST', + headers: () => ({ + 'Content-Type': 'application/json', + }), + body: (params: GmailMarkReadParams) => ({ + accessToken: params.accessToken, + messageId: params.messageId, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + + if (!data.success) { + return { + success: false, + output: { + content: data.error || 'Failed to archive email', + metadata: {}, + }, + error: data.error, + } + } + + return { + success: true, + output: { + content: data.output.content, + metadata: data.output.metadata, + }, + } + }, + + outputs: { + content: { type: 'string', description: 'Success message' }, + metadata: { + type: 'object', + description: 'Email metadata', + properties: { + id: { type: 'string', description: 'Gmail message ID' }, + threadId: { type: 'string', description: 'Gmail thread ID' }, + labelIds: { type: 'array', items: { type: 'string' }, description: 'Updated email labels' }, + }, + }, + }, +} diff --git a/apps/sim/tools/gmail/delete.ts b/apps/sim/tools/gmail/delete.ts new file mode 100644 index 000000000..316e9f14b --- /dev/null +++ b/apps/sim/tools/gmail/delete.ts @@ -0,0 +1,78 @@ +import type { GmailMarkReadParams, GmailToolResponse } from '@/tools/gmail/types' +import type { ToolConfig } from '@/tools/types' + +export const gmailDeleteTool: ToolConfig = { + id: 'gmail_delete', + name: 'Gmail Delete', + description: 'Delete a Gmail message (move to trash)', + version: '1.0.0', + + oauth: { + required: true, + provider: 'google-email', + additionalScopes: ['https://www.googleapis.com/auth/gmail.modify'], + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'Access token for Gmail API', + }, + messageId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'ID of the message to delete', + }, + }, + + request: { + url: '/api/tools/gmail/delete', + method: 'POST', + headers: () => ({ + 'Content-Type': 'application/json', + }), + body: (params: GmailMarkReadParams) => ({ + accessToken: params.accessToken, + messageId: params.messageId, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + + if (!data.success) { + return { + success: false, + output: { + content: data.error || 'Failed to delete email', + 
metadata: {}, + }, + error: data.error, + } + } + + return { + success: true, + output: { + content: data.output.content, + metadata: data.output.metadata, + }, + } + }, + + outputs: { + content: { type: 'string', description: 'Success message' }, + metadata: { + type: 'object', + description: 'Email metadata', + properties: { + id: { type: 'string', description: 'Gmail message ID' }, + threadId: { type: 'string', description: 'Gmail thread ID' }, + labelIds: { type: 'array', items: { type: 'string' }, description: 'Updated email labels' }, + }, + }, + }, +} diff --git a/apps/sim/tools/gmail/index.ts b/apps/sim/tools/gmail/index.ts index 110147cad..78819b333 100644 --- a/apps/sim/tools/gmail/index.ts +++ b/apps/sim/tools/gmail/index.ts @@ -1,6 +1,27 @@ +import { gmailAddLabelTool } from '@/tools/gmail/add_label' +import { gmailArchiveTool } from '@/tools/gmail/archive' +import { gmailDeleteTool } from '@/tools/gmail/delete' import { gmailDraftTool } from '@/tools/gmail/draft' +import { gmailMarkReadTool } from '@/tools/gmail/mark_read' +import { gmailMarkUnreadTool } from '@/tools/gmail/mark_unread' +import { gmailMoveTool } from '@/tools/gmail/move' import { gmailReadTool } from '@/tools/gmail/read' +import { gmailRemoveLabelTool } from '@/tools/gmail/remove_label' import { gmailSearchTool } from '@/tools/gmail/search' import { gmailSendTool } from '@/tools/gmail/send' +import { gmailUnarchiveTool } from '@/tools/gmail/unarchive' -export { gmailSendTool, gmailReadTool, gmailSearchTool, gmailDraftTool } +export { + gmailSendTool, + gmailReadTool, + gmailSearchTool, + gmailDraftTool, + gmailMoveTool, + gmailMarkReadTool, + gmailMarkUnreadTool, + gmailArchiveTool, + gmailUnarchiveTool, + gmailDeleteTool, + gmailAddLabelTool, + gmailRemoveLabelTool, +} diff --git a/apps/sim/tools/gmail/mark_read.ts b/apps/sim/tools/gmail/mark_read.ts new file mode 100644 index 000000000..ae26af352 --- /dev/null +++ b/apps/sim/tools/gmail/mark_read.ts @@ -0,0 +1,78 @@ +import type { GmailMarkReadParams, GmailToolResponse } from '@/tools/gmail/types' +import type { ToolConfig } from '@/tools/types' + +export const gmailMarkReadTool: ToolConfig = { + id: 'gmail_mark_read', + name: 'Gmail Mark as Read', + description: 'Mark a Gmail message as read', + version: '1.0.0', + + oauth: { + required: true, + provider: 'google-email', + additionalScopes: ['https://www.googleapis.com/auth/gmail.modify'], + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'Access token for Gmail API', + }, + messageId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'ID of the message to mark as read', + }, + }, + + request: { + url: '/api/tools/gmail/mark-read', + method: 'POST', + headers: () => ({ + 'Content-Type': 'application/json', + }), + body: (params: GmailMarkReadParams) => ({ + accessToken: params.accessToken, + messageId: params.messageId, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + + if (!data.success) { + return { + success: false, + output: { + content: data.error || 'Failed to mark email as read', + metadata: {}, + }, + error: data.error, + } + } + + return { + success: true, + output: { + content: data.output.content, + metadata: data.output.metadata, + }, + } + }, + + outputs: { + content: { type: 'string', description: 'Success message' }, + metadata: { + type: 'object', + description: 'Email metadata', + properties: { + id: { type: 'string', description: 'Gmail message ID' }, + 
threadId: { type: 'string', description: 'Gmail thread ID' }, + labelIds: { type: 'array', items: { type: 'string' }, description: 'Updated email labels' }, + }, + }, + }, +} diff --git a/apps/sim/tools/gmail/mark_unread.ts b/apps/sim/tools/gmail/mark_unread.ts new file mode 100644 index 000000000..e8718aa11 --- /dev/null +++ b/apps/sim/tools/gmail/mark_unread.ts @@ -0,0 +1,78 @@ +import type { GmailMarkReadParams, GmailToolResponse } from '@/tools/gmail/types' +import type { ToolConfig } from '@/tools/types' + +export const gmailMarkUnreadTool: ToolConfig = { + id: 'gmail_mark_unread', + name: 'Gmail Mark as Unread', + description: 'Mark a Gmail message as unread', + version: '1.0.0', + + oauth: { + required: true, + provider: 'google-email', + additionalScopes: ['https://www.googleapis.com/auth/gmail.modify'], + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'Access token for Gmail API', + }, + messageId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'ID of the message to mark as unread', + }, + }, + + request: { + url: '/api/tools/gmail/mark-unread', + method: 'POST', + headers: () => ({ + 'Content-Type': 'application/json', + }), + body: (params: GmailMarkReadParams) => ({ + accessToken: params.accessToken, + messageId: params.messageId, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + + if (!data.success) { + return { + success: false, + output: { + content: data.error || 'Failed to mark email as unread', + metadata: {}, + }, + error: data.error, + } + } + + return { + success: true, + output: { + content: data.output.content, + metadata: data.output.metadata, + }, + } + }, + + outputs: { + content: { type: 'string', description: 'Success message' }, + metadata: { + type: 'object', + description: 'Email metadata', + properties: { + id: { type: 'string', description: 'Gmail message ID' }, + threadId: { type: 'string', description: 'Gmail thread ID' }, + labelIds: { type: 'array', items: { type: 'string' }, description: 'Updated email labels' }, + }, + }, + }, +} diff --git a/apps/sim/tools/gmail/move.ts b/apps/sim/tools/gmail/move.ts new file mode 100644 index 000000000..4db41e66d --- /dev/null +++ b/apps/sim/tools/gmail/move.ts @@ -0,0 +1,92 @@ +import type { GmailMoveParams, GmailToolResponse } from '@/tools/gmail/types' +import type { ToolConfig } from '@/tools/types' + +export const gmailMoveTool: ToolConfig = { + id: 'gmail_move', + name: 'Gmail Move', + description: 'Move emails between Gmail labels/folders', + version: '1.0.0', + + oauth: { + required: true, + provider: 'google-email', + additionalScopes: ['https://www.googleapis.com/auth/gmail.modify'], + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'Access token for Gmail API', + }, + messageId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'ID of the message to move', + }, + addLabelIds: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Comma-separated label IDs to add (e.g., INBOX, Label_123)', + }, + removeLabelIds: { + type: 'string', + required: false, + visibility: 'user-or-llm', + description: 'Comma-separated label IDs to remove (e.g., INBOX, SPAM)', + }, + }, + + request: { + url: '/api/tools/gmail/move', + method: 'POST', + headers: () => ({ + 'Content-Type': 'application/json', + }), + body: (params: GmailMoveParams) => ({ + accessToken: 
params.accessToken, + messageId: params.messageId, + addLabelIds: params.addLabelIds, + removeLabelIds: params.removeLabelIds, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + + if (!data.success) { + return { + success: false, + output: { + content: data.error || 'Failed to move email', + metadata: {}, + }, + error: data.error, + } + } + + return { + success: true, + output: { + content: data.output.content, + metadata: data.output.metadata, + }, + } + }, + + outputs: { + content: { type: 'string', description: 'Success message' }, + metadata: { + type: 'object', + description: 'Email metadata', + properties: { + id: { type: 'string', description: 'Gmail message ID' }, + threadId: { type: 'string', description: 'Gmail thread ID' }, + labelIds: { type: 'array', items: { type: 'string' }, description: 'Updated email labels' }, + }, + }, + }, +} diff --git a/apps/sim/tools/gmail/remove_label.ts b/apps/sim/tools/gmail/remove_label.ts new file mode 100644 index 000000000..45960f19b --- /dev/null +++ b/apps/sim/tools/gmail/remove_label.ts @@ -0,0 +1,85 @@ +import type { GmailLabelParams, GmailToolResponse } from '@/tools/gmail/types' +import type { ToolConfig } from '@/tools/types' + +export const gmailRemoveLabelTool: ToolConfig = { + id: 'gmail_remove_label', + name: 'Gmail Remove Label', + description: 'Remove label(s) from a Gmail message', + version: '1.0.0', + + oauth: { + required: true, + provider: 'google-email', + additionalScopes: ['https://www.googleapis.com/auth/gmail.modify'], + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'Access token for Gmail API', + }, + messageId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'ID of the message to remove labels from', + }, + labelIds: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Comma-separated label IDs to remove (e.g., INBOX, Label_123)', + }, + }, + + request: { + url: '/api/tools/gmail/remove-label', + method: 'POST', + headers: () => ({ + 'Content-Type': 'application/json', + }), + body: (params: GmailLabelParams) => ({ + accessToken: params.accessToken, + messageId: params.messageId, + labelIds: params.labelIds, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + + if (!data.success) { + return { + success: false, + output: { + content: data.error || 'Failed to remove label(s)', + metadata: {}, + }, + error: data.error, + } + } + + return { + success: true, + output: { + content: data.output.content, + metadata: data.output.metadata, + }, + } + }, + + outputs: { + content: { type: 'string', description: 'Success message' }, + metadata: { + type: 'object', + description: 'Email metadata', + properties: { + id: { type: 'string', description: 'Gmail message ID' }, + threadId: { type: 'string', description: 'Gmail thread ID' }, + labelIds: { type: 'array', items: { type: 'string' }, description: 'Updated email labels' }, + }, + }, + }, +} diff --git a/apps/sim/tools/gmail/types.ts b/apps/sim/tools/gmail/types.ts index 429e1d617..00126ac4b 100644 --- a/apps/sim/tools/gmail/types.ts +++ b/apps/sim/tools/gmail/types.ts @@ -33,8 +33,32 @@ export interface GmailSearchParams extends BaseGmailParams { maxResults?: number } +// Move operation parameters +export interface GmailMoveParams extends BaseGmailParams { + messageId: string + addLabelIds: string + removeLabelIds?: string +} + +// Mark as read/unread parameters 
(reuses simple messageId pattern) +export interface GmailMarkReadParams extends BaseGmailParams { + messageId: string +} + +// Label management parameters +export interface GmailLabelParams extends BaseGmailParams { + messageId: string + labelIds: string +} + // Union type for all Gmail tool parameters -export type GmailToolParams = GmailSendParams | GmailReadParams | GmailSearchParams +export type GmailToolParams = + | GmailSendParams + | GmailReadParams + | GmailSearchParams + | GmailMoveParams + | GmailMarkReadParams + | GmailLabelParams // Response metadata interface BaseGmailMetadata { diff --git a/apps/sim/tools/gmail/unarchive.ts b/apps/sim/tools/gmail/unarchive.ts new file mode 100644 index 000000000..6f4031d96 --- /dev/null +++ b/apps/sim/tools/gmail/unarchive.ts @@ -0,0 +1,78 @@ +import type { GmailMarkReadParams, GmailToolResponse } from '@/tools/gmail/types' +import type { ToolConfig } from '@/tools/types' + +export const gmailUnarchiveTool: ToolConfig = { + id: 'gmail_unarchive', + name: 'Gmail Unarchive', + description: 'Unarchive a Gmail message (move back to inbox)', + version: '1.0.0', + + oauth: { + required: true, + provider: 'google-email', + additionalScopes: ['https://www.googleapis.com/auth/gmail.modify'], + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'Access token for Gmail API', + }, + messageId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'ID of the message to unarchive', + }, + }, + + request: { + url: '/api/tools/gmail/unarchive', + method: 'POST', + headers: () => ({ + 'Content-Type': 'application/json', + }), + body: (params: GmailMarkReadParams) => ({ + accessToken: params.accessToken, + messageId: params.messageId, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + + if (!data.success) { + return { + success: false, + output: { + content: data.error || 'Failed to unarchive email', + metadata: {}, + }, + error: data.error, + } + } + + return { + success: true, + output: { + content: data.output.content, + metadata: data.output.metadata, + }, + } + }, + + outputs: { + content: { type: 'string', description: 'Success message' }, + metadata: { + type: 'object', + description: 'Email metadata', + properties: { + id: { type: 'string', description: 'Gmail message ID' }, + threadId: { type: 'string', description: 'Gmail thread ID' }, + labelIds: { type: 'array', items: { type: 'string' }, description: 'Updated email labels' }, + }, + }, + }, +} diff --git a/apps/sim/tools/google_drive/download.ts b/apps/sim/tools/google_drive/download.ts new file mode 100644 index 000000000..36852e637 --- /dev/null +++ b/apps/sim/tools/google_drive/download.ts @@ -0,0 +1,171 @@ +import { createLogger } from '@/lib/logs/console/logger' +import type { GoogleDriveDownloadResponse, GoogleDriveToolParams } from '@/tools/google_drive/types' +import { DEFAULT_EXPORT_FORMATS, GOOGLE_WORKSPACE_MIME_TYPES } from '@/tools/google_drive/utils' +import type { ToolConfig } from '@/tools/types' + +const logger = createLogger('GoogleDriveDownloadTool') + +export const downloadTool: ToolConfig = { + id: 'google_drive_download', + name: 'Download File from Google Drive', + description: 'Download a file from Google Drive (exports Google Workspace files automatically)', + version: '1.0', + + oauth: { + required: true, + provider: 'google-drive', + additionalScopes: [ + 'https://www.googleapis.com/auth/drive.readonly', + 
'https://www.googleapis.com/auth/drive.file', + ], + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'The access token for the Google Drive API', + }, + fileId: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'The ID of the file to download', + }, + mimeType: { + type: 'string', + required: false, + visibility: 'hidden', + description: 'The MIME type to export Google Workspace files to (optional)', + }, + fileName: { + type: 'string', + required: false, + visibility: 'user-only', + description: 'Optional filename override', + }, + }, + + request: { + url: (params) => + `https://www.googleapis.com/drive/v3/files/${params.fileId}?fields=id,name,mimeType`, + method: 'GET', + headers: (params) => ({ + Authorization: `Bearer ${params.accessToken}`, + }), + }, + + transformResponse: async (response: Response, params?: GoogleDriveToolParams) => { + try { + if (!response.ok) { + const errorDetails = await response.json().catch(() => ({})) + logger.error('Failed to get file metadata', { + status: response.status, + statusText: response.statusText, + error: errorDetails, + }) + throw new Error(errorDetails.error?.message || 'Failed to get file metadata') + } + + const metadata = await response.json() + const fileId = metadata.id + const mimeType = metadata.mimeType + const authHeader = `Bearer ${params?.accessToken || ''}` + + let fileBuffer: Buffer + let finalMimeType = mimeType + + if (GOOGLE_WORKSPACE_MIME_TYPES.includes(mimeType)) { + const exportFormat = params?.mimeType || DEFAULT_EXPORT_FORMATS[mimeType] || 'text/plain' + finalMimeType = exportFormat + + logger.info('Exporting Google Workspace file', { + fileId, + mimeType, + exportFormat, + }) + + const exportResponse = await fetch( + `https://www.googleapis.com/drive/v3/files/${fileId}/export?mimeType=${encodeURIComponent(exportFormat)}`, + { + headers: { + Authorization: authHeader, + }, + } + ) + + if (!exportResponse.ok) { + const exportError = await exportResponse.json().catch(() => ({})) + logger.error('Failed to export file', { + status: exportResponse.status, + statusText: exportResponse.statusText, + error: exportError, + }) + throw new Error(exportError.error?.message || 'Failed to export Google Workspace file') + } + + const arrayBuffer = await exportResponse.arrayBuffer() + fileBuffer = Buffer.from(arrayBuffer) + } else { + logger.info('Downloading regular file', { + fileId, + mimeType, + }) + + const downloadResponse = await fetch( + `https://www.googleapis.com/drive/v3/files/${fileId}?alt=media`, + { + headers: { + Authorization: authHeader, + }, + } + ) + + if (!downloadResponse.ok) { + const downloadError = await downloadResponse.json().catch(() => ({})) + logger.error('Failed to download file', { + status: downloadResponse.status, + statusText: downloadResponse.statusText, + error: downloadError, + }) + throw new Error(downloadError.error?.message || 'Failed to download file') + } + + const arrayBuffer = await downloadResponse.arrayBuffer() + fileBuffer = Buffer.from(arrayBuffer) + } + + const resolvedName = params?.fileName || metadata.name || 'download' + + logger.info('File downloaded successfully', { + fileId, + name: resolvedName, + size: fileBuffer.length, + mimeType: finalMimeType, + }) + + return { + success: true, + output: { + file: { + name: resolvedName, + mimeType: finalMimeType, + data: fileBuffer, + size: fileBuffer.length, + }, + }, + } + } catch (error: any) { + logger.error('Error in transform response', { + error: 
error.message, + stack: error.stack, + }) + throw error + } + }, + + outputs: { + file: { type: 'file', description: 'Downloaded file stored in execution files' }, + }, +} diff --git a/apps/sim/tools/google_drive/index.ts b/apps/sim/tools/google_drive/index.ts index 634223ccd..4d1a6f167 100644 --- a/apps/sim/tools/google_drive/index.ts +++ b/apps/sim/tools/google_drive/index.ts @@ -1,9 +1,11 @@ import { createFolderTool } from '@/tools/google_drive/create_folder' +import { downloadTool } from '@/tools/google_drive/download' import { getContentTool } from '@/tools/google_drive/get_content' import { listTool } from '@/tools/google_drive/list' import { uploadTool } from '@/tools/google_drive/upload' export const googleDriveCreateFolderTool = createFolderTool +export const googleDriveDownloadTool = downloadTool export const googleDriveGetContentTool = getContentTool export const googleDriveListTool = listTool export const googleDriveUploadTool = uploadTool diff --git a/apps/sim/tools/google_drive/types.ts b/apps/sim/tools/google_drive/types.ts index c9e7cb1c6..d45552ba0 100644 --- a/apps/sim/tools/google_drive/types.ts +++ b/apps/sim/tools/google_drive/types.ts @@ -32,6 +32,17 @@ export interface GoogleDriveGetContentResponse extends ToolResponse { } } +export interface GoogleDriveDownloadResponse extends ToolResponse { + output: { + file: { + name: string + mimeType: string + data: Buffer + size: number + } + } +} + export interface GoogleDriveToolParams { accessToken: string folderId?: string @@ -50,4 +61,5 @@ export interface GoogleDriveToolParams { export type GoogleDriveResponse = | GoogleDriveUploadResponse | GoogleDriveGetContentResponse + | GoogleDriveDownloadResponse | GoogleDriveListResponse diff --git a/apps/sim/tools/onedrive/download.ts b/apps/sim/tools/onedrive/download.ts new file mode 100644 index 000000000..4106caba9 --- /dev/null +++ b/apps/sim/tools/onedrive/download.ts @@ -0,0 +1,131 @@ +import { createLogger } from '@/lib/logs/console/logger' +import type { OneDriveDownloadResponse, OneDriveToolParams } from '@/tools/onedrive/types' +import type { ToolConfig } from '@/tools/types' + +const logger = createLogger('OneDriveDownloadTool') + +export const downloadTool: ToolConfig = { + id: 'onedrive_download', + name: 'Download File from OneDrive', + description: 'Download a file from OneDrive', + version: '1.0', + + oauth: { + required: true, + provider: 'onedrive', + additionalScopes: ['Files.Read', 'Files.ReadWrite', 'offline_access'], + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'The access token for the Microsoft Graph API', + }, + fileId: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'The ID of the file to download', + }, + fileName: { + type: 'string', + required: false, + visibility: 'user-only', + description: 'Optional filename override', + }, + }, + + request: { + url: (params) => { + return `https://graph.microsoft.com/v1.0/me/drive/items/${params.fileId}` + }, + method: 'GET', + headers: (params) => ({ + Authorization: `Bearer ${params.accessToken}`, + }), + }, + + transformResponse: async (response: Response, params?: OneDriveToolParams) => { + try { + if (!response.ok) { + const errorDetails = await response.json().catch(() => ({})) + logger.error('Failed to get file metadata', { + status: response.status, + statusText: response.statusText, + error: errorDetails, + requestedFileId: params?.fileId, + }) + throw new Error(errorDetails.error?.message || 'Failed to get 
file metadata') + } + + const metadata = await response.json() + + // Check if this is actually a folder + if (metadata.folder && !metadata.file) { + logger.error('Attempted to download a folder instead of a file', { + itemId: metadata.id, + itemName: metadata.name, + isFolder: true, + }) + throw new Error(`Cannot download folder "${metadata.name}". Please select a file instead.`) + } + + const fileId = metadata.id + const fileName = metadata.name + const mimeType = metadata.file?.mimeType || 'application/octet-stream' + const authHeader = `Bearer ${params?.accessToken || ''}` + + const downloadResponse = await fetch( + `https://graph.microsoft.com/v1.0/me/drive/items/${fileId}/content`, + { + headers: { + Authorization: authHeader, + }, + } + ) + + if (!downloadResponse.ok) { + const downloadError = await downloadResponse.json().catch(() => ({})) + logger.error('Failed to download file', { + status: downloadResponse.status, + statusText: downloadResponse.statusText, + error: downloadError, + }) + throw new Error(downloadError.error?.message || 'Failed to download file') + } + + const arrayBuffer = await downloadResponse.arrayBuffer() + const fileBuffer = Buffer.from(arrayBuffer) + + const resolvedName = params?.fileName || fileName || 'download' + + // Convert buffer to base64 string for proper JSON serialization + // This ensures the file data survives the proxy round-trip + const base64Data = fileBuffer.toString('base64') + + return { + success: true, + output: { + file: { + name: resolvedName, + mimeType, + data: base64Data, + size: fileBuffer.length, + }, + }, + } + } catch (error: any) { + logger.error('Error in transform response', { + error: error.message, + stack: error.stack, + }) + throw error + } + }, + + outputs: { + file: { type: 'file', description: 'Downloaded file stored in execution files' }, + }, +} diff --git a/apps/sim/tools/onedrive/index.ts b/apps/sim/tools/onedrive/index.ts index 30298d9d7..146bcd822 100644 --- a/apps/sim/tools/onedrive/index.ts +++ b/apps/sim/tools/onedrive/index.ts @@ -1,7 +1,9 @@ import { createFolderTool } from '@/tools/onedrive/create_folder' +import { downloadTool } from '@/tools/onedrive/download' import { listTool } from '@/tools/onedrive/list' import { uploadTool } from '@/tools/onedrive/upload' export const onedriveCreateFolderTool = createFolderTool +export const onedriveDownloadTool = downloadTool export const onedriveListTool = listTool export const onedriveUploadTool = uploadTool diff --git a/apps/sim/tools/onedrive/types.ts b/apps/sim/tools/onedrive/types.ts index 9f4ca181f..ae7820ae4 100644 --- a/apps/sim/tools/onedrive/types.ts +++ b/apps/sim/tools/onedrive/types.ts @@ -19,6 +19,17 @@ export interface MicrosoftGraphDriveItem { driveId: string path: string } + thumbnails?: Array<{ + small?: { url: string } + medium?: { url: string } + large?: { url: string } + }> + createdBy?: { + user?: { + displayName?: string + email?: string + } + } } export interface OneDriveFile { @@ -55,6 +66,17 @@ export interface OneDriveUploadResponse extends ToolResponse { } } +export interface OneDriveDownloadResponse extends ToolResponse { + output: { + file: { + name: string + mimeType: string + data: Buffer | string // Buffer for direct use, string for base64-encoded data + size: number + } + } +} + export interface OneDriveToolParams { accessToken: string folderSelector?: string @@ -73,4 +95,7 @@ export interface OneDriveToolParams { values?: (string | number | boolean | null)[][] } -export type OneDriveResponse = OneDriveUploadResponse | 
OneDriveListResponse +export type OneDriveResponse = + | OneDriveUploadResponse + | OneDriveDownloadResponse + | OneDriveListResponse diff --git a/apps/sim/tools/outlook/copy.ts b/apps/sim/tools/outlook/copy.ts new file mode 100644 index 000000000..2ecda538f --- /dev/null +++ b/apps/sim/tools/outlook/copy.ts @@ -0,0 +1,74 @@ +import type { OutlookCopyParams, OutlookCopyResponse } from '@/tools/outlook/types' +import type { ToolConfig } from '@/tools/types' + +export const outlookCopyTool: ToolConfig = { + id: 'outlook_copy', + name: 'Outlook Copy', + description: 'Copy an Outlook message to another folder', + version: '1.0.0', + + oauth: { + required: true, + provider: 'outlook', + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'Access token for Outlook API', + }, + messageId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'ID of the message to copy', + }, + destinationId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'ID of the destination folder', + }, + }, + + request: { + url: '/api/tools/outlook/copy', + method: 'POST', + headers: () => ({ + 'Content-Type': 'application/json', + }), + body: (params: OutlookCopyParams) => ({ + accessToken: params.accessToken, + messageId: params.messageId, + destinationId: params.destinationId, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + if (!data.success) { + throw new Error(data.error || 'Failed to copy Outlook email') + } + return { + success: true, + output: { + message: data.output.message, + results: { + originalMessageId: data.output.originalMessageId, + copiedMessageId: data.output.copiedMessageId, + destinationFolderId: data.output.destinationFolderId, + }, + }, + } + }, + + outputs: { + success: { type: 'boolean', description: 'Email copy success status' }, + message: { type: 'string', description: 'Success or error message' }, + originalMessageId: { type: 'string', description: 'ID of the original message' }, + copiedMessageId: { type: 'string', description: 'ID of the copied message' }, + destinationFolderId: { type: 'string', description: 'ID of the destination folder' }, + }, +} diff --git a/apps/sim/tools/outlook/delete.ts b/apps/sim/tools/outlook/delete.ts new file mode 100644 index 000000000..981926fdc --- /dev/null +++ b/apps/sim/tools/outlook/delete.ts @@ -0,0 +1,65 @@ +import type { OutlookDeleteParams, OutlookDeleteResponse } from '@/tools/outlook/types' +import type { ToolConfig } from '@/tools/types' + +export const outlookDeleteTool: ToolConfig = { + id: 'outlook_delete', + name: 'Outlook Delete', + description: 'Delete an Outlook message (move to Deleted Items)', + version: '1.0.0', + + oauth: { + required: true, + provider: 'outlook', + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'Access token for Outlook API', + }, + messageId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'ID of the message to delete', + }, + }, + + request: { + url: '/api/tools/outlook/delete', + method: 'POST', + headers: () => ({ + 'Content-Type': 'application/json', + }), + body: (params: OutlookDeleteParams) => ({ + accessToken: params.accessToken, + messageId: params.messageId, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + if (!data.success) { + throw new Error(data.error || 'Failed to delete Outlook email') + } + return { + 
success: true, + output: { + message: data.output.message, + results: { + messageId: data.output.messageId, + status: data.output.status, + }, + }, + } + }, + + outputs: { + success: { type: 'boolean', description: 'Operation success status' }, + message: { type: 'string', description: 'Success or error message' }, + messageId: { type: 'string', description: 'ID of the deleted message' }, + status: { type: 'string', description: 'Deletion status' }, + }, +} diff --git a/apps/sim/tools/outlook/index.ts b/apps/sim/tools/outlook/index.ts index 63a918b9d..dbb135f48 100644 --- a/apps/sim/tools/outlook/index.ts +++ b/apps/sim/tools/outlook/index.ts @@ -1,6 +1,21 @@ +import { outlookCopyTool } from '@/tools/outlook/copy' +import { outlookDeleteTool } from '@/tools/outlook/delete' import { outlookDraftTool } from '@/tools/outlook/draft' import { outlookForwardTool } from '@/tools/outlook/forward' +import { outlookMarkReadTool } from '@/tools/outlook/mark_read' +import { outlookMarkUnreadTool } from '@/tools/outlook/mark_unread' +import { outlookMoveTool } from '@/tools/outlook/move' import { outlookReadTool } from '@/tools/outlook/read' import { outlookSendTool } from '@/tools/outlook/send' -export { outlookDraftTool, outlookForwardTool, outlookReadTool, outlookSendTool } +export { + outlookDraftTool, + outlookForwardTool, + outlookMoveTool, + outlookReadTool, + outlookSendTool, + outlookMarkReadTool, + outlookMarkUnreadTool, + outlookDeleteTool, + outlookCopyTool, +} diff --git a/apps/sim/tools/outlook/mark_read.ts b/apps/sim/tools/outlook/mark_read.ts new file mode 100644 index 000000000..1036f6af9 --- /dev/null +++ b/apps/sim/tools/outlook/mark_read.ts @@ -0,0 +1,65 @@ +import type { OutlookMarkReadParams, OutlookMarkReadResponse } from '@/tools/outlook/types' +import type { ToolConfig } from '@/tools/types' + +export const outlookMarkReadTool: ToolConfig = { + id: 'outlook_mark_read', + name: 'Outlook Mark as Read', + description: 'Mark an Outlook message as read', + version: '1.0.0', + + oauth: { + required: true, + provider: 'outlook', + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'Access token for Outlook API', + }, + messageId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'ID of the message to mark as read', + }, + }, + + request: { + url: '/api/tools/outlook/mark-read', + method: 'POST', + headers: () => ({ + 'Content-Type': 'application/json', + }), + body: (params: OutlookMarkReadParams) => ({ + accessToken: params.accessToken, + messageId: params.messageId, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + if (!data.success) { + throw new Error(data.error || 'Failed to mark Outlook email as read') + } + return { + success: true, + output: { + message: data.output.message, + results: { + messageId: data.output.messageId, + isRead: data.output.isRead, + }, + }, + } + }, + + outputs: { + success: { type: 'boolean', description: 'Operation success status' }, + message: { type: 'string', description: 'Success or error message' }, + messageId: { type: 'string', description: 'ID of the message' }, + isRead: { type: 'boolean', description: 'Read status of the message' }, + }, +} diff --git a/apps/sim/tools/outlook/mark_unread.ts b/apps/sim/tools/outlook/mark_unread.ts new file mode 100644 index 000000000..abf292639 --- /dev/null +++ b/apps/sim/tools/outlook/mark_unread.ts @@ -0,0 +1,65 @@ +import type { OutlookMarkReadParams, OutlookMarkReadResponse 
} from '@/tools/outlook/types' +import type { ToolConfig } from '@/tools/types' + +export const outlookMarkUnreadTool: ToolConfig = { + id: 'outlook_mark_unread', + name: 'Outlook Mark as Unread', + description: 'Mark an Outlook message as unread', + version: '1.0.0', + + oauth: { + required: true, + provider: 'outlook', + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'Access token for Outlook API', + }, + messageId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'ID of the message to mark as unread', + }, + }, + + request: { + url: '/api/tools/outlook/mark-unread', + method: 'POST', + headers: () => ({ + 'Content-Type': 'application/json', + }), + body: (params: OutlookMarkReadParams) => ({ + accessToken: params.accessToken, + messageId: params.messageId, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + if (!data.success) { + throw new Error(data.error || 'Failed to mark Outlook email as unread') + } + return { + success: true, + output: { + message: data.output.message, + results: { + messageId: data.output.messageId, + isRead: data.output.isRead, + }, + }, + } + }, + + outputs: { + success: { type: 'boolean', description: 'Operation success status' }, + message: { type: 'string', description: 'Success or error message' }, + messageId: { type: 'string', description: 'ID of the message' }, + isRead: { type: 'boolean', description: 'Read status of the message' }, + }, +} diff --git a/apps/sim/tools/outlook/move.ts b/apps/sim/tools/outlook/move.ts new file mode 100644 index 000000000..aaeb5113d --- /dev/null +++ b/apps/sim/tools/outlook/move.ts @@ -0,0 +1,72 @@ +import type { OutlookMoveParams, OutlookMoveResponse } from '@/tools/outlook/types' +import type { ToolConfig } from '@/tools/types' + +export const outlookMoveTool: ToolConfig = { + id: 'outlook_move', + name: 'Outlook Move', + description: 'Move emails between Outlook folders', + version: '1.0.0', + + oauth: { + required: true, + provider: 'outlook', + }, + + params: { + accessToken: { + type: 'string', + required: true, + visibility: 'hidden', + description: 'Access token for Outlook API', + }, + messageId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'ID of the message to move', + }, + destinationId: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'ID of the destination folder', + }, + }, + + request: { + url: '/api/tools/outlook/move', + method: 'POST', + headers: () => ({ + 'Content-Type': 'application/json', + }), + body: (params: OutlookMoveParams) => ({ + accessToken: params.accessToken, + messageId: params.messageId, + destinationId: params.destinationId, + }), + }, + + transformResponse: async (response) => { + const data = await response.json() + if (!data.success) { + throw new Error(data.error || 'Failed to move Outlook email') + } + return { + success: true, + output: { + message: data.output.message, + results: { + messageId: data.output.messageId, + newFolderId: data.output.newFolderId, + }, + }, + } + }, + + outputs: { + success: { type: 'boolean', description: 'Email move success status' }, + message: { type: 'string', description: 'Success or error message' }, + messageId: { type: 'string', description: 'ID of the moved message' }, + newFolderId: { type: 'string', description: 'ID of the destination folder' }, + }, +} diff --git a/apps/sim/tools/outlook/types.ts b/apps/sim/tools/outlook/types.ts index 
7bb2fe4de..99a6a0fa1 100644 --- a/apps/sim/tools/outlook/types.ts +++ b/apps/sim/tools/outlook/types.ts @@ -163,4 +163,73 @@ export interface OutlookForwardResponse extends ToolResponse { } } -export type OutlookExtendedResponse = OutlookResponse | OutlookForwardResponse +export interface OutlookMoveParams { + accessToken: string + messageId: string + destinationId: string +} + +export interface OutlookMoveResponse extends ToolResponse { + output: { + message: string + results: { + messageId: string + newFolderId: string + } + } +} + +export interface OutlookMarkReadParams { + accessToken: string + messageId: string +} + +export interface OutlookMarkReadResponse extends ToolResponse { + output: { + message: string + results: { + messageId: string + isRead: boolean + } + } +} + +export interface OutlookDeleteParams { + accessToken: string + messageId: string +} + +export interface OutlookDeleteResponse extends ToolResponse { + output: { + message: string + results: { + messageId: string + status: string + } + } +} + +export interface OutlookCopyParams { + accessToken: string + messageId: string + destinationId: string +} + +export interface OutlookCopyResponse extends ToolResponse { + output: { + message: string + results: { + originalMessageId: string + copiedMessageId: string + destinationFolderId: string + } + } +} + +export type OutlookExtendedResponse = + | OutlookResponse + | OutlookForwardResponse + | OutlookMoveResponse + | OutlookMarkReadResponse + | OutlookDeleteResponse + | OutlookCopyResponse diff --git a/apps/sim/tools/registry.ts b/apps/sim/tools/registry.ts index dc29b9d4f..d8a11b2be 100644 --- a/apps/sim/tools/registry.ts +++ b/apps/sim/tools/registry.ts @@ -32,7 +32,20 @@ import { githubPrTool, githubRepoInfoTool, } from '@/tools/github' -import { gmailDraftTool, gmailReadTool, gmailSearchTool, gmailSendTool } from '@/tools/gmail' +import { + gmailAddLabelTool, + gmailArchiveTool, + gmailDeleteTool, + gmailDraftTool, + gmailMarkReadTool, + gmailMarkUnreadTool, + gmailMoveTool, + gmailReadTool, + gmailRemoveLabelTool, + gmailSearchTool, + gmailSendTool, + gmailUnarchiveTool, +} from '@/tools/gmail' import { searchTool as googleSearchTool } from '@/tools/google' import { googleCalendarCreateTool, @@ -44,6 +57,7 @@ import { import { googleDocsCreateTool, googleDocsReadTool, googleDocsWriteTool } from '@/tools/google_docs' import { googleDriveCreateFolderTool, + googleDriveDownloadTool, googleDriveGetContentTool, googleDriveListTool, googleDriveUploadTool, @@ -125,11 +139,21 @@ import { notionSearchTool, notionWriteTool, } from '@/tools/notion' -import { onedriveCreateFolderTool, onedriveListTool, onedriveUploadTool } from '@/tools/onedrive' +import { + onedriveCreateFolderTool, + onedriveDownloadTool, + onedriveListTool, + onedriveUploadTool, +} from '@/tools/onedrive' import { imageTool, embeddingsTool as openAIEmbeddings } from '@/tools/openai' import { + outlookCopyTool, + outlookDeleteTool, outlookDraftTool, outlookForwardTool, + outlookMarkReadTool, + outlookMarkUnreadTool, + outlookMoveTool, outlookReadTool, outlookSendTool, } from '@/tools/outlook' @@ -170,7 +194,15 @@ import { sharepointUpdateListItemTool, sharepointUploadFileTool, } from '@/tools/sharepoint' -import { slackCanvasTool, slackMessageReaderTool, slackMessageTool } from '@/tools/slack' +import { + slackAddReactionTool, + slackCanvasTool, + slackDeleteMessageTool, + slackDownloadTool, + slackMessageReaderTool, + slackMessageTool, + slackUpdateMessageTool, +} from '@/tools/slack' import { smsSendTool } from 
'@/tools/sms' import { stagehandAgentTool, stagehandExtractTool } from '@/tools/stagehand' import { @@ -272,6 +304,10 @@ export const tools: Record = { slack_message: slackMessageTool, slack_message_reader: slackMessageReaderTool, slack_canvas: slackCanvasTool, + slack_download: slackDownloadTool, + slack_update_message: slackUpdateMessageTool, + slack_delete_message: slackDeleteMessageTool, + slack_add_reaction: slackAddReactionTool, github_repo_info: githubRepoInfoTool, github_latest_commit: githubLatestCommitTool, serper_search: serperSearch, @@ -306,6 +342,14 @@ export const tools: Record = { gmail_read: gmailReadTool, gmail_search: gmailSearchTool, gmail_draft: gmailDraftTool, + gmail_move: gmailMoveTool, + gmail_mark_read: gmailMarkReadTool, + gmail_mark_unread: gmailMarkUnreadTool, + gmail_archive: gmailArchiveTool, + gmail_unarchive: gmailUnarchiveTool, + gmail_delete: gmailDeleteTool, + gmail_add_label: gmailAddLabelTool, + gmail_remove_label: gmailRemoveLabelTool, whatsapp_send_message: whatsappSendMessageTool, x_write: xWriteTool, x_read: xReadTool, @@ -345,6 +389,7 @@ export const tools: Record = { google_drive_get_content: googleDriveGetContentTool, google_drive_list: googleDriveListTool, google_drive_upload: googleDriveUploadTool, + google_drive_download: googleDriveDownloadTool, google_drive_create_folder: googleDriveCreateFolderTool, google_docs_read: googleDocsReadTool, google_docs_write: googleDocsWriteTool, @@ -415,9 +460,15 @@ export const tools: Record = { outlook_send: outlookSendTool, outlook_draft: outlookDraftTool, outlook_forward: outlookForwardTool, + outlook_move: outlookMoveTool, + outlook_mark_read: outlookMarkReadTool, + outlook_mark_unread: outlookMarkUnreadTool, + outlook_delete: outlookDeleteTool, + outlook_copy: outlookCopyTool, linear_read_issues: linearReadIssuesTool, linear_create_issue: linearCreateIssueTool, onedrive_create_folder: onedriveCreateFolderTool, + onedrive_download: onedriveDownloadTool, onedrive_list: onedriveListTool, onedrive_upload: onedriveUploadTool, microsoft_excel_read: microsoftExcelReadTool, diff --git a/apps/sim/tools/slack/add_reaction.ts b/apps/sim/tools/slack/add_reaction.ts new file mode 100644 index 000000000..2dfd0df3b --- /dev/null +++ b/apps/sim/tools/slack/add_reaction.ts @@ -0,0 +1,108 @@ +import type { SlackAddReactionParams, SlackAddReactionResponse } from '@/tools/slack/types' +import type { ToolConfig } from '@/tools/types' + +export const slackAddReactionTool: ToolConfig = { + id: 'slack_add_reaction', + name: 'Slack Add Reaction', + description: 'Add an emoji reaction to a Slack message', + version: '1.0.0', + + oauth: { + required: true, + provider: 'slack', + additionalScopes: ['reactions:write'], + }, + + params: { + authMethod: { + type: 'string', + required: false, + visibility: 'user-only', + description: 'Authentication method: oauth or bot_token', + }, + botToken: { + type: 'string', + required: false, + visibility: 'user-only', + description: 'Bot token for Custom Bot', + }, + accessToken: { + type: 'string', + required: false, + visibility: 'hidden', + description: 'OAuth access token or bot token for Slack API', + }, + channel: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Channel ID where the message was posted (e.g., C1234567890)', + }, + timestamp: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Timestamp of the message to react to (e.g., 1405894322.002768)', + }, + name: { + type: 'string', + required: true, + visibility: 
'user-or-llm', + description: 'Name of the emoji reaction (without colons, e.g., thumbsup, heart, eyes)', + }, + }, + + request: { + url: '/api/tools/slack/add-reaction', + method: 'POST', + headers: () => ({ + 'Content-Type': 'application/json', + }), + body: (params: SlackAddReactionParams) => ({ + accessToken: params.accessToken || params.botToken, + channel: params.channel, + timestamp: params.timestamp, + name: params.name, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!data.success) { + return { + success: false, + output: { + content: data.error || 'Failed to add reaction', + metadata: { + channel: '', + timestamp: '', + reaction: '', + }, + }, + error: data.error, + } + } + + return { + success: true, + output: { + content: data.output.content, + metadata: data.output.metadata, + }, + } + }, + + outputs: { + content: { type: 'string', description: 'Success message' }, + metadata: { + type: 'object', + description: 'Reaction metadata', + properties: { + channel: { type: 'string', description: 'Channel ID' }, + timestamp: { type: 'string', description: 'Message timestamp' }, + reaction: { type: 'string', description: 'Emoji reaction name' }, + }, + }, + }, +} diff --git a/apps/sim/tools/slack/delete_message.ts b/apps/sim/tools/slack/delete_message.ts new file mode 100644 index 000000000..ef369d62b --- /dev/null +++ b/apps/sim/tools/slack/delete_message.ts @@ -0,0 +1,102 @@ +import type { SlackDeleteMessageParams, SlackDeleteMessageResponse } from '@/tools/slack/types' +import type { ToolConfig } from '@/tools/types' + +export const slackDeleteMessageTool: ToolConfig< + SlackDeleteMessageParams, + SlackDeleteMessageResponse +> = { + id: 'slack_delete_message', + name: 'Slack Delete Message', + description: 'Delete a message previously sent by the bot in Slack', + version: '1.0.0', + + oauth: { + required: true, + provider: 'slack', + additionalScopes: ['chat:write'], + }, + + params: { + authMethod: { + type: 'string', + required: false, + visibility: 'user-only', + description: 'Authentication method: oauth or bot_token', + }, + botToken: { + type: 'string', + required: false, + visibility: 'user-only', + description: 'Bot token for Custom Bot', + }, + accessToken: { + type: 'string', + required: false, + visibility: 'hidden', + description: 'OAuth access token or bot token for Slack API', + }, + channel: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Channel ID where the message was posted (e.g., C1234567890)', + }, + timestamp: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Timestamp of the message to delete (e.g., 1405894322.002768)', + }, + }, + + request: { + url: '/api/tools/slack/delete-message', + method: 'POST', + headers: () => ({ + 'Content-Type': 'application/json', + }), + body: (params: SlackDeleteMessageParams) => ({ + accessToken: params.accessToken || params.botToken, + channel: params.channel, + timestamp: params.timestamp, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!data.success) { + return { + success: false, + output: { + content: data.error || 'Failed to delete message', + metadata: { + channel: '', + timestamp: '', + }, + }, + error: data.error, + } + } + + return { + success: true, + output: { + content: data.output.content, + metadata: data.output.metadata, + }, + } + }, + + outputs: { + content: { type: 'string', description: 'Success message' }, + metadata: 
{ + type: 'object', + description: 'Deleted message metadata', + properties: { + channel: { type: 'string', description: 'Channel ID' }, + timestamp: { type: 'string', description: 'Message timestamp' }, + }, + }, + }, +} diff --git a/apps/sim/tools/slack/download.ts b/apps/sim/tools/slack/download.ts new file mode 100644 index 000000000..cd658dde4 --- /dev/null +++ b/apps/sim/tools/slack/download.ts @@ -0,0 +1,151 @@ +import { createLogger } from '@/lib/logs/console/logger' +import type { SlackDownloadParams, SlackDownloadResponse } from '@/tools/slack/types' +import type { ToolConfig } from '@/tools/types' + +const logger = createLogger('SlackDownloadTool') + +export const slackDownloadTool: ToolConfig = { + id: 'slack_download', + name: 'Download File from Slack', + description: 'Download a file from Slack', + version: '1.0', + + oauth: { + required: true, + provider: 'slack', + additionalScopes: ['files:read'], + }, + + params: { + authMethod: { + type: 'string', + required: false, + visibility: 'user-only', + description: 'Authentication method: oauth or bot_token', + }, + botToken: { + type: 'string', + required: false, + visibility: 'user-only', + description: 'Bot token for Custom Bot', + }, + accessToken: { + type: 'string', + required: false, + visibility: 'hidden', + description: 'OAuth access token or bot token for Slack API', + }, + fileId: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'The ID of the file to download', + }, + fileName: { + type: 'string', + required: false, + visibility: 'user-only', + description: 'Optional filename override', + }, + }, + + request: { + url: (params) => `https://slack.com/api/files.info?file=${params.fileId}`, + method: 'GET', + headers: (params) => ({ + Authorization: `Bearer ${params.accessToken || params.botToken}`, + }), + }, + + transformResponse: async (response: Response, params?: SlackDownloadParams) => { + try { + if (!response.ok) { + const errorDetails = await response.json().catch(() => ({})) + logger.error('Failed to get file info from Slack', { + status: response.status, + statusText: response.statusText, + error: errorDetails, + }) + throw new Error(errorDetails.error || 'Failed to get file info') + } + + const data = await response.json() + + if (!data.ok) { + logger.error('Slack API returned error', { + error: data.error, + }) + throw new Error(data.error || 'Slack API error') + } + + const file = data.file + const fileId = file.id + const fileName = file.name + const mimeType = file.mimetype || 'application/octet-stream' + const urlPrivate = file.url_private + const authToken = params?.accessToken || params?.botToken || '' + + if (!urlPrivate) { + throw new Error('File does not have a download URL') + } + + logger.info('Downloading file from Slack', { + fileId, + fileName, + mimeType, + }) + + const downloadResponse = await fetch(urlPrivate, { + headers: { + Authorization: `Bearer ${authToken}`, + }, + }) + + if (!downloadResponse.ok) { + logger.error('Failed to download file content', { + status: downloadResponse.status, + statusText: downloadResponse.statusText, + }) + throw new Error('Failed to download file content') + } + + const arrayBuffer = await downloadResponse.arrayBuffer() + const fileBuffer = Buffer.from(arrayBuffer) + + const resolvedName = params?.fileName || fileName || 'download' + + logger.info('File downloaded successfully', { + fileId, + name: resolvedName, + size: fileBuffer.length, + mimeType, + }) + + // Convert buffer to base64 string for proper JSON serialization + 
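+      // (Hypothetical downstream decode, assuming the consumer receives this base64 string:
+      // Buffer.from(base64Data, 'base64') restores the original file bytes and length.)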
// This ensures the file data survives the proxy round-trip + const base64Data = fileBuffer.toString('base64') + + return { + success: true, + output: { + file: { + name: resolvedName, + mimeType, + data: base64Data, + size: fileBuffer.length, + }, + }, + } + } catch (error: any) { + logger.error('Error in transform response', { + error: error.message, + stack: error.stack, + }) + throw error + } + }, + + outputs: { + file: { type: 'file', description: 'Downloaded file stored in execution files' }, + }, +} diff --git a/apps/sim/tools/slack/index.ts b/apps/sim/tools/slack/index.ts index 0355b1478..479520fbc 100644 --- a/apps/sim/tools/slack/index.ts +++ b/apps/sim/tools/slack/index.ts @@ -1,5 +1,17 @@ +import { slackAddReactionTool } from '@/tools/slack/add_reaction' import { slackCanvasTool } from '@/tools/slack/canvas' +import { slackDeleteMessageTool } from '@/tools/slack/delete_message' +import { slackDownloadTool } from '@/tools/slack/download' import { slackMessageTool } from '@/tools/slack/message' import { slackMessageReaderTool } from '@/tools/slack/message_reader' +import { slackUpdateMessageTool } from '@/tools/slack/update_message' -export { slackMessageTool, slackCanvasTool, slackMessageReaderTool } +export { + slackMessageTool, + slackCanvasTool, + slackMessageReaderTool, + slackDownloadTool, + slackUpdateMessageTool, + slackDeleteMessageTool, + slackAddReactionTool, +} diff --git a/apps/sim/tools/slack/message.ts b/apps/sim/tools/slack/message.ts index db52e6338..d2348a92d 100644 --- a/apps/sim/tools/slack/message.ts +++ b/apps/sim/tools/slack/message.ts @@ -51,6 +51,12 @@ export const slackMessageTool: ToolConfig ({ + id: file.id, + name: file.name, + mimetype: file.mimetype, + size: file.size, + url_private: file.url_private, + })), })) return { @@ -122,6 +129,19 @@ export const slackMessageReaderTool: ToolConfig< user: { type: 'string' }, type: { type: 'string' }, subtype: { type: 'string' }, + files: { + type: 'array', + items: { + type: 'object', + properties: { + id: { type: 'string' }, + name: { type: 'string' }, + mimetype: { type: 'string' }, + size: { type: 'number' }, + url_private: { type: 'string' }, + }, + }, + }, }, }, }, diff --git a/apps/sim/tools/slack/types.ts b/apps/sim/tools/slack/types.ts index 0a924083b..a5bdbef5d 100644 --- a/apps/sim/tools/slack/types.ts +++ b/apps/sim/tools/slack/types.ts @@ -27,6 +27,28 @@ export interface SlackMessageReaderParams extends SlackBaseParams { latest?: string } +export interface SlackDownloadParams extends SlackBaseParams { + fileId: string + fileName?: string +} + +export interface SlackUpdateMessageParams extends SlackBaseParams { + channel: string + timestamp: string + text: string +} + +export interface SlackDeleteMessageParams extends SlackBaseParams { + channel: string + timestamp: string +} + +export interface SlackAddReactionParams extends SlackBaseParams { + channel: string + timestamp: string + name: string +} + export interface SlackMessageResponse extends ToolResponse { output: { ts: string @@ -50,8 +72,65 @@ export interface SlackMessageReaderResponse extends ToolResponse { user: string type: string subtype?: string + files?: Array<{ + id: string + name: string + mimetype: string + size: number + url_private?: string + }> }> } } -export type SlackResponse = SlackCanvasResponse | SlackMessageReaderResponse | SlackMessageResponse +export interface SlackDownloadResponse extends ToolResponse { + output: { + file: { + name: string + mimeType: string + data: Buffer | string // Buffer for direct use, string for 
base64-encoded data + size: number + } + } +} + +export interface SlackUpdateMessageResponse extends ToolResponse { + output: { + content: string + metadata: { + channel: string + timestamp: string + text: string + } + } +} + +export interface SlackDeleteMessageResponse extends ToolResponse { + output: { + content: string + metadata: { + channel: string + timestamp: string + } + } +} + +export interface SlackAddReactionResponse extends ToolResponse { + output: { + content: string + metadata: { + channel: string + timestamp: string + reaction: string + } + } +} + +export type SlackResponse = + | SlackCanvasResponse + | SlackMessageReaderResponse + | SlackMessageResponse + | SlackDownloadResponse + | SlackUpdateMessageResponse + | SlackDeleteMessageResponse + | SlackAddReactionResponse diff --git a/apps/sim/tools/slack/update_message.ts b/apps/sim/tools/slack/update_message.ts new file mode 100644 index 000000000..b7c7fefe7 --- /dev/null +++ b/apps/sim/tools/slack/update_message.ts @@ -0,0 +1,111 @@ +import type { SlackUpdateMessageParams, SlackUpdateMessageResponse } from '@/tools/slack/types' +import type { ToolConfig } from '@/tools/types' + +export const slackUpdateMessageTool: ToolConfig< + SlackUpdateMessageParams, + SlackUpdateMessageResponse +> = { + id: 'slack_update_message', + name: 'Slack Update Message', + description: 'Update a message previously sent by the bot in Slack', + version: '1.0.0', + + oauth: { + required: true, + provider: 'slack', + additionalScopes: ['chat:write'], + }, + + params: { + authMethod: { + type: 'string', + required: false, + visibility: 'user-only', + description: 'Authentication method: oauth or bot_token', + }, + botToken: { + type: 'string', + required: false, + visibility: 'user-only', + description: 'Bot token for Custom Bot', + }, + accessToken: { + type: 'string', + required: false, + visibility: 'hidden', + description: 'OAuth access token or bot token for Slack API', + }, + channel: { + type: 'string', + required: true, + visibility: 'user-only', + description: 'Channel ID where the message was posted (e.g., C1234567890)', + }, + timestamp: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'Timestamp of the message to update (e.g., 1405894322.002768)', + }, + text: { + type: 'string', + required: true, + visibility: 'user-or-llm', + description: 'New message text (supports Slack mrkdwn formatting)', + }, + }, + + request: { + url: '/api/tools/slack/update-message', + method: 'POST', + headers: () => ({ + 'Content-Type': 'application/json', + }), + body: (params: SlackUpdateMessageParams) => ({ + accessToken: params.accessToken || params.botToken, + channel: params.channel, + timestamp: params.timestamp, + text: params.text, + }), + }, + + transformResponse: async (response: Response) => { + const data = await response.json() + + if (!data.success) { + return { + success: false, + output: { + content: data.error || 'Failed to update message', + metadata: { + channel: '', + timestamp: '', + text: '', + }, + }, + error: data.error, + } + } + + return { + success: true, + output: { + content: data.output.content, + metadata: data.output.metadata, + }, + } + }, + + outputs: { + content: { type: 'string', description: 'Success message' }, + metadata: { + type: 'object', + description: 'Updated message metadata', + properties: { + channel: { type: 'string', description: 'Channel ID' }, + timestamp: { type: 'string', description: 'Message timestamp' }, + text: { type: 'string', description: 'Updated message text' }, + 
}, + }, + }, +} diff --git a/apps/sim/triggers/slack/webhook.ts b/apps/sim/triggers/slack/webhook.ts index a674d7167..38023f6ae 100644 --- a/apps/sim/triggers/slack/webhook.ts +++ b/apps/sim/triggers/slack/webhook.ts @@ -71,6 +71,7 @@ export const slackWebhookTrigger: TriggerConfig = { user: 'U0123456789', text: '<@U0BOTUSER123> Hello from Slack!', ts: '1234567890.123456', + thread_ts: '1234567890.000000', channel_type: 'channel', }, team_id: 'T0123456789', @@ -115,7 +116,11 @@ export const slackWebhookTrigger: TriggerConfig = { }, timestamp: { type: 'string', - description: 'Event timestamp', + description: 'Message timestamp from the triggering event', + }, + thread_ts: { + type: 'string', + description: 'Parent thread timestamp (if message is in a thread)', }, team_id: { type: 'string', diff --git a/bun.lock b/bun.lock index cbe645bb8..0fd0619c6 100644 --- a/bun.lock +++ b/bun.lock @@ -131,7 +131,7 @@ "officeparser": "^5.2.0", "openai": "^4.91.1", "papaparse": "5.5.3", - "pdf-parse": "1.1.1", + "pdf-parse": "2.4.5", "posthog-js": "1.268.9", "posthog-node": "5.9.2", "prismjs": "^1.30.0", @@ -2528,7 +2528,7 @@ "pathval": ["pathval@2.0.1", "", {}, "sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ=="], - "pdf-parse": ["pdf-parse@1.1.1", "", { "dependencies": { "debug": "^3.1.0", "node-ensure": "^0.0.0" } }, "sha512-v6ZJ/efsBpGrGGknjtq9J/oC8tZWq0KWL5vQrk2GlzLEQPUDB1ex+13Rmidl1neNN358Jn9EHZw5y07FFtaC7A=="], + "pdf-parse": ["pdf-parse@2.4.5", "", { "dependencies": { "@napi-rs/canvas": "0.1.80", "pdfjs-dist": "5.4.296" }, "bin": { "pdf-parse": "bin/cli.mjs" } }, "sha512-mHU89HGh7v+4u2ubfnevJ03lmPgQ5WU4CxAVmTSh/sxVTEDYd1er/dKS/A6vg77NX47KTEoihq8jZBLr8Cxuwg=="], "pdfjs-dist": ["pdfjs-dist@5.4.149", "", { "optionalDependencies": { "@napi-rs/canvas": "^0.1.77" } }, "sha512-Xe8/1FMJEQPUVSti25AlDpwpUm2QAVmNOpFP0SIahaPIOKBKICaefbzogLdwey3XGGoaP4Lb9wqiw2e9Jqp0LA=="], @@ -3524,7 +3524,7 @@ "parse-entities/@types/unist": ["@types/unist@2.0.11", "", {}, "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA=="], - "pdf-parse/debug": ["debug@3.2.7", "", { "dependencies": { "ms": "^2.1.1" } }, "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ=="], + "pdf-parse/pdfjs-dist": ["pdfjs-dist@5.4.296", "", { "optionalDependencies": { "@napi-rs/canvas": "^0.1.80" } }, "sha512-DlOzet0HO7OEnmUmB6wWGJrrdvbyJKftI1bhMitK7O2N8W2gc757yyYBbINy9IDafXAV9wmKr9t7xsTaNKRG5Q=="], "playwright/fsevents": ["fsevents@2.3.2", "", { "os": "darwin" }, "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA=="],