Mirror of https://github.com/simstudioai/sim.git
v0.5.36: hitl improvements, opengraph, slack fixes, one-click unsubscribe, auth checks, new db indexes
@@ -6,7 +6,10 @@ import { source } from '@/lib/source'

 export const revalidate = false

-export async function GET(_req: NextRequest, { params }: { params: Promise<{ slug?: string[] }> }) {
+export async function GET(
+  _request: NextRequest,
+  { params }: { params: Promise<{ slug?: string[] }> }
+) {
   const { slug } = await params

   let lang: (typeof i18n.languages)[number] = i18n.defaultLanguage
@@ -120,117 +120,117 @@ import {
 type IconComponent = ComponentType<SVGProps<SVGSVGElement>>

 export const blockTypeToIconMap: Record<string, IconComponent> = {
-  zoom: ZoomIcon,
-  zep: ZepIcon,
-  zendesk: ZendeskIcon,
-  youtube: YouTubeIcon,
-  x: xIcon,
-  wordpress: WordpressIcon,
-  wikipedia: WikipediaIcon,
-  whatsapp: WhatsAppIcon,
-  webflow: WebflowIcon,
-  wealthbox: WealthboxIcon,
-  vision: EyeIcon,
-  video_generator: VideoIcon,
-  typeform: TypeformIcon,
-  twilio_voice: TwilioIcon,
-  twilio_sms: TwilioIcon,
-  tts: TTSIcon,
-  trello: TrelloIcon,
-  translate: TranslateIcon,
-  thinking: BrainIcon,
-  telegram: TelegramIcon,
-  tavily: TavilyIcon,
-  supabase: SupabaseIcon,
-  stt: STTIcon,
-  stripe: StripeIcon,
-  stagehand: StagehandIcon,
-  ssh: SshIcon,
-  sqs: SQSIcon,
-  spotify: SpotifyIcon,
-  smtp: SmtpIcon,
-  slack: SlackIcon,
-  shopify: ShopifyIcon,
-  sharepoint: MicrosoftSharepointIcon,
-  sftp: SftpIcon,
-  servicenow: ServiceNowIcon,
-  serper: SerperIcon,
-  sentry: SentryIcon,
-  sendgrid: SendgridIcon,
-  search: SearchIcon,
-  salesforce: SalesforceIcon,
-  s3: S3Icon,
-  resend: ResendIcon,
-  reddit: RedditIcon,
-  rds: RDSIcon,
-  qdrant: QdrantIcon,
-  posthog: PosthogIcon,
-  postgresql: PostgresIcon,
-  polymarket: PolymarketIcon,
-  pipedrive: PipedriveIcon,
-  pinecone: PineconeIcon,
-  perplexity: PerplexityIcon,
-  parallel_ai: ParallelIcon,
-  outlook: OutlookIcon,
-  openai: OpenAIIcon,
-  onedrive: MicrosoftOneDriveIcon,
-  notion: NotionIcon,
-  neo4j: Neo4jIcon,
-  mysql: MySQLIcon,
-  mongodb: MongoDBIcon,
-  mistral_parse: MistralIcon,
-  microsoft_teams: MicrosoftTeamsIcon,
-  microsoft_planner: MicrosoftPlannerIcon,
-  microsoft_excel: MicrosoftExcelIcon,
-  memory: BrainIcon,
-  mem0: Mem0Icon,
-  mailgun: MailgunIcon,
-  mailchimp: MailchimpIcon,
-  linkup: LinkupIcon,
-  linkedin: LinkedInIcon,
-  linear: LinearIcon,
-  knowledge: PackageSearchIcon,
-  kalshi: KalshiIcon,
-  jira: JiraIcon,
-  jina: JinaAIIcon,
-  intercom: IntercomIcon,
-  incidentio: IncidentioIcon,
-  image_generator: ImageIcon,
-  hunter: HunterIOIcon,
-  huggingface: HuggingFaceIcon,
-  hubspot: HubspotIcon,
-  grafana: GrafanaIcon,
-  google_vault: GoogleVaultIcon,
-  google_slides: GoogleSlidesIcon,
-  google_sheets: GoogleSheetsIcon,
-  google_groups: GoogleGroupsIcon,
-  google_forms: GoogleFormsIcon,
-  google_drive: GoogleDriveIcon,
-  google_docs: GoogleDocsIcon,
-  google_calendar: GoogleCalendarIcon,
-  google_search: GoogleIcon,
-  gmail: GmailIcon,
-  gitlab: GitLabIcon,
-  github: GithubIcon,
-  firecrawl: FirecrawlIcon,
-  file: DocumentIcon,
-  exa: ExaAIIcon,
-  elevenlabs: ElevenLabsIcon,
-  elasticsearch: ElasticsearchIcon,
-  dynamodb: DynamoDBIcon,
-  duckduckgo: DuckDuckGoIcon,
-  dropbox: DropboxIcon,
-  discord: DiscordIcon,
-  datadog: DatadogIcon,
-  cursor: CursorIcon,
-  confluence: ConfluenceIcon,
-  clay: ClayIcon,
-  calendly: CalendlyIcon,
-  browser_use: BrowserUseIcon,
-  asana: AsanaIcon,
-  arxiv: ArxivIcon,
-  apollo: ApolloIcon,
-  apify: ApifyIcon,
-  airtable: AirtableIcon,
   ahrefs: AhrefsIcon,
+  airtable: AirtableIcon,
+  apify: ApifyIcon,
+  apollo: ApolloIcon,
+  arxiv: ArxivIcon,
+  asana: AsanaIcon,
+  browser_use: BrowserUseIcon,
+  calendly: CalendlyIcon,
+  clay: ClayIcon,
+  confluence: ConfluenceIcon,
+  cursor: CursorIcon,
+  datadog: DatadogIcon,
+  discord: DiscordIcon,
+  dropbox: DropboxIcon,
+  duckduckgo: DuckDuckGoIcon,
+  dynamodb: DynamoDBIcon,
+  elasticsearch: ElasticsearchIcon,
+  elevenlabs: ElevenLabsIcon,
+  exa: ExaAIIcon,
+  file: DocumentIcon,
+  firecrawl: FirecrawlIcon,
+  github: GithubIcon,
+  gitlab: GitLabIcon,
+  gmail: GmailIcon,
+  google_calendar: GoogleCalendarIcon,
+  google_docs: GoogleDocsIcon,
+  google_drive: GoogleDriveIcon,
+  google_forms: GoogleFormsIcon,
+  google_groups: GoogleGroupsIcon,
+  google_search: GoogleIcon,
+  google_sheets: GoogleSheetsIcon,
+  google_slides: GoogleSlidesIcon,
+  google_vault: GoogleVaultIcon,
+  grafana: GrafanaIcon,
+  hubspot: HubspotIcon,
+  huggingface: HuggingFaceIcon,
+  hunter: HunterIOIcon,
+  image_generator: ImageIcon,
+  incidentio: IncidentioIcon,
+  intercom: IntercomIcon,
+  jina: JinaAIIcon,
+  jira: JiraIcon,
+  kalshi: KalshiIcon,
+  knowledge: PackageSearchIcon,
+  linear: LinearIcon,
+  linkedin: LinkedInIcon,
+  linkup: LinkupIcon,
+  mailchimp: MailchimpIcon,
+  mailgun: MailgunIcon,
+  mem0: Mem0Icon,
+  memory: BrainIcon,
+  microsoft_excel: MicrosoftExcelIcon,
+  microsoft_planner: MicrosoftPlannerIcon,
+  microsoft_teams: MicrosoftTeamsIcon,
+  mistral_parse: MistralIcon,
+  mongodb: MongoDBIcon,
+  mysql: MySQLIcon,
+  neo4j: Neo4jIcon,
+  notion: NotionIcon,
+  onedrive: MicrosoftOneDriveIcon,
+  openai: OpenAIIcon,
+  outlook: OutlookIcon,
+  parallel_ai: ParallelIcon,
+  perplexity: PerplexityIcon,
+  pinecone: PineconeIcon,
+  pipedrive: PipedriveIcon,
+  polymarket: PolymarketIcon,
+  postgresql: PostgresIcon,
+  posthog: PosthogIcon,
+  qdrant: QdrantIcon,
+  rds: RDSIcon,
+  reddit: RedditIcon,
+  resend: ResendIcon,
+  s3: S3Icon,
+  salesforce: SalesforceIcon,
+  search: SearchIcon,
+  sendgrid: SendgridIcon,
+  sentry: SentryIcon,
+  serper: SerperIcon,
+  servicenow: ServiceNowIcon,
+  sftp: SftpIcon,
+  sharepoint: MicrosoftSharepointIcon,
+  shopify: ShopifyIcon,
+  slack: SlackIcon,
+  smtp: SmtpIcon,
+  spotify: SpotifyIcon,
+  sqs: SQSIcon,
+  ssh: SshIcon,
+  stagehand: StagehandIcon,
+  stripe: StripeIcon,
+  stt: STTIcon,
+  supabase: SupabaseIcon,
+  tavily: TavilyIcon,
+  telegram: TelegramIcon,
+  thinking: BrainIcon,
+  translate: TranslateIcon,
+  trello: TrelloIcon,
+  tts: TTSIcon,
+  twilio_sms: TwilioIcon,
+  twilio_voice: TwilioIcon,
+  typeform: TypeformIcon,
+  video_generator: VideoIcon,
+  vision: EyeIcon,
+  wealthbox: WealthboxIcon,
+  webflow: WebflowIcon,
+  whatsapp: WhatsAppIcon,
+  wikipedia: WikipediaIcon,
+  wordpress: WordpressIcon,
+  x: xIcon,
+  youtube: YouTubeIcon,
+  zendesk: ZendeskIcon,
+  zep: ZepIcon,
+  zoom: ZoomIcon,
 }
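The hunk above only re-sorts `blockTypeToIconMap` alphabetically; every key still maps to the same icon component. For orientation, a minimal sketch of how such a lookup map is typically consumed — the helper and its fallback parameter are illustrative assumptions, not code from this commit:

```ts
import type { ComponentType, SVGProps } from 'react'

type IconComponent = ComponentType<SVGProps<SVGSVGElement>>

// Hypothetical consumer: resolve a block type to its icon, falling back when unknown.
function getBlockIcon(
  map: Record<string, IconComponent>,
  blockType: string,
  fallback: IconComponent
): IconComponent {
  return map[blockType] ?? fallback
}
```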
@@ -90,14 +90,20 @@ Ein Jira-Issue erstellen

 | Parameter | Typ | Erforderlich | Beschreibung |
 | --------- | ---- | -------- | ----------- |
-| `domain` | string | Ja | Ihre Jira-Domain (z.B. ihrfirma.atlassian.net) |
+| `domain` | string | Ja | Ihre Jira-Domain \(z.B. ihrfirma.atlassian.net\) |
 | `projectId` | string | Ja | Projekt-ID für das Issue |
 | `summary` | string | Ja | Zusammenfassung für das Issue |
 | `description` | string | Nein | Beschreibung für das Issue |
-| `priority` | string | Nein | Priorität für das Issue |
+| `priority` | string | Nein | Prioritäts-ID oder -Name für das Issue \(z.B. "10000" oder "High"\) |
-| `assignee` | string | Nein | Bearbeiter für das Issue |
+| `assignee` | string | Nein | Account-ID des Bearbeiters für das Issue |
-| `cloudId` | string | Nein | Jira Cloud-ID für die Instanz. Wenn nicht angegeben, wird sie anhand der Domain abgerufen. |
+| `cloudId` | string | Nein | Jira Cloud-ID für die Instanz. Wenn nicht angegeben, wird sie über die Domain abgerufen. |
-| `issueType` | string | Ja | Art des zu erstellenden Issues (z.B. Task, Story) |
+| `issueType` | string | Ja | Typ des zu erstellenden Issues \(z.B. Task, Story\) |
+| `labels` | array | Nein | Labels für das Issue \(Array von Label-Namen\) |
+| `duedate` | string | Nein | Fälligkeitsdatum für das Issue \(Format: YYYY-MM-DD\) |
+| `reporter` | string | Nein | Account-ID des Melders für das Issue |
+| `environment` | string | Nein | Umgebungsinformationen für das Issue |
+| `customFieldId` | string | Nein | Benutzerdefinierte Feld-ID \(z.B. customfield_10001\) |
+| `customFieldValue` | string | Nein | Wert für das benutzerdefinierte Feld |

 #### Ausgabe

@@ -107,6 +113,7 @@ Ein Jira-Issue erstellen
 | `issueKey` | string | Erstellter Issue-Key \(z.B. PROJ-123\) |
 | `summary` | string | Issue-Zusammenfassung |
 | `url` | string | URL zum erstellten Issue |
+| `assigneeId` | string | Account-ID des zugewiesenen Benutzers \(falls zugewiesen\) |

 ### `jira_bulk_read`

@@ -520,6 +527,30 @@ Einen Beobachter von einem Jira-Issue entfernen
 | `issueKey` | string | Issue-Key |
 | `watcherAccountId` | string | Account-ID des entfernten Beobachters |

+### `jira_get_users`
+
+Jira-Benutzer abrufen. Wenn eine Account-ID angegeben wird, wird ein einzelner Benutzer zurückgegeben. Andernfalls wird eine Liste aller Benutzer zurückgegeben.
+
+#### Eingabe
+
+| Parameter | Typ | Erforderlich | Beschreibung |
+| --------- | ---- | -------- | ----------- |
+| `domain` | string | Ja | Ihre Jira-Domain \(z.B. ihrfirma.atlassian.net\) |
+| `accountId` | string | Nein | Optionale Account-ID, um einen bestimmten Benutzer abzurufen. Wenn nicht angegeben, werden alle Benutzer zurückgegeben. |
+| `startAt` | number | Nein | Der Index des ersten zurückzugebenden Benutzers \(für Paginierung, Standard: 0\) |
+| `maxResults` | number | Nein | Maximale Anzahl der zurückzugebenden Benutzer \(Standard: 50\) |
+| `cloudId` | string | Nein | Jira Cloud-ID für die Instanz. Wenn nicht angegeben, wird sie anhand der Domain abgerufen. |
+
+#### Ausgabe
+
+| Parameter | Typ | Beschreibung |
+| --------- | ---- | ----------- |
+| `ts` | string | Zeitstempel der Operation |
+| `users` | json | Array von Benutzern mit accountId, displayName, emailAddress, active-Status und avatarUrls |
+| `total` | number | Gesamtanzahl der zurückgegebenen Benutzer |
+| `startAt` | number | Startindex für Paginierung |
+| `maxResults` | number | Maximale Ergebnisse pro Seite |
+
 ## Hinweise

 - Kategorie: `tools`
@@ -109,12 +109,12 @@ Lesen Sie die neuesten Nachrichten aus Slack-Kanälen. Rufen Sie den Konversatio
 | Parameter | Typ | Erforderlich | Beschreibung |
 | --------- | ---- | -------- | ----------- |
 | `authMethod` | string | Nein | Authentifizierungsmethode: oauth oder bot_token |
-| `botToken` | string | Nein | Bot-Token für benutzerdefinierten Bot |
+| `botToken` | string | Nein | Bot-Token für Custom Bot |
 | `channel` | string | Nein | Slack-Kanal, aus dem Nachrichten gelesen werden sollen \(z.B. #general\) |
 | `userId` | string | Nein | Benutzer-ID für DM-Konversation \(z.B. U1234567890\) |
-| `limit` | number | Nein | Anzahl der abzurufenden Nachrichten \(Standard: 10, max: 100\) |
+| `limit` | number | Nein | Anzahl der abzurufenden Nachrichten \(Standard: 10, max: 15\) |
-| `oldest` | string | Nein | Beginn des Zeitraums \(Zeitstempel\) |
+| `oldest` | string | Nein | Beginn des Zeitbereichs \(Zeitstempel\) |
-| `latest` | string | Nein | Ende des Zeitraums \(Zeitstempel\) |
+| `latest` | string | Nein | Ende des Zeitbereichs \(Zeitstempel\) |

 #### Ausgabe

@@ -97,10 +97,16 @@ Write a Jira issue
 | `projectId` | string | Yes | Project ID for the issue |
 | `summary` | string | Yes | Summary for the issue |
 | `description` | string | No | Description for the issue |
-| `priority` | string | No | Priority for the issue |
+| `priority` | string | No | Priority ID or name for the issue \(e.g., "10000" or "High"\) |
-| `assignee` | string | No | Assignee for the issue |
+| `assignee` | string | No | Assignee account ID for the issue |
 | `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
 | `issueType` | string | Yes | Type of issue to create \(e.g., Task, Story\) |
+| `labels` | array | No | Labels for the issue \(array of label names\) |
+| `duedate` | string | No | Due date for the issue \(format: YYYY-MM-DD\) |
+| `reporter` | string | No | Reporter account ID for the issue |
+| `environment` | string | No | Environment information for the issue |
+| `customFieldId` | string | No | Custom field ID \(e.g., customfield_10001\) |
+| `customFieldValue` | string | No | Value for the custom field |

 #### Output

@@ -110,6 +116,7 @@ Write a Jira issue
 | `issueKey` | string | Created issue key \(e.g., PROJ-123\) |
 | `summary` | string | Issue summary |
 | `url` | string | URL to the created issue |
+| `assigneeId` | string | Account ID of the assigned user \(if assigned\) |

 ### `jira_bulk_read`

@@ -523,6 +530,30 @@ Remove a watcher from a Jira issue
 | `issueKey` | string | Issue key |
 | `watcherAccountId` | string | Removed watcher account ID |

+### `jira_get_users`
+
+Get Jira users. If an account ID is provided, returns a single user. Otherwise, returns a list of all users.
+
+#### Input
+
+| Parameter | Type | Required | Description |
+| --------- | ---- | -------- | ----------- |
+| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
+| `accountId` | string | No | Optional account ID to get a specific user. If not provided, returns all users. |
+| `startAt` | number | No | The index of the first user to return \(for pagination, default: 0\) |
+| `maxResults` | number | No | Maximum number of users to return \(default: 50\) |
+| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
+
+#### Output
+
+| Parameter | Type | Description |
+| --------- | ---- | ----------- |
+| `ts` | string | Timestamp of the operation |
+| `users` | json | Array of users with accountId, displayName, emailAddress, active status, and avatarUrls |
+| `total` | number | Total number of users returned |
+| `startAt` | number | Pagination start index |
+| `maxResults` | number | Maximum results per page |
+
 ## Notes
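Across all locales, the documentation hunks above make the same three changes: `jira_write` gains optional `labels`, `duedate`, `reporter`, `environment`, `customFieldId`, and `customFieldValue` inputs plus an `assigneeId` output, the `priority` and `assignee` descriptions now call for a priority ID/name and an account ID, and a new `jira_get_users` tool is documented. A hedged sketch of inputs matching the English tables; the values and the plain object-literal shape are illustrative only, not the tool's actual invocation API:

```ts
// Example jira_write inputs matching the documented parameter table (values are made up).
const jiraWriteParams = {
  domain: 'yourcompany.atlassian.net',
  projectId: '10001',
  issueType: 'Task',
  summary: 'Investigate webhook retries',
  priority: '10000', // priority ID or name, e.g. "10000" or "High"
  assignee: '5b10ac8d82e05b22cc7d4ef5', // assignee account ID
  labels: ['backend', 'webhooks'],
  duedate: '2025-02-28', // format: YYYY-MM-DD
  customFieldId: 'customfield_10001',
  customFieldValue: 'Q1 rollout',
}

// Example jira_get_users inputs: omit accountId to list users, page with startAt/maxResults.
const jiraGetUsersParams = {
  domain: 'yourcompany.atlassian.net',
  startAt: 0,
  maxResults: 50,
}
```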
@@ -114,7 +114,7 @@ Read the latest messages from Slack channels. Retrieve conversation history with
 | `botToken` | string | No | Bot token for Custom Bot |
 | `channel` | string | No | Slack channel to read messages from \(e.g., #general\) |
 | `userId` | string | No | User ID for DM conversation \(e.g., U1234567890\) |
-| `limit` | number | No | Number of messages to retrieve \(default: 10, max: 100\) |
+| `limit` | number | No | Number of messages to retrieve \(default: 10, max: 15\) |
 | `oldest` | string | No | Start of time range \(timestamp\) |
 | `latest` | string | No | End of time range \(timestamp\) |

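The Slack documentation above (and its localized counterparts below) lowers the documented cap on `limit` from 100 to 15 messages per read. A one-line sketch of the corresponding clamp, assuming the documented default of 10; the helper name is an assumption for illustration:

```ts
// Clamp a requested Slack message count to the documented bounds (default 10, max 15).
const resolveSlackReadLimit = (requested?: number): number =>
  Math.min(Math.max(requested ?? 10, 1), 15)
```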
@@ -89,24 +89,31 @@ Escribir una incidencia de Jira
 #### Entrada

 | Parámetro | Tipo | Obligatorio | Descripción |
-| --------- | ---- | -------- | ----------- |
+| --------- | ---- | ----------- | ----------- |
 | `domain` | string | Sí | Tu dominio de Jira \(p. ej., tuempresa.atlassian.net\) |
 | `projectId` | string | Sí | ID del proyecto para la incidencia |
 | `summary` | string | Sí | Resumen de la incidencia |
 | `description` | string | No | Descripción de la incidencia |
-| `priority` | string | No | Prioridad de la incidencia |
+| `priority` | string | No | ID o nombre de prioridad para la incidencia \(p. ej., "10000" o "Alta"\) |
-| `assignee` | string | No | Asignado para la incidencia |
+| `assignee` | string | No | ID de cuenta del asignado para la incidencia |
-| `cloudId` | string | No | ID de Jira Cloud para la instancia. Si no se proporciona, se obtendrá utilizando el dominio. |
+| `cloudId` | string | No | ID de Jira Cloud para la instancia. Si no se proporciona, se obtendrá usando el dominio. |
 | `issueType` | string | Sí | Tipo de incidencia a crear \(p. ej., Tarea, Historia\) |
+| `labels` | array | No | Etiquetas para la incidencia \(array de nombres de etiquetas\) |
+| `duedate` | string | No | Fecha de vencimiento para la incidencia \(formato: AAAA-MM-DD\) |
+| `reporter` | string | No | ID de cuenta del informador para la incidencia |
+| `environment` | string | No | Información del entorno para la incidencia |
+| `customFieldId` | string | No | ID del campo personalizado \(p. ej., customfield_10001\) |
+| `customFieldValue` | string | No | Valor para el campo personalizado |

 #### Salida

 | Parámetro | Tipo | Descripción |
 | --------- | ---- | ----------- |
 | `ts` | string | Marca de tiempo de la operación |
-| `issueKey` | string | Clave de la incidencia creada (p. ej., PROJ-123) |
+| `issueKey` | string | Clave de la incidencia creada \(p. ej., PROJ-123\) |
 | `summary` | string | Resumen de la incidencia |
 | `url` | string | URL de la incidencia creada |
+| `assigneeId` | string | ID de cuenta del usuario asignado \(si está asignado\) |

 ### `jira_bulk_read`

@@ -520,6 +527,30 @@ Eliminar un observador de una incidencia de Jira
 | `issueKey` | string | Clave de incidencia |
 | `watcherAccountId` | string | ID de cuenta del observador eliminado |

+### `jira_get_users`
+
+Obtener usuarios de Jira. Si se proporciona un ID de cuenta, devuelve un solo usuario. De lo contrario, devuelve una lista de todos los usuarios.
+
+#### Entrada
+
+| Parámetro | Tipo | Obligatorio | Descripción |
+| --------- | ---- | ----------- | ----------- |
+| `domain` | string | Sí | Tu dominio de Jira \(p. ej., tuempresa.atlassian.net\) |
+| `accountId` | string | No | ID de cuenta opcional para obtener un usuario específico. Si no se proporciona, devuelve todos los usuarios. |
+| `startAt` | number | No | El índice del primer usuario a devolver \(para paginación, predeterminado: 0\) |
+| `maxResults` | number | No | Número máximo de usuarios a devolver \(predeterminado: 50\) |
+| `cloudId` | string | No | ID de Jira Cloud para la instancia. Si no se proporciona, se obtendrá usando el dominio. |
+
+#### Salida
+
+| Parámetro | Tipo | Descripción |
+| --------- | ---- | ----------- |
+| `ts` | string | Marca de tiempo de la operación |
+| `users` | json | Array de usuarios con accountId, displayName, emailAddress, estado activo y avatarUrls |
+| `total` | number | Número total de usuarios devueltos |
+| `startAt` | number | Índice de inicio de paginación |
+| `maxResults` | number | Máximo de resultados por página |
+
 ## Notas

 - Categoría: `tools`
@@ -111,8 +111,8 @@ Lee los últimos mensajes de los canales de Slack. Recupera el historial de conv
 | `authMethod` | string | No | Método de autenticación: oauth o bot_token |
 | `botToken` | string | No | Token del bot para Bot personalizado |
 | `channel` | string | No | Canal de Slack del que leer mensajes (p. ej., #general) |
-| `userId` | string | No | ID de usuario para conversación por MD (p. ej., U1234567890) |
+| `userId` | string | No | ID de usuario para conversación de mensaje directo (p. ej., U1234567890) |
-| `limit` | number | No | Número de mensajes a recuperar (predeterminado: 10, máx: 100) |
+| `limit` | number | No | Número de mensajes a recuperar (predeterminado: 10, máx: 15) |
 | `oldest` | string | No | Inicio del rango de tiempo (marca de tiempo) |
 | `latest` | string | No | Fin del rango de tiempo (marca de tiempo) |

@@ -89,15 +89,21 @@ Rédiger une demande Jira
 #### Entrée

 | Paramètre | Type | Obligatoire | Description |
-| --------- | ---- | -------- | ----------- |
+| --------- | ---- | ----------- | ----------- |
-| `domain` | string | Oui | Votre domaine Jira (ex. : votreentreprise.atlassian.net) |
+| `domain` | chaîne | Oui | Votre domaine Jira \(ex. : votreentreprise.atlassian.net\) |
-| `projectId` | string | Oui | ID du projet pour la demande |
+| `projectId` | chaîne | Oui | ID du projet pour le ticket |
-| `summary` | string | Oui | Résumé de la demande |
+| `summary` | chaîne | Oui | Résumé du ticket |
-| `description` | string | Non | Description de la demande |
+| `description` | chaîne | Non | Description du ticket |
-| `priority` | string | Non | Priorité de la demande |
+| `priority` | chaîne | Non | ID ou nom de la priorité du ticket \(ex. : "10000" ou "Haute"\) |
-| `assignee` | string | Non | Assigné de la demande |
+| `assignee` | chaîne | Non | ID de compte de l'assigné pour le ticket |
-| `cloudId` | string | Non | ID Jira Cloud pour l'instance. S'il n'est pas fourni, il sera récupéré à l'aide du domaine. |
+| `cloudId` | chaîne | Non | ID Cloud Jira pour l'instance. S'il n'est pas fourni, il sera récupéré à l'aide du domaine. |
-| `issueType` | string | Oui | Type de demande à créer (ex. : Tâche, Story) |
+| `issueType` | chaîne | Oui | Type de ticket à créer \(ex. : tâche, story\) |
+| `labels` | tableau | Non | Étiquettes pour le ticket \(tableau de noms d'étiquettes\) |
+| `duedate` | chaîne | Non | Date d'échéance du ticket \(format : AAAA-MM-JJ\) |
+| `reporter` | chaîne | Non | ID de compte du rapporteur pour le ticket |
+| `environment` | chaîne | Non | Informations d'environnement pour le ticket |
+| `customFieldId` | chaîne | Non | ID du champ personnalisé \(ex. : customfield_10001\) |
+| `customFieldValue` | chaîne | Non | Valeur pour le champ personnalisé |

 #### Sortie

@@ -107,6 +113,7 @@ Rédiger une demande Jira
 | `issueKey` | chaîne | Clé du ticket créé \(ex. : PROJ-123\) |
 | `summary` | chaîne | Résumé du ticket |
 | `url` | chaîne | URL vers le ticket créé |
+| `assigneeId` | chaîne | ID de compte de l'utilisateur assigné \(si assigné\) |

 ### `jira_bulk_read`

@@ -520,7 +527,31 @@ Supprimer un observateur d'un ticket Jira
 | `issueKey` | string | Clé du ticket |
 | `watcherAccountId` | string | ID du compte observateur supprimé |

-## Notes
+### `jira_get_users`
+
+Récupère les utilisateurs Jira. Si un ID de compte est fourni, renvoie un seul utilisateur. Sinon, renvoie une liste de tous les utilisateurs.
+
+#### Entrée
+
+| Paramètre | Type | Obligatoire | Description |
+| --------- | ---- | ----------- | ----------- |
+| `domain` | chaîne | Oui | Votre domaine Jira \(ex. : votreentreprise.atlassian.net\) |
+| `accountId` | chaîne | Non | ID de compte optionnel pour obtenir un utilisateur spécifique. S'il n'est pas fourni, renvoie tous les utilisateurs. |
+| `startAt` | nombre | Non | L'index du premier utilisateur à renvoyer \(pour la pagination, par défaut : 0\) |
+| `maxResults` | nombre | Non | Nombre maximum d'utilisateurs à renvoyer \(par défaut : 50\) |
+| `cloudId` | chaîne | Non | ID Cloud Jira pour l'instance. S'il n'est pas fourni, il sera récupéré à l'aide du domaine. |
+
+#### Sortie
+
+| Paramètre | Type | Description |
+| --------- | ---- | ----------- |
+| `ts` | chaîne | Horodatage de l'opération |
+| `users` | json | Tableau d'utilisateurs avec accountId, displayName, emailAddress, statut actif et avatarUrls |
+| `total` | nombre | Nombre total d'utilisateurs renvoyés |
+| `startAt` | nombre | Index de début de pagination |
+| `maxResults` | nombre | Nombre maximum de résultats par page |
+
+## Remarques

 - Catégorie : `tools`
 - Type : `jira`
@@ -107,14 +107,14 @@ Lisez les derniers messages des canaux Slack. Récupérez l'historique des conve
 #### Entrée

 | Paramètre | Type | Obligatoire | Description |
-| --------- | ---- | ---------- | ----------- |
+| --------- | ---- | ----------- | ----------- |
 | `authMethod` | chaîne | Non | Méthode d'authentification : oauth ou bot_token |
 | `botToken` | chaîne | Non | Jeton du bot pour Bot personnalisé |
-| `channel` | chaîne | Non | Canal Slack pour lire les messages \(ex. : #general\) |
+| `channel` | chaîne | Non | Canal Slack depuis lequel lire les messages \(ex. : #general\) |
-| `userId` | chaîne | Non | ID utilisateur pour la conversation en MP \(ex. : U1234567890\) |
+| `userId` | chaîne | Non | ID utilisateur pour la conversation en message direct \(ex. : U1234567890\) |
-| `limit` | nombre | Non | Nombre de messages à récupérer \(par défaut : 10, max : 100\) |
+| `limit` | nombre | Non | Nombre de messages à récupérer \(par défaut : 10, max : 15\) |
-| `oldest` | chaîne | Non | Début de la plage temporelle \(horodatage\) |
+| `oldest` | chaîne | Non | Début de la plage horaire \(horodatage\) |
-| `latest` | chaîne | Non | Fin de la plage temporelle \(horodatage\) |
+| `latest` | chaîne | Non | Fin de la plage horaire \(horodatage\) |

 #### Sortie

@@ -94,10 +94,16 @@ Jira課題を作成する
 | `projectId` | string | はい | 課題のプロジェクトID |
 | `summary` | string | はい | 課題の要約 |
 | `description` | string | いいえ | 課題の説明 |
-| `priority` | string | いいえ | 課題の優先度 |
+| `priority` | string | いいえ | 課題の優先度IDまたは名前(例:「10000」または「高」) |
-| `assignee` | string | いいえ | 課題の担当者 |
+| `assignee` | string | いいえ | 課題の担当者アカウントID |
 | `cloudId` | string | いいえ | インスタンスのJira Cloud ID。提供されない場合、ドメインを使用して取得されます。 |
 | `issueType` | string | はい | 作成する課題のタイプ(例:タスク、ストーリー) |
+| `labels` | array | いいえ | 課題のラベル(ラベル名の配列) |
+| `duedate` | string | いいえ | 課題の期限(形式:YYYY-MM-DD) |
+| `reporter` | string | いいえ | 課題の報告者アカウントID |
+| `environment` | string | いいえ | 課題の環境情報 |
+| `customFieldId` | string | いいえ | カスタムフィールドID(例:customfield_10001) |
+| `customFieldValue` | string | いいえ | カスタムフィールドの値 |

 #### 出力

@@ -106,7 +112,8 @@ Jira課題を作成する
 | `ts` | string | 操作のタイムスタンプ |
 | `issueKey` | string | 作成された課題キー(例:PROJ-123) |
 | `summary` | string | 課題の要約 |
-| `url` | string | 作成された課題へのURL |
+| `url` | string | 作成された課題のURL |
+| `assigneeId` | string | 割り当てられたユーザーのアカウントID(割り当てられている場合) |

 ### `jira_bulk_read`

@@ -520,7 +527,31 @@ Jira課題からウォッチャーを削除する
 | `issueKey` | string | 課題キー |
 | `watcherAccountId` | string | 削除されたウォッチャーのアカウントID |

-## 注意事項
-
-- カテゴリー: `tools`
-- タイプ: `jira`
+### `jira_get_users`
+
+Jiraユーザーを取得します。アカウントIDが提供された場合、単一のユーザーを返します。それ以外の場合、すべてのユーザーのリストを返します。
+
+#### 入力
+
+| パラメータ | 型 | 必須 | 説明 |
+| --------- | ---- | -------- | ----------- |
+| `domain` | string | はい | あなたのJiraドメイン(例:yourcompany.atlassian.net) |
+| `accountId` | string | いいえ | 特定のユーザーを取得するためのオプションのアカウントID。提供されない場合、すべてのユーザーを返します。 |
+| `startAt` | number | いいえ | 返す最初のユーザーのインデックス(ページネーション用、デフォルト:0) |
+| `maxResults` | number | いいえ | 返すユーザーの最大数(デフォルト:50) |
+| `cloudId` | string | いいえ | インスタンスのJira Cloud ID。提供されない場合、ドメインを使用して取得されます。 |
+
+#### 出力
+
+| パラメータ | 型 | 説明 |
+| --------- | ---- | ----------- |
+| `ts` | string | 操作のタイムスタンプ |
+| `users` | json | accountId、displayName、emailAddress、activeステータス、avatarUrlsを含むユーザーの配列 |
+| `total` | number | 返されたユーザーの総数 |
+| `startAt` | number | ページネーション開始インデックス |
+| `maxResults` | number | ページあたりの最大結果数 |
+
+## 注記
+
+- カテゴリ:`tools`
+- タイプ:`jira`
@@ -110,8 +110,8 @@ Slackチャンネルから最新のメッセージを読み取ります。フィ
 | `authMethod` | string | いいえ | 認証方法:oauthまたはbot_token |
 | `botToken` | string | いいえ | カスタムボット用のボットトークン |
 | `channel` | string | いいえ | メッセージを読み取るSlackチャンネル(例:#general) |
-| `userId` | string | いいえ | DMの会話用のユーザーID(例:U1234567890) |
+| `userId` | string | いいえ | DM会話用のユーザーID(例:U1234567890) |
-| `limit` | number | いいえ | 取得するメッセージ数(デフォルト:10、最大:100) |
+| `limit` | number | いいえ | 取得するメッセージ数(デフォルト:10、最大:15) |
 | `oldest` | string | いいえ | 時間範囲の開始(タイムスタンプ) |
 | `latest` | string | いいえ | 時間範囲の終了(タイムスタンプ) |

@@ -91,13 +91,19 @@ Jira 的主要功能包括:
 | 参数 | 类型 | 必需 | 描述 |
 | --------- | ---- | -------- | ----------- |
 | `domain` | 字符串 | 是 | 您的 Jira 域名 \(例如:yourcompany.atlassian.net\) |
-| `projectId` | 字符串 | 是 | 问题的项目 ID |
+| `projectId` | 字符串 | 是 | 问题所属项目 ID |
-| `summary` | 字符串 | 是 | 问题的摘要 |
+| `summary` | 字符串 | 是 | 问题摘要 |
-| `description` | 字符串 | 否 | 问题的描述 |
+| `description` | 字符串 | 否 | 问题描述 |
-| `priority` | 字符串 | 否 | 问题的优先级 |
+| `priority` | 字符串 | 否 | 问题优先级 ID 或名称 \(例如:“10000”或“High”\) |
-| `assignee` | 字符串 | 否 | 问题的负责人 |
+| `assignee` | 字符串 | 否 | 问题负责人账户 ID |
-| `cloudId` | 字符串 | 否 | 实例的 Jira 云 ID。如果未提供,将使用域名获取。 |
+| `cloudId` | 字符串 | 否 | 实例的 Jira Cloud ID。如果未提供,将使用域名获取。 |
-| `issueType` | 字符串 | 是 | 要创建的问题类型 \(例如:任务、故事\) |
+| `issueType` | 字符串 | 是 | 要创建的问题类型 \(例如:Task、Story\) |
+| `labels` | 数组 | 否 | 问题标签 \(标签名称数组\) |
+| `duedate` | 字符串 | 否 | 问题截止日期 \(格式:YYYY-MM-DD\) |
+| `reporter` | 字符串 | 否 | 问题报告人账户 ID |
+| `environment` | 字符串 | 否 | 问题环境信息 |
+| `customFieldId` | 字符串 | 否 | 自定义字段 ID \(例如:customfield_10001\) |
+| `customFieldValue` | 字符串 | 否 | 自定义字段的值 |

 #### 输出

@@ -107,6 +113,7 @@ Jira 的主要功能包括:
 | `issueKey` | 字符串 | 创建的问题键 \(例如:PROJ-123\) |
 | `summary` | 字符串 | 问题摘要 |
 | `url` | 字符串 | 创建的问题的 URL |
+| `assigneeId` | 字符串 | 已分配用户的账户 ID(如已分配) |

 ### `jira_bulk_read`

@@ -520,7 +527,31 @@ Jira 的主要功能包括:
 | `issueKey` | string | 问题键 |
 | `watcherAccountId` | string | 移除的观察者账户 ID |

-## 注意事项
-
-- 类别: `tools`
-- 类型: `jira`
+### `jira_get_users`
+
+获取 Jira 用户。如果提供了账户 ID,则返回单个用户,否则返回所有用户的列表。
+
+#### 输入
+
+| 参数 | 类型 | 必需 | 描述 |
+| --------- | ---- | -------- | ----------- |
+| `domain` | 字符串 | 是 | 您的 Jira 域名 \(例如:yourcompany.atlassian.net\) |
+| `accountId` | 字符串 | 否 | 可选账户 ID,用于获取特定用户。如果未提供,则返回所有用户。 |
+| `startAt` | 数字 | 否 | 要返回的第一个用户的索引 \(用于分页,默认值:0\) |
+| `maxResults` | 数字 | 否 | 要返回的最大用户数 \(默认值:50\) |
+| `cloudId` | 字符串 | 否 | 实例的 Jira Cloud ID。如果未提供,将使用域名获取。 |
+
+#### 输出
+
+| 参数 | 类型 | 描述 |
+| --------- | ---- | ----------- |
+| `ts` | 字符串 | 操作的时间戳 |
+| `users` | json | 用户数组,包含 accountId、displayName、emailAddress、active 状态和 avatarUrls |
+| `total` | 数字 | 返回的用户总数 |
+| `startAt` | 数字 | 分页起始索引 |
+| `maxResults` | 数字 | 每页最大结果数 |
+
+## 备注
+
+- 分类:`tools`
+- 类型:`jira`
@@ -109,10 +109,10 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
 | `authMethod` | string | 否 | 认证方法:oauth 或 bot_token |
 | `botToken` | string | 否 | 自定义 Bot 的令牌 |
 | `channel` | string | 否 | 要读取消息的 Slack 频道(例如,#general) |
-| `userId` | string | 否 | DM 对话的用户 ID(例如,U1234567890) |
+| `userId` | string | 否 | DM 会话的用户 ID(例如,U1234567890) |
-| `limit` | number | 否 | 要检索的消息数量(默认:10,最大:100) |
+| `limit` | number | 否 | 要检索的消息数量(默认:10,最大:15) |
-| `oldest` | string | 否 | 时间范围的开始(时间戳) |
+| `oldest` | string | 否 | 时间范围起始(时间戳) |
-| `latest` | string | 否 | 时间范围的结束(时间戳) |
+| `latest` | string | 否 | 时间范围结束(时间戳) |

 #### 输出

@@ -903,7 +903,7 @@ checksums:
   content/24: 228a8ece96627883153b826a1cbaa06c
   content/25: 53abe061a259c296c82676b4770ddd1b
   content/26: 371d0e46b4bd2c23f559b8bc112f6955
-  content/27: 03e8b10ec08b354de98e360b66b779e3
+  content/27: 5b9546f77fbafc0741f3fc2548f81c7e
   content/28: bcadfc362b69078beee0088e5936c98b
   content/29: b82def7d82657f941fbe60df3924eeeb
   content/30: 1ca7ee3856805fa1718031c5f75b6ffb
@@ -2521,9 +2521,9 @@ checksums:
   content/22: ef92d95455e378abe4d27a1cdc5e1aed
   content/23: febd6019055f3754953fd93395d0dbf2
   content/24: 371d0e46b4bd2c23f559b8bc112f6955
-  content/25: 7ef3f388e5ee9346bac54c771d825f40
+  content/25: caf6acbe2a4495ca055cb9006ce47250
   content/26: bcadfc362b69078beee0088e5936c98b
-  content/27: e0fa91c45aa780fc03e91df77417f893
+  content/27: 57662dd91f8d1d807377fd48fa0e9142
   content/28: b463f54cd5fe2458b5842549fbb5e1ce
   content/29: 55f8c724e1a2463bc29a32518a512c73
   content/30: 371d0e46b4bd2c23f559b8bc112f6955
@@ -2638,8 +2638,14 @@ checksums:
   content/139: 33fde4c3da4584b51f06183b7b192a78
   content/140: bcadfc362b69078beee0088e5936c98b
   content/141: b7451190f100388d999c183958d787a7
-  content/142: b3f310d5ef115bea5a8b75bf25d7ea9a
+  content/142: d0f9e799e2e5cc62de60668d35fd846f
-  content/143: 4930918f803340baa861bed9cdf789de
+  content/143: b19069ff19899fe202217e06e002c447
+  content/144: 371d0e46b4bd2c23f559b8bc112f6955
+  content/145: 480fd62f8d9cc18467e82f4c3f70beea
+  content/146: bcadfc362b69078beee0088e5936c98b
+  content/147: 4e73a65d3b873f3979587e10a0f39e72
+  content/148: b3f310d5ef115bea5a8b75bf25d7ea9a
+  content/149: 4930918f803340baa861bed9cdf789de
 8f76e389f6226f608571622b015ca6a1:
   meta/title: ddfe2191ea61b34d8b7cc1d7c19b94ac
   meta/description: 049ff551f2ebabb15cdea0c71bd8e4eb
@@ -11,7 +11,7 @@ import { createLogger } from '@/lib/logs/console/logger'

 const logger = createLogger('CopilotChatsListAPI')

-export async function GET(_req: NextRequest) {
+export async function GET(_request: NextRequest) {
   try {
     const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
     if (!isAuthenticated || !userId) {
@@ -38,14 +38,13 @@ export async function GET(
   const cloudKey = isCloudPath ? path.slice(1).join('/') : fullPath

   const contextParam = request.nextUrl.searchParams.get('context')
-  const legacyBucketType = request.nextUrl.searchParams.get('bucket')

   const context = contextParam || (isCloudPath ? inferContextFromKey(cloudKey) : undefined)

-  if (context === 'profile-pictures') {
-    logger.info('Serving public profile picture:', { cloudKey })
+  if (context === 'profile-pictures' || context === 'og-images') {
+    logger.info(`Serving public ${context}:`, { cloudKey })
     if (isUsingCloudStorage() || isCloudPath) {
-      return await handleCloudProxyPublic(cloudKey, context, legacyBucketType)
+      return await handleCloudProxyPublic(cloudKey, context)
     }
     return await handleLocalFilePublic(fullPath)
   }
@@ -182,8 +181,7 @@ async function handleCloudProxy(

 async function handleCloudProxyPublic(
   cloudKey: string,
-  context: StorageContext,
-  legacyBucketType?: string | null
+  context: StorageContext
 ): Promise<NextResponse> {
   try {
     let fileBuffer: Buffer
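The route change above makes `og-images` publicly servable alongside `profile-pictures` and drops the legacy `bucket` query parameter from the public proxy path. A sketch of the updated check expressed as a lookup set, shown only as an illustration of the logic in the hunk, not as the route's actual code:

```ts
// Storage contexts that may be served without authentication after this change.
const PUBLIC_CONTEXTS = new Set(['profile-pictures', 'og-images'])

function isPublicContext(context: string | undefined): boolean {
  return context !== undefined && PUBLIC_CONTEXTS.has(context)
}
```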
@@ -1,7 +1,6 @@
 import { runs } from '@trigger.dev/sdk'
 import { type NextRequest, NextResponse } from 'next/server'
-import { authenticateApiKeyFromHeader, updateApiKeyLastUsed } from '@/lib/api-key/service'
-import { getSession } from '@/lib/auth'
+import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { createLogger } from '@/lib/logs/console/logger'
 import { createErrorResponse } from '@/app/api/workflows/utils'
@@ -18,38 +17,44 @@ export async function GET(
   try {
     logger.debug(`[${requestId}] Getting status for task: ${taskId}`)

-    // Try session auth first (for web UI)
-    const session = await getSession()
-    let authenticatedUserId: string | null = session?.user?.id || null
-
-    if (!authenticatedUserId) {
-      const apiKeyHeader = request.headers.get('x-api-key')
-      if (apiKeyHeader) {
-        const authResult = await authenticateApiKeyFromHeader(apiKeyHeader)
-        if (authResult.success && authResult.userId) {
-          authenticatedUserId = authResult.userId
-          if (authResult.keyId) {
-            await updateApiKeyLastUsed(authResult.keyId).catch((error) => {
-              logger.warn(`[${requestId}] Failed to update API key last used timestamp:`, {
-                keyId: authResult.keyId,
-                error,
-              })
-            })
-          }
-        }
-      }
-    }
-
-    if (!authenticatedUserId) {
-      return createErrorResponse('Authentication required', 401)
-    }
-
-    // Fetch task status from Trigger.dev
+    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
+    if (!authResult.success || !authResult.userId) {
+      logger.warn(`[${requestId}] Unauthorized task status request`)
+      return createErrorResponse(authResult.error || 'Authentication required', 401)
+    }
+
+    const authenticatedUserId = authResult.userId
+
     const run = await runs.retrieve(taskId)

     logger.debug(`[${requestId}] Task ${taskId} status: ${run.status}`)

-    // Map Trigger.dev status to our format
+    const payload = run.payload as any
+    if (payload?.workflowId) {
+      const { verifyWorkflowAccess } = await import('@/socket-server/middleware/permissions')
+      const accessCheck = await verifyWorkflowAccess(authenticatedUserId, payload.workflowId)
+      if (!accessCheck.hasAccess) {
+        logger.warn(`[${requestId}] User ${authenticatedUserId} denied access to task ${taskId}`, {
+          workflowId: payload.workflowId,
+        })
+        return createErrorResponse('Access denied', 403)
+      }
+      logger.debug(`[${requestId}] User ${authenticatedUserId} has access to task ${taskId}`)
+    } else {
+      if (payload?.userId && payload.userId !== authenticatedUserId) {
+        logger.warn(
+          `[${requestId}] User ${authenticatedUserId} attempted to access task ${taskId} owned by ${payload.userId}`
+        )
+        return createErrorResponse('Access denied', 403)
+      }
+      if (!payload?.userId) {
+        logger.warn(
+          `[${requestId}] Task ${taskId} has no ownership information in payload. Denying access for security.`
+        )
+        return createErrorResponse('Access denied', 403)
+      }
+    }
+
     const statusMap = {
       QUEUED: 'queued',
       WAITING_FOR_DEPLOY: 'queued',
@@ -67,7 +72,6 @@ export async function GET(

     const mappedStatus = statusMap[run.status as keyof typeof statusMap] || 'unknown'

-    // Build response based on status
     const response: any = {
       success: true,
       taskId,
@@ -77,21 +81,18 @@ export async function GET(
       },
     }

-    // Add completion details if finished
     if (mappedStatus === 'completed') {
       response.output = run.output // This contains the workflow execution results
       response.metadata.completedAt = run.finishedAt
       response.metadata.duration = run.durationMs
     }

-    // Add error details if failed
     if (mappedStatus === 'failed') {
       response.error = run.error
       response.metadata.completedAt = run.finishedAt
       response.metadata.duration = run.durationMs
     }

-    // Add progress info if still processing
     if (mappedStatus === 'processing' || mappedStatus === 'queued') {
       response.estimatedDuration = 180000 // 3 minutes max from our config
     }
@@ -107,6 +108,3 @@ export async function GET(
     return createErrorResponse('Failed to fetch task status', 500)
   }
 }
-
-// TODO: Implement task cancellation via Trigger.dev API if needed
-// export async function DELETE() { ... }
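The refactor above swaps the hand-rolled session/API-key branching for `checkHybridAuth`, then verifies that the caller may see the task: through `verifyWorkflowAccess` when the run payload carries a `workflowId`, otherwise by comparing `payload.userId`, and denying access when no ownership information is present. A condensed restatement of that decision flow; the helper name and simplified types are assumptions for illustration, not the route's actual export:

```ts
// Illustrative restatement of the ownership check added in the hunk above.
type TaskPayload = { workflowId?: string; userId?: string }
type AccessChecker = (userId: string, workflowId: string) => Promise<{ hasAccess: boolean }>

async function canViewTask(
  userId: string,
  payload: TaskPayload,
  verifyWorkflowAccess: AccessChecker
): Promise<boolean> {
  if (payload.workflowId) {
    // Workflow-scoped tasks defer to workflow/workspace permissions.
    const { hasAccess } = await verifyWorkflowAccess(userId, payload.workflowId)
    return hasAccess
  }
  // Otherwise require direct ownership; unknown ownership is denied.
  return payload.userId !== undefined && payload.userId === userId
}
```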
@@ -27,7 +27,7 @@ const UpdateKnowledgeBaseSchema = z.object({
     .optional(),
 })

-export async function GET(_req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
+export async function GET(_request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
   const requestId = generateRequestId()
   const { id } = await params

@@ -133,7 +133,10 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ id:
     }
   }

-export async function DELETE(_req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
+export async function DELETE(
+  _request: NextRequest,
+  { params }: { params: Promise<{ id: string }> }
+) {
   const requestId = generateRequestId()
   const { id } = await params

@@ -1,32 +1,72 @@
 import { db } from '@sim/db'
-import { workflowExecutionLogs, workflowExecutionSnapshots } from '@sim/db/schema'
-import { eq } from 'drizzle-orm'
+import {
+  permissions,
+  workflow,
+  workflowExecutionLogs,
+  workflowExecutionSnapshots,
+} from '@sim/db/schema'
+import { and, eq } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
+import { checkHybridAuth } from '@/lib/auth/hybrid'
+import { generateRequestId } from '@/lib/core/utils/request'
 import { createLogger } from '@/lib/logs/console/logger'

 const logger = createLogger('LogsByExecutionIdAPI')

 export async function GET(
-  _request: NextRequest,
+  request: NextRequest,
   { params }: { params: Promise<{ executionId: string }> }
 ) {
+  const requestId = generateRequestId()
+
   try {
     const { executionId } = await params

-    logger.debug(`Fetching execution data for: ${executionId}`)
+    const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
+    if (!authResult.success || !authResult.userId) {
+      logger.warn(`[${requestId}] Unauthorized execution data access attempt for: ${executionId}`)
+      return NextResponse.json(
+        { error: authResult.error || 'Authentication required' },
+        { status: 401 }
+      )
+    }
+
+    const authenticatedUserId = authResult.userId
+
+    logger.debug(
+      `[${requestId}] Fetching execution data for: ${executionId} (auth: ${authResult.authType})`
+    )

-    // Get the workflow execution log to find the snapshot
     const [workflowLog] = await db
-      .select()
+      .select({
+        id: workflowExecutionLogs.id,
+        workflowId: workflowExecutionLogs.workflowId,
+        executionId: workflowExecutionLogs.executionId,
+        stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
+        trigger: workflowExecutionLogs.trigger,
+        startedAt: workflowExecutionLogs.startedAt,
+        endedAt: workflowExecutionLogs.endedAt,
+        totalDurationMs: workflowExecutionLogs.totalDurationMs,
+        cost: workflowExecutionLogs.cost,
+      })
       .from(workflowExecutionLogs)
+      .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
+      .innerJoin(
+        permissions,
+        and(
+          eq(permissions.entityType, 'workspace'),
+          eq(permissions.entityId, workflow.workspaceId),
+          eq(permissions.userId, authenticatedUserId)
+        )
+      )
       .where(eq(workflowExecutionLogs.executionId, executionId))
       .limit(1)

     if (!workflowLog) {
+      logger.warn(`[${requestId}] Execution not found or access denied: ${executionId}`)
       return NextResponse.json({ error: 'Workflow execution not found' }, { status: 404 })
     }

-    // Get the workflow state snapshot
     const [snapshot] = await db
       .select()
       .from(workflowExecutionSnapshots)
@@ -34,6 +74,7 @@ export async function GET(
       .limit(1)

     if (!snapshot) {
+      logger.warn(`[${requestId}] Workflow state snapshot not found for execution: ${executionId}`)
       return NextResponse.json({ error: 'Workflow state snapshot not found' }, { status: 404 })
     }

@@ -50,14 +91,14 @@ export async function GET(
       },
     }

-    logger.debug(`Successfully fetched execution data for: ${executionId}`)
+    logger.debug(`[${requestId}] Successfully fetched execution data for: ${executionId}`)
     logger.debug(
-      `Workflow state contains ${Object.keys((snapshot.stateData as any)?.blocks || {}).length} blocks`
+      `[${requestId}] Workflow state contains ${Object.keys((snapshot.stateData as any)?.blocks || {}).length} blocks`
     )

     return NextResponse.json(response)
   } catch (error) {
-    logger.error('Error fetching execution data:', error)
+    logger.error(`[${requestId}] Error fetching execution data:`, error)
     return NextResponse.json({ error: 'Failed to fetch execution data' }, { status: 500 })
   }
 }
@@ -3,8 +3,10 @@ import { memory, workflowBlocks } from '@sim/db/schema'
 import { and, eq } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
+import { checkHybridAuth } from '@/lib/auth/hybrid'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { createLogger } from '@/lib/logs/console/logger'
+import { getWorkflowAccessContext } from '@/lib/workflows/utils'

 const logger = createLogger('MemoryByIdAPI')

@@ -65,6 +67,65 @@ const memoryPutBodySchema = z.object({
   workflowId: z.string().uuid('Invalid workflow ID format'),
 })

+/**
+ * Validates authentication and workflow access for memory operations
+ * @param request - The incoming request
+ * @param workflowId - The workflow ID to check access for
+ * @param requestId - Request ID for logging
+ * @param action - 'read' for GET, 'write' for PUT/DELETE
+ * @returns Object with userId if successful, or error response if failed
+ */
+async function validateMemoryAccess(
+  request: NextRequest,
+  workflowId: string,
+  requestId: string,
+  action: 'read' | 'write'
+): Promise<{ userId: string } | { error: NextResponse }> {
+  const authResult = await checkHybridAuth(request, {
+    requireWorkflowId: false,
+  })
+  if (!authResult.success || !authResult.userId) {
+    logger.warn(`[${requestId}] Unauthorized memory ${action} attempt`)
+    return {
+      error: NextResponse.json(
+        { success: false, error: { message: 'Authentication required' } },
+        { status: 401 }
+      ),
+    }
+  }
+
+  const accessContext = await getWorkflowAccessContext(workflowId, authResult.userId)
+  if (!accessContext) {
+    logger.warn(`[${requestId}] Workflow ${workflowId} not found`)
+    return {
+      error: NextResponse.json(
+        { success: false, error: { message: 'Workflow not found' } },
+        { status: 404 }
+      ),
+    }
+  }
+
+  const { isOwner, workspacePermission } = accessContext
+  const hasAccess =
+    action === 'read'
+      ? isOwner || workspacePermission !== null
+      : isOwner || workspacePermission === 'write' || workspacePermission === 'admin'
+
+  if (!hasAccess) {
+    logger.warn(
+      `[${requestId}] User ${authResult.userId} denied ${action} access to workflow ${workflowId}`
+    )
+    return {
+      error: NextResponse.json(
+        { success: false, error: { message: 'Access denied' } },
+        { status: 403 }
+      ),
+    }
+  }
+
+  return { userId: authResult.userId }
+}
+
 export const dynamic = 'force-dynamic'
 export const runtime = 'nodejs'

@@ -101,6 +162,11 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{

   const { workflowId: validatedWorkflowId } = validation.data

+  const accessCheck = await validateMemoryAccess(request, validatedWorkflowId, requestId, 'read')
+  if ('error' in accessCheck) {
+    return accessCheck.error
+  }
+
   const memories = await db
     .select()
     .from(memory)
@@ -203,6 +269,11 @@ export async function DELETE(

   const { workflowId: validatedWorkflowId } = validation.data

+  const accessCheck = await validateMemoryAccess(request, validatedWorkflowId, requestId, 'write')
+  if ('error' in accessCheck) {
+    return accessCheck.error
+  }
+
   const existingMemory = await db
     .select({ id: memory.id })
     .from(memory)
@@ -296,6 +367,11 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
     )
   }

+  const accessCheck = await validateMemoryAccess(request, validatedWorkflowId, requestId, 'write')
+  if ('error' in accessCheck) {
+    return accessCheck.error
+  }
+
   const existingMemories = await db
     .select()
     .from(memory)
@@ -28,7 +28,7 @@ const updateInvitationSchema = z.object({

 // Get invitation details
 export async function GET(
-  _req: NextRequest,
+  _request: NextRequest,
   { params }: { params: Promise<{ id: string; invitationId: string }> }
 ) {
   const { id: organizationId, invitationId } = await params
@@ -1,16 +1,19 @@
 import { db } from '@sim/db'
-import { templates, user } from '@sim/db/schema'
+import { templates } from '@sim/db/schema'
 import { eq } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
 import { getSession } from '@/lib/auth'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { createLogger } from '@/lib/logs/console/logger'
+import { verifySuperUser } from '@/lib/templates/permissions'

 const logger = createLogger('TemplateApprovalAPI')

 export const revalidate = 0

-// POST /api/templates/[id]/approve - Approve a template (super users only)
+/**
+ * POST /api/templates/[id]/approve - Approve a template (super users only)
+ */
 export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
   const requestId = generateRequestId()
   const { id } = await params
@@ -22,23 +25,18 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
       return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
     }

-    // Check if user is a super user
-    const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
-
-    if (!currentUser[0]?.isSuperUser) {
+    const { isSuperUser } = await verifySuperUser(session.user.id)
+    if (!isSuperUser) {
       logger.warn(`[${requestId}] Non-super user attempted to approve template: ${id}`)
       return NextResponse.json({ error: 'Only super users can approve templates' }, { status: 403 })
     }

-    // Check if template exists
     const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)

     if (existingTemplate.length === 0) {
       logger.warn(`[${requestId}] Template not found for approval: ${id}`)
       return NextResponse.json({ error: 'Template not found' }, { status: 404 })
     }

-    // Update template status to approved
     await db
       .update(templates)
       .set({ status: 'approved', updatedAt: new Date() })
@@ -56,9 +54,11 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
   }
 }

-// POST /api/templates/[id]/reject - Reject a template (super users only)
+/**
+ * DELETE /api/templates/[id]/approve - Unapprove a template (super users only)
+ */
 export async function DELETE(
-  request: NextRequest,
+  _request: NextRequest,
   { params }: { params: Promise<{ id: string }> }
 ) {
   const requestId = generateRequestId()
@@ -71,23 +71,18 @@ export async function DELETE(
       return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
     }

-    // Check if user is a super user
-    const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
-
-    if (!currentUser[0]?.isSuperUser) {
+    const { isSuperUser } = await verifySuperUser(session.user.id)
+    if (!isSuperUser) {
       logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
       return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
     }

-    // Check if template exists
     const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)

     if (existingTemplate.length === 0) {
       logger.warn(`[${requestId}] Template not found for rejection: ${id}`)
       return NextResponse.json({ error: 'Template not found' }, { status: 404 })
     }

-    // Update template status to rejected
     await db
       .update(templates)
       .set({ status: 'rejected', updatedAt: new Date() })
apps/sim/app/api/templates/[id]/og-image/route.ts (new file, 142 lines)
@@ -0,0 +1,142 @@
+import { db } from '@sim/db'
+import { templates } from '@sim/db/schema'
+import { eq } from 'drizzle-orm'
+import { type NextRequest, NextResponse } from 'next/server'
+import { getSession } from '@/lib/auth'
+import { generateRequestId } from '@/lib/core/utils/request'
+import { getBaseUrl } from '@/lib/core/utils/urls'
+import { createLogger } from '@/lib/logs/console/logger'
+import { verifyTemplateOwnership } from '@/lib/templates/permissions'
+import { uploadFile } from '@/lib/uploads/core/storage-service'
+import { isValidPng } from '@/lib/uploads/utils/validation'
+
+const logger = createLogger('TemplateOGImageAPI')
+
+/**
+ * PUT /api/templates/[id]/og-image
+ * Upload a pre-generated OG image for a template.
+ * Accepts base64-encoded image data in the request body.
+ */
+export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
+  const requestId = generateRequestId()
+  const { id } = await params
+
+  try {
+    const session = await getSession()
+    if (!session?.user?.id) {
+      logger.warn(`[${requestId}] Unauthorized OG image upload attempt for template: ${id}`)
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const { authorized, error, status } = await verifyTemplateOwnership(
+      id,
+      session.user.id,
+      'admin'
+    )
+    if (!authorized) {
+      logger.warn(`[${requestId}] User denied permission to upload OG image for template ${id}`)
+      return NextResponse.json({ error }, { status: status || 403 })
+    }
+
+    const body = await request.json()
+    const { imageData } = body
+
+    if (!imageData || typeof imageData !== 'string') {
+      return NextResponse.json(
+        { error: 'Missing or invalid imageData (expected base64 string)' },
+        { status: 400 }
+      )
+    }
+
+    const base64Data = imageData.includes(',') ? imageData.split(',')[1] : imageData
+    const imageBuffer = Buffer.from(base64Data, 'base64')
+
+    if (!isValidPng(imageBuffer)) {
+      return NextResponse.json({ error: 'Invalid PNG image data' }, { status: 400 })
+    }
+
+    const maxSize = 5 * 1024 * 1024
+    if (imageBuffer.length > maxSize) {
+      return NextResponse.json({ error: 'Image too large. Maximum size is 5MB.' }, { status: 400 })
+    }
+
+    const timestamp = Date.now()
+    const storageKey = `og-images/templates/${id}/${timestamp}.png`
+
+    logger.info(`[${requestId}] Uploading OG image for template ${id}: ${storageKey}`)
+
+    const uploadResult = await uploadFile({
+      file: imageBuffer,
+      fileName: storageKey,
+      contentType: 'image/png',
+      context: 'og-images',
+      preserveKey: true,
+      customKey: storageKey,
+    })
+
+    const baseUrl = getBaseUrl()
+    const ogImageUrl = `${baseUrl}${uploadResult.path}?context=og-images`
+
+    await db
+      .update(templates)
+      .set({
+        ogImageUrl,
+        updatedAt: new Date(),
+      })
+      .where(eq(templates.id, id))
+
+    logger.info(`[${requestId}] Successfully uploaded OG image for template ${id}: ${ogImageUrl}`)
+
+    return NextResponse.json({
+      success: true,
+      ogImageUrl,
+    })
+  } catch (error: unknown) {
+    logger.error(`[${requestId}] Error uploading OG image for template ${id}:`, error)
+    return NextResponse.json({ error: 'Failed to upload OG image' }, { status: 500 })
+  }
+}
+
+/**
+ * DELETE /api/templates/[id]/og-image
+ * Remove the OG image for a template.
+ */
+export async function DELETE(
+  _request: NextRequest,
+  { params }: { params: Promise<{ id: string }> }
+) {
+  const requestId = generateRequestId()
+  const { id } = await params
+
+  try {
+    const session = await getSession()
+    if (!session?.user?.id) {
+      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
+    }
+
+    const { authorized, error, status } = await verifyTemplateOwnership(
+      id,
+      session.user.id,
+      'admin'
+    )
+    if (!authorized) {
+      logger.warn(`[${requestId}] User denied permission to delete OG image for template ${id}`)
+      return NextResponse.json({ error }, { status: status || 403 })
+    }
+
+    await db
+      .update(templates)
+      .set({
+        ogImageUrl: null,
+        updatedAt: new Date(),
+      })
+      .where(eq(templates.id, id))
+
+    logger.info(`[${requestId}] Removed OG image for template ${id}`)
+
+    return NextResponse.json({ success: true })
+  } catch (error: unknown) {
+    logger.error(`[${requestId}] Error removing OG image for template ${id}:`, error)
+    return NextResponse.json({ error: 'Failed to remove OG image' }, { status: 500 })
+  }
+}
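A minimal client-side sketch of calling the new OG image endpoint above; the canvas source and the wrapper function name are assumptions for illustration only.

// Hypothetical caller; per the route above, PUT expects a base64 PNG (max 5MB) in imageData
// and returns { success, ogImageUrl } on success.
async function uploadTemplateOgImage(templateId: string, canvas: HTMLCanvasElement): Promise<string> {
  const imageData = canvas.toDataURL('image/png') // the data URL prefix is stripped server-side

  const res = await fetch(`/api/templates/${templateId}/og-image`, {
    method: 'PUT',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ imageData }),
  })

  if (!res.ok) {
    throw new Error(`OG image upload failed: ${res.status}`)
  }
  const { ogImageUrl } = await res.json()
  return ogImageUrl
}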
@@ -1,16 +1,19 @@
 import { db } from '@sim/db'
-import { templates, user } from '@sim/db/schema'
+import { templates } from '@sim/db/schema'
 import { eq } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
 import { getSession } from '@/lib/auth'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { createLogger } from '@/lib/logs/console/logger'
+import { verifySuperUser } from '@/lib/templates/permissions'

 const logger = createLogger('TemplateRejectionAPI')

 export const revalidate = 0

-// POST /api/templates/[id]/reject - Reject a template (super users only)
+/**
+ * POST /api/templates/[id]/reject - Reject a template (super users only)
+ */
 export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
   const requestId = generateRequestId()
   const { id } = await params
@@ -22,23 +25,18 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
       return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
     }

-    // Check if user is a super user
-    const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
-
-    if (!currentUser[0]?.isSuperUser) {
+    const { isSuperUser } = await verifySuperUser(session.user.id)
+    if (!isSuperUser) {
       logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
       return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
     }

-    // Check if template exists
     const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)

     if (existingTemplate.length === 0) {
       logger.warn(`[${requestId}] Template not found for rejection: ${id}`)
       return NextResponse.json({ error: 'Template not found' }, { status: 404 })
     }

-    // Update template status to rejected
     await db
       .update(templates)
       .set({ status: 'rejected', updatedAt: new Date() })
@@ -1,6 +1,6 @@
 import { db } from '@sim/db'
-import { member, templateCreators, templates, workflow } from '@sim/db/schema'
-import { and, eq, or, sql } from 'drizzle-orm'
+import { templateCreators, templates, workflow } from '@sim/db/schema'
+import { eq, sql } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { getSession } from '@/lib/auth'
@@ -15,7 +15,6 @@ const logger = createLogger('TemplateByIdAPI')

 export const revalidate = 0

-// GET /api/templates/[id] - Retrieve a single template by ID
 export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
   const requestId = generateRequestId()
   const { id } = await params
@@ -25,7 +24,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{

     logger.debug(`[${requestId}] Fetching template: ${id}`)

-    // Fetch the template by ID with creator info
     const result = await db
       .select({
         template: templates,
@@ -47,12 +45,10 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
       creator: creator || undefined,
     }

-    // Only show approved templates to non-authenticated users
     if (!session?.user?.id && template.status !== 'approved') {
       return NextResponse.json({ error: 'Template not found' }, { status: 404 })
     }

-    // Check if user has starred (only if authenticated)
     let isStarred = false
     if (session?.user?.id) {
       const { templateStars } = await import('@sim/db/schema')
@@ -80,7 +76,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{

         logger.debug(`[${requestId}] Incremented view count for template: ${id}`)
       } catch (viewError) {
-        // Log the error but don't fail the request
         logger.warn(`[${requestId}] Failed to increment view count for template: ${id}`, viewError)
       }
     }
@@ -138,7 +133,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{

     const { name, details, creatorId, tags, updateState } = validationResult.data

-    // Check if template exists
     const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)

     if (existingTemplate.length === 0) {
@@ -146,32 +140,54 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
       return NextResponse.json({ error: 'Template not found' }, { status: 404 })
     }

-    // No permission check needed - template updates only happen from within the workspace
-    // where the user is already editing the connected workflow
+    const template = existingTemplate[0]
+
+    if (!template.creatorId) {
+      logger.warn(`[${requestId}] Template ${id} has no creator, denying update`)
+      return NextResponse.json({ error: 'Access denied' }, { status: 403 })
+    }
+
+    const { verifyCreatorPermission } = await import('@/lib/templates/permissions')
+    const { hasPermission, error: permissionError } = await verifyCreatorPermission(
+      session.user.id,
+      template.creatorId,
+      'admin'
+    )
+
+    if (!hasPermission) {
+      logger.warn(`[${requestId}] User denied permission to update template ${id}`)
+      return NextResponse.json({ error: permissionError || 'Access denied' }, { status: 403 })
+    }

-    // Prepare update data - only include fields that were provided
     const updateData: any = {
       updatedAt: new Date(),
     }

-    // Only update fields that were provided
     if (name !== undefined) updateData.name = name
     if (details !== undefined) updateData.details = details
     if (tags !== undefined) updateData.tags = tags
     if (creatorId !== undefined) updateData.creatorId = creatorId

-    // Only update the state if explicitly requested and the template has a connected workflow
-    if (updateState && existingTemplate[0].workflowId) {
-      // Load the current workflow state from normalized tables
+    if (updateState && template.workflowId) {
+      const { verifyWorkflowAccess } = await import('@/socket-server/middleware/permissions')
+      const { hasAccess: hasWorkflowAccess } = await verifyWorkflowAccess(
+        session.user.id,
+        template.workflowId
+      )
+
+      if (!hasWorkflowAccess) {
+        logger.warn(`[${requestId}] User denied workflow access for state sync on template ${id}`)
+        return NextResponse.json({ error: 'Access denied to workflow' }, { status: 403 })
+      }
+
       const { loadWorkflowFromNormalizedTables } = await import('@/lib/workflows/persistence/utils')
-      const normalizedData = await loadWorkflowFromNormalizedTables(existingTemplate[0].workflowId)
+      const normalizedData = await loadWorkflowFromNormalizedTables(template.workflowId)

       if (normalizedData) {
-        // Also fetch workflow variables
         const [workflowRecord] = await db
           .select({ variables: workflow.variables })
           .from(workflow)
-          .where(eq(workflow.id, existingTemplate[0].workflowId))
+          .where(eq(workflow.id, template.workflowId))
           .limit(1)

         const currentState = {
@@ -183,17 +199,15 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
           lastSaved: Date.now(),
         }

-        // Extract credential requirements from the new state
         const requiredCredentials = extractRequiredCredentials(currentState)

-        // Sanitize the state before storing
         const sanitizedState = sanitizeCredentials(currentState)

         updateData.state = sanitizedState
         updateData.requiredCredentials = requiredCredentials

         logger.info(
-          `[${requestId}] Updating template state and credentials from current workflow: ${existingTemplate[0].workflowId}`
+          `[${requestId}] Updating template state and credentials from current workflow: ${template.workflowId}`
         )
       } else {
         logger.warn(`[${requestId}] Could not load workflow state for template: ${id}`)
@@ -233,7 +247,6 @@ export async function DELETE(
       return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
     }

-    // Fetch template
     const existing = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
     if (existing.length === 0) {
       logger.warn(`[${requestId}] Template not found for delete: ${id}`)
@@ -242,41 +255,21 @@ export async function DELETE(

     const template = existing[0]

-    // Permission: Only admin/owner of creator profile can delete
-    if (template.creatorId) {
-      const creatorProfile = await db
-        .select()
-        .from(templateCreators)
-        .where(eq(templateCreators.id, template.creatorId))
-        .limit(1)
-
-      if (creatorProfile.length > 0) {
-        const creator = creatorProfile[0]
-        let hasPermission = false
-
-        if (creator.referenceType === 'user') {
-          hasPermission = creator.referenceId === session.user.id
-        } else if (creator.referenceType === 'organization') {
-          // For delete, require admin/owner role
-          const membership = await db
-            .select()
-            .from(member)
-            .where(
-              and(
-                eq(member.userId, session.user.id),
-                eq(member.organizationId, creator.referenceId),
-                or(eq(member.role, 'admin'), eq(member.role, 'owner'))
-              )
-            )
-            .limit(1)
-          hasPermission = membership.length > 0
-        }
-
-        if (!hasPermission) {
-          logger.warn(`[${requestId}] User denied permission to delete template ${id}`)
-          return NextResponse.json({ error: 'Access denied' }, { status: 403 })
-        }
-      }
+    if (!template.creatorId) {
+      logger.warn(`[${requestId}] Template ${id} has no creator, denying delete`)
+      return NextResponse.json({ error: 'Access denied' }, { status: 403 })
+    }
+
+    const { verifyCreatorPermission } = await import('@/lib/templates/permissions')
+    const { hasPermission, error: permissionError } = await verifyCreatorPermission(
+      session.user.id,
+      template.creatorId,
+      'admin'
+    )
+
+    if (!hasPermission) {
+      logger.warn(`[${requestId}] User denied permission to delete template ${id}`)
+      return NextResponse.json({ error: permissionError || 'Access denied' }, { status: 403 })
     }

     await db.delete(templates).where(eq(templates.id, id))
@@ -1,6 +1,5 @@
 import { db } from '@sim/db'
 import {
-  member,
   templateCreators,
   templateStars,
   templates,
@@ -204,51 +203,18 @@ export async function POST(request: NextRequest) {
       return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
     }

-    // Validate creator profile - required for all templates
-    const creatorProfile = await db
-      .select()
-      .from(templateCreators)
-      .where(eq(templateCreators.id, data.creatorId))
-      .limit(1)
-
-    if (creatorProfile.length === 0) {
-      logger.warn(`[${requestId}] Creator profile not found: ${data.creatorId}`)
-      return NextResponse.json({ error: 'Creator profile not found' }, { status: 404 })
-    }
-
-    const creator = creatorProfile[0]
-
-    // Verify user has permission to use this creator profile
-    if (creator.referenceType === 'user') {
-      if (creator.referenceId !== session.user.id) {
-        logger.warn(`[${requestId}] User cannot use creator profile: ${data.creatorId}`)
-        return NextResponse.json(
-          { error: 'You do not have permission to use this creator profile' },
-          { status: 403 }
-        )
-      }
-    } else if (creator.referenceType === 'organization') {
-      // Verify user is a member of the organization
-      const membership = await db
-        .select()
-        .from(member)
-        .where(
-          and(eq(member.userId, session.user.id), eq(member.organizationId, creator.referenceId))
-        )
-        .limit(1)
-
-      if (membership.length === 0) {
-        logger.warn(
-          `[${requestId}] User not a member of organization for creator: ${data.creatorId}`
-        )
-        return NextResponse.json(
-          { error: 'You must be a member of the organization to use its creator profile' },
-          { status: 403 }
-        )
-      }
-    }
-
-    // Create the template
+    const { verifyCreatorPermission } = await import('@/lib/templates/permissions')
+    const { hasPermission, error: permissionError } = await verifyCreatorPermission(
+      session.user.id,
+      data.creatorId,
+      'member'
+    )
+
+    if (!hasPermission) {
+      logger.warn(`[${requestId}] User cannot use creator profile: ${data.creatorId}`)
+      return NextResponse.json({ error: permissionError || 'Access denied' }, { status: 403 })
+    }
+
     const templateId = uuidv4()
     const now = new Date()

@@ -1,6 +1,7 @@
 import { type NextRequest, NextResponse } from 'next/server'
 import { getSession } from '@/lib/auth'
 import { authorizeCredentialUse } from '@/lib/auth/credential-access'
+import { validateAlphanumericId } from '@/lib/core/security/input-validation'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { createLogger } from '@/lib/logs/console/logger'
 import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -108,6 +109,14 @@ export async function GET(request: NextRequest) {
       return NextResponse.json({ error: 'Failed to obtain valid access token' }, { status: 401 })
     }

+    if (folderId) {
+      const folderIdValidation = validateAlphanumericId(folderId, 'folderId', 50)
+      if (!folderIdValidation.isValid) {
+        logger.warn(`[${requestId}] Invalid folderId`, { error: folderIdValidation.error })
+        return NextResponse.json({ error: folderIdValidation.error }, { status: 400 })
+      }
+    }
+
     const qParts: string[] = ['trashed = false']
     if (folderId) {
       qParts.push(`'${escapeForDriveQuery(folderId)}' in parents`)
@@ -1,6 +1,7 @@
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
+import { validateAlphanumericId } from '@/lib/core/security/input-validation'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { createLogger } from '@/lib/logs/console/logger'

@@ -50,6 +51,29 @@ export async function POST(request: NextRequest) {
       .map((id) => id.trim())
       .filter((id) => id.length > 0)

+    for (const labelId of labelIds) {
+      const labelIdValidation = validateAlphanumericId(labelId, 'labelId', 255)
+      if (!labelIdValidation.isValid) {
+        logger.warn(`[${requestId}] Invalid label ID: ${labelIdValidation.error}`)
+        return NextResponse.json(
+          {
+            success: false,
+            error: labelIdValidation.error,
+          },
+          { status: 400 }
+        )
+      }
+    }
+
+    const messageIdValidation = validateAlphanumericId(validatedData.messageId, 'messageId', 255)
+    if (!messageIdValidation.isValid) {
+      logger.warn(`[${requestId}] Invalid message ID: ${messageIdValidation.error}`)
+      return NextResponse.json(
+        { success: false, error: messageIdValidation.error },
+        { status: 400 }
+      )
+    }
+
     const gmailResponse = await fetch(
       `${GMAIL_API_BASE}/messages/${validatedData.messageId}/modify`,
       {
@@ -3,6 +3,7 @@ import { account } from '@sim/db/schema'
 import { and, eq } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
 import { getSession } from '@/lib/auth'
+import { validateAlphanumericId } from '@/lib/core/security/input-validation'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { createLogger } from '@/lib/logs/console/logger'
 import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -38,6 +39,12 @@ export async function GET(request: NextRequest) {
     return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
   }

+  const credentialIdValidation = validateAlphanumericId(credentialId, 'credentialId', 255)
+  if (!credentialIdValidation.isValid) {
+    logger.warn(`[${requestId}] Invalid credential ID: ${credentialIdValidation.error}`)
+    return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 })
+  }
+
   let credentials = await db
     .select()
     .from(account)
@@ -1,6 +1,7 @@
 import { type NextRequest, NextResponse } from 'next/server'
 import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
+import { validateAlphanumericId } from '@/lib/core/security/input-validation'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { createLogger } from '@/lib/logs/console/logger'

@@ -53,6 +54,29 @@ export async function POST(request: NextRequest) {
       .map((id) => id.trim())
       .filter((id) => id.length > 0)

+    for (const labelId of labelIds) {
+      const labelIdValidation = validateAlphanumericId(labelId, 'labelId', 255)
+      if (!labelIdValidation.isValid) {
+        logger.warn(`[${requestId}] Invalid label ID: ${labelIdValidation.error}`)
+        return NextResponse.json(
+          {
+            success: false,
+            error: labelIdValidation.error,
+          },
+          { status: 400 }
+        )
+      }
+    }
+
+    const messageIdValidation = validateAlphanumericId(validatedData.messageId, 'messageId', 255)
+    if (!messageIdValidation.isValid) {
+      logger.warn(`[${requestId}] Invalid message ID: ${messageIdValidation.error}`)
+      return NextResponse.json(
+        { success: false, error: messageIdValidation.error },
+        { status: 400 }
+      )
+    }
+
     const gmailResponse = await fetch(
       `${GMAIL_API_BASE}/messages/${validatedData.messageId}/modify`,
       {
@@ -1,5 +1,6 @@
 import { type NextRequest, NextResponse } from 'next/server'
 import { authorizeCredentialUse } from '@/lib/auth/credential-access'
+import { validateUUID } from '@/lib/core/security/input-validation'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { createLogger } from '@/lib/logs/console/logger'
 import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -25,7 +26,6 @@ export async function GET(request: NextRequest) {
   logger.info(`[${requestId}] Google Calendar calendars request received`)

   try {
-    // Get the credential ID from the query params
     const { searchParams } = new URL(request.url)
     const credentialId = searchParams.get('credentialId')
     const workflowId = searchParams.get('workflowId') || undefined
@@ -34,12 +34,25 @@ export async function GET(request: NextRequest) {
       logger.warn(`[${requestId}] Missing credentialId parameter`)
       return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
     }

+    const credentialValidation = validateUUID(credentialId, 'credentialId')
+    if (!credentialValidation.isValid) {
+      logger.warn(`[${requestId}] Invalid credentialId format`, { credentialId })
+      return NextResponse.json({ error: credentialValidation.error }, { status: 400 })
+    }
+
+    if (workflowId) {
+      const workflowValidation = validateUUID(workflowId, 'workflowId')
+      if (!workflowValidation.isValid) {
+        logger.warn(`[${requestId}] Invalid workflowId format`, { workflowId })
+        return NextResponse.json({ error: workflowValidation.error }, { status: 400 })
+      }
+    }
     const authz = await authorizeCredentialUse(request, { credentialId, workflowId })
     if (!authz.ok || !authz.credentialOwnerUserId) {
       return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 })
     }

-    // Refresh access token if needed using the utility function
     const accessToken = await refreshAccessTokenIfNeeded(
       credentialId,
       authz.credentialOwnerUserId,
@@ -50,7 +63,6 @@ export async function GET(request: NextRequest) {
       return NextResponse.json({ error: 'Failed to obtain valid access token' }, { status: 401 })
     }

-    // Fetch calendars from Google Calendar API
     logger.info(`[${requestId}] Fetching calendars from Google Calendar API`)
     const calendarResponse = await fetch(
       'https://www.googleapis.com/calendar/v3/users/me/calendarList',
@@ -81,7 +93,6 @@ export async function GET(request: NextRequest) {
     const data = await calendarResponse.json()
     const calendars: CalendarListItem[] = data.items || []

-    // Sort calendars with primary first, then alphabetically
     calendars.sort((a, b) => {
       if (a.primary && !b.primary) return -1
       if (!a.primary && b.primary) return 1
@@ -20,6 +20,12 @@ export async function POST(request: Request) {
     cloudId: providedCloudId,
     issueType,
     parent,
+    labels,
+    duedate,
+    reporter,
+    environment,
+    customFieldId,
+    customFieldValue,
   } = await request.json()

   if (!domain) {
@@ -94,17 +100,57 @@ export async function POST(request: Request) {
   }

   if (priority !== undefined && priority !== null && priority !== '') {
-    fields.priority = {
-      name: priority,
+    const isNumericId = /^\d+$/.test(priority)
+    fields.priority = isNumericId ? { id: priority } : { name: priority }
+  }
+
+  if (labels !== undefined && labels !== null && Array.isArray(labels) && labels.length > 0) {
+    fields.labels = labels
+  }
+
+  if (duedate !== undefined && duedate !== null && duedate !== '') {
+    fields.duedate = duedate
+  }
+
+  if (reporter !== undefined && reporter !== null && reporter !== '') {
+    fields.reporter = {
+      id: reporter,
     }
   }

-  if (assignee !== undefined && assignee !== null && assignee !== '') {
-    fields.assignee = {
-      id: assignee,
+  if (environment !== undefined && environment !== null && environment !== '') {
+    fields.environment = {
+      type: 'doc',
+      version: 1,
+      content: [
+        {
+          type: 'paragraph',
+          content: [
+            {
+              type: 'text',
+              text: environment,
+            },
+          ],
+        },
+      ],
     }
   }

+  if (
+    customFieldId !== undefined &&
+    customFieldId !== null &&
+    customFieldId !== '' &&
+    customFieldValue !== undefined &&
+    customFieldValue !== null &&
+    customFieldValue !== ''
+  ) {
+    const fieldId = customFieldId.startsWith('customfield_')
+      ? customFieldId
+      : `customfield_${customFieldId}`
+
+    fields[fieldId] = customFieldValue
+  }
+
   const body = { fields }

   const response = await fetch(url, {
@@ -132,16 +178,47 @@ export async function POST(request: Request) {
   }

   const responseData = await response.json()
-  logger.info('Successfully created Jira issue:', responseData.key)
+  const issueKey = responseData.key || 'unknown'
+  logger.info('Successfully created Jira issue:', issueKey)
+
+  let assigneeId: string | undefined
+  if (assignee !== undefined && assignee !== null && assignee !== '') {
+    const assignUrl = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${issueKey}/assignee`
+    logger.info('Assigning issue to:', assignee)
+
+    const assignResponse = await fetch(assignUrl, {
+      method: 'PUT',
+      headers: {
+        Authorization: `Bearer ${accessToken}`,
+        Accept: 'application/json',
+        'Content-Type': 'application/json',
+      },
+      body: JSON.stringify({
+        accountId: assignee,
+      }),
+    })
+
+    if (!assignResponse.ok) {
+      const assignErrorText = await assignResponse.text()
+      logger.warn('Failed to assign issue (issue was created successfully):', {
+        status: assignResponse.status,
+        error: assignErrorText,
+      })
+    } else {
+      assigneeId = assignee
+      logger.info('Successfully assigned issue to:', assignee)
+    }
+  }

   return NextResponse.json({
     success: true,
     output: {
       ts: new Date().toISOString(),
-      issueKey: responseData.key || 'unknown',
+      issueKey: issueKey,
       summary: responseData.fields?.summary || 'Issue created',
       success: true,
-      url: `https://${domain}/browse/${responseData.key}`,
+      url: `https://${domain}/browse/${issueKey}`,
+      ...(assigneeId && { assigneeId }),
     },
   })
 } catch (error: any) {
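A minimal sketch of a request body for the extended Jira issue route above; only the fields visible in these hunks are shown, the concrete values are placeholders, and any other required fields of the route are omitted here because they are not part of this diff.

// Hypothetical payload for the Jira issue-creation route shown above. Numeric priorities are sent
// as { id }, non-numeric as { name }; assignee now triggers a follow-up PUT to the assignee endpoint;
// customFieldId is normalized to the customfield_ prefix server-side.
const createIssuePayload = {
  domain: 'example.atlassian.net',          // placeholder Jira site
  cloudId: 'your-cloud-id',                 // placeholder
  issueType: 'Bug',
  priority: '2',
  labels: ['checkout', 'regression'],
  duedate: '2025-01-31',
  reporter: 'reporter-account-id',
  assignee: 'assignee-account-id',
  environment: 'Production, eu-west-1',
  customFieldId: '10042',
  customFieldValue: 'Tier 1',
  // ...plus the route's existing required fields, which are not shown in this hunk.
}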
@@ -1,5 +1,6 @@
 import { NextResponse } from 'next/server'
 import { authorizeCredentialUse } from '@/lib/auth/credential-access'
+import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
 import { createLogger } from '@/lib/logs/console/logger'
 import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'

@@ -23,6 +24,12 @@ export async function POST(request: Request) {
     return NextResponse.json({ error: 'Team ID is required' }, { status: 400 })
   }

+  const teamIdValidation = validateMicrosoftGraphId(teamId, 'Team ID')
+  if (!teamIdValidation.isValid) {
+    logger.warn('Invalid team ID provided', { teamId, error: teamIdValidation.error })
+    return NextResponse.json({ error: teamIdValidation.error }, { status: 400 })
+  }
+
   try {
     const authz = await authorizeCredentialUse(request as any, {
       credentialId: credential,
@@ -70,7 +77,6 @@ export async function POST(request: Request) {
       endpoint: `https://graph.microsoft.com/v1.0/teams/${teamId}/channels`,
     })

-    // Check for auth errors specifically
     if (response.status === 401) {
       return NextResponse.json(
         {
@@ -93,7 +99,6 @@ export async function POST(request: Request) {
   } catch (innerError) {
     logger.error('Error during API requests:', innerError)

-    // Check if it's an authentication error
     const errorMessage = innerError instanceof Error ? innerError.message : String(innerError)
     if (
       errorMessage.includes('auth') ||
@@ -1,5 +1,6 @@
 import { NextResponse } from 'next/server'
 import { authorizeCredentialUse } from '@/lib/auth/credential-access'
+import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
 import { createLogger } from '@/lib/logs/console/logger'
 import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'

@@ -7,21 +8,35 @@ export const dynamic = 'force-dynamic'

 const logger = createLogger('TeamsChatsAPI')

-// Helper function to get chat members and create a meaningful name
+/**
+ * Helper function to get chat members and create a meaningful name
+ *
+ * @param chatId - Microsoft Teams chat ID to get display name for
+ * @param accessToken - Access token for Microsoft Graph API
+ * @param chatTopic - Optional existing chat topic
+ * @returns A meaningful display name for the chat
+ */
 const getChatDisplayName = async (
   chatId: string,
   accessToken: string,
   chatTopic?: string
 ): Promise<string> => {
   try {
-    // If the chat already has a topic, use it
+    const chatIdValidation = validateMicrosoftGraphId(chatId, 'chatId')
+    if (!chatIdValidation.isValid) {
+      logger.warn('Invalid chat ID in getChatDisplayName', {
+        error: chatIdValidation.error,
+        chatId: chatId.substring(0, 50),
+      })
+      return `Chat ${chatId.substring(0, 8)}...`
+    }
+
     if (chatTopic?.trim() && chatTopic !== 'null') {
       return chatTopic
     }

-    // Fetch chat members to create a meaningful name
     const membersResponse = await fetch(
-      `https://graph.microsoft.com/v1.0/chats/${chatId}/members`,
+      `https://graph.microsoft.com/v1.0/chats/${encodeURIComponent(chatId)}/members`,
       {
         method: 'GET',
         headers: {

@@ -35,27 +50,25 @@ const getChatDisplayName = async (
     const membersData = await membersResponse.json()
     const members = membersData.value || []

-    // Filter out the current user and get display names
     const memberNames = members
       .filter((member: any) => member.displayName && member.displayName !== 'Unknown')
       .map((member: any) => member.displayName)
-      .slice(0, 3) // Limit to first 3 names to avoid very long names
+      .slice(0, 3)

     if (memberNames.length > 0) {
       if (memberNames.length === 1) {
-        return memberNames[0] // 1:1 chat
+        return memberNames[0]
       }
       if (memberNames.length === 2) {
-        return memberNames.join(' & ') // 2-person group
+        return memberNames.join(' & ')
       }
-      return `${memberNames.slice(0, 2).join(', ')} & ${memberNames.length - 2} more` // Larger group
+      return `${memberNames.slice(0, 2).join(', ')} & ${memberNames.length - 2} more`
     }
   }

-    // Fallback: try to get a better name from recent messages
     try {
       const messagesResponse = await fetch(
-        `https://graph.microsoft.com/v1.0/chats/${chatId}/messages?$top=10&$orderby=createdDateTime desc`,
+        `https://graph.microsoft.com/v1.0/chats/${encodeURIComponent(chatId)}/messages?$top=10&$orderby=createdDateTime desc`,
        {
          method: 'GET',
          headers: {

@@ -69,14 +82,12 @@ const getChatDisplayName = async (
       const messagesData = await messagesResponse.json()
       const messages = messagesData.value || []

-      // Look for chat rename events
       for (const message of messages) {
         if (message.eventDetail?.chatDisplayName) {
           return message.eventDetail.chatDisplayName
         }
       }

-      // Get unique sender names from recent messages as last resort
       const senderNames = [
         ...new Set(
           messages

@@ -103,7 +114,6 @@ const getChatDisplayName = async (
       )
     }

-    // Final fallback
     return `Chat ${chatId.split(':')[0] || chatId.substring(0, 8)}...`
   } catch (error) {
     logger.warn(

@@ -146,7 +156,6 @@ export async function POST(request: Request) {
       return NextResponse.json({ error: 'Could not retrieve access token' }, { status: 401 })
     }

-    // Now try to fetch the chats
     const response = await fetch('https://graph.microsoft.com/v1.0/me/chats', {
       method: 'GET',
       headers: {

@@ -163,7 +172,6 @@
       endpoint: 'https://graph.microsoft.com/v1.0/me/chats',
     })

-    // Check for auth errors specifically
     if (response.status === 401) {
       return NextResponse.json(
         {

@@ -179,7 +187,6 @@

     const data = await response.json()

-    // Process chats with enhanced display names
     const chats = await Promise.all(
       data.value.map(async (chat: any) => ({
         id: chat.id,

@@ -193,7 +200,6 @@
   } catch (innerError) {
     logger.error('Error during API requests:', innerError)

-    // Check if it's an authentication error
     const errorMessage = innerError instanceof Error ? innerError.message : String(innerError)
     if (
       errorMessage.includes('auth') ||
@@ -30,23 +30,41 @@ export async function createMongoDBConnection(config: MongoDBConnectionConfig) {
   return client
 }

+/**
+ * Recursively checks an object for dangerous MongoDB operators
+ * @param obj - The object to check
+ * @param dangerousOperators - Array of operator names to block
+ * @returns true if a dangerous operator is found
+ */
+function containsDangerousOperator(obj: unknown, dangerousOperators: string[]): boolean {
+  if (typeof obj !== 'object' || obj === null) return false
+
+  for (const key of Object.keys(obj as Record<string, unknown>)) {
+    if (dangerousOperators.includes(key)) return true
+    if (
+      typeof (obj as Record<string, unknown>)[key] === 'object' &&
+      containsDangerousOperator((obj as Record<string, unknown>)[key], dangerousOperators)
+    ) {
+      return true
+    }
+  }
+  return false
+}
+
 export function validateFilter(filter: string): { isValid: boolean; error?: string } {
   try {
     const parsed = JSON.parse(filter)

-    const dangerousOperators = ['$where', '$regex', '$expr', '$function', '$accumulator', '$let']
+    const dangerousOperators = [
+      '$where', // Executes arbitrary JavaScript
+      '$regex', // Can cause ReDoS attacks
+      '$expr', // Expression evaluation
+      '$function', // Custom JavaScript functions
+      '$accumulator', // Custom JavaScript accumulators
+      '$let', // Variable definitions that could be exploited
+    ]

-    const checkForDangerousOps = (obj: any): boolean => {
-      if (typeof obj !== 'object' || obj === null) return false
-
-      for (const key of Object.keys(obj)) {
-        if (dangerousOperators.includes(key)) return true
-        if (typeof obj[key] === 'object' && checkForDangerousOps(obj[key])) return true
-      }
-      return false
-    }
-
-    if (checkForDangerousOps(parsed)) {
+    if (containsDangerousOperator(parsed, dangerousOperators)) {
       return {
         isValid: false,
         error: 'Filter contains potentially dangerous operators',

@@ -74,29 +92,19 @@ export function validatePipeline(pipeline: string): { isValid: boolean; error?:
   }

   const dangerousOperators = [
-    '$where',
-    '$function',
-    '$accumulator',
-    '$let',
-    '$merge',
-    '$out',
-    '$currentOp',
-    '$listSessions',
-    '$listLocalSessions',
+    '$where', // Executes arbitrary JavaScript
+    '$function', // Custom JavaScript functions
+    '$accumulator', // Custom JavaScript accumulators
+    '$let', // Variable definitions that could be exploited
+    '$merge', // Writes to external collections
+    '$out', // Writes to external collections
+    '$currentOp', // Exposes system operation info
+    '$listSessions', // Exposes session info
+    '$listLocalSessions', // Exposes local session info
   ]

-  const checkPipelineStage = (stage: any): boolean => {
-    if (typeof stage !== 'object' || stage === null) return false
-
-    for (const key of Object.keys(stage)) {
-      if (dangerousOperators.includes(key)) return true
-      if (typeof stage[key] === 'object' && checkPipelineStage(stage[key])) return true
-    }
-    return false
-  }
-
   for (const stage of parsed) {
-    if (checkPipelineStage(stage)) {
+    if (containsDangerousOperator(stage, dangerousOperators)) {
       return {
         isValid: false,
         error: 'Pipeline contains potentially dangerous operators',
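Both validators now delegate to the shared recursive check instead of carrying their own inline closures. A self-contained sketch of how that scan behaves on nested filters (the helper is re-declared here so it can run outside the Sim codebase):

```typescript
// Standalone re-implementation of the recursive operator scan shown above.
function containsDangerousOperator(obj: unknown, dangerousOperators: string[]): boolean {
  if (typeof obj !== 'object' || obj === null) return false
  for (const key of Object.keys(obj as Record<string, unknown>)) {
    if (dangerousOperators.includes(key)) return true
    if (
      typeof (obj as Record<string, unknown>)[key] === 'object' &&
      containsDangerousOperator((obj as Record<string, unknown>)[key], dangerousOperators)
    ) {
      return true
    }
  }
  return false
}

const blocked = ['$where', '$regex', '$expr', '$function', '$accumulator', '$let']

// Nested operators are caught, not just top-level ones.
console.log(containsDangerousOperator({ name: 'sim' }, blocked))                       // false
console.log(containsDangerousOperator({ $where: 'this.a == 1' }, blocked))             // true
console.log(containsDangerousOperator({ $or: [{ name: { $regex: '^a' } }] }, blocked)) // true
```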
@@ -98,15 +98,45 @@ export function buildDeleteQuery(table: string, where: string)
   return { query, values: [] }
 }

+/**
+ * Validates a WHERE clause to prevent SQL injection attacks
+ * @param where - The WHERE clause string to validate
+ * @throws {Error} If the WHERE clause contains potentially dangerous patterns
+ */
 function validateWhereClause(where: string): void {
   const dangerousPatterns = [
+    // DDL and DML injection via stacked queries
     /;\s*(drop|delete|insert|update|create|alter|grant|revoke)/i,
-    /union\s+select/i,
+    // Union-based injection
+    /union\s+(all\s+)?select/i,
+    // File operations
     /into\s+outfile/i,
-    /load_file/i,
+    /into\s+dumpfile/i,
+    /load_file\s*\(/i,
+    // Comment-based injection (can truncate query)
     /--/,
     /\/\*/,
     /\*\//,
+    // Tautologies - always true/false conditions using backreferences
+    // Matches OR 'x'='x' or OR x=x (same value both sides) but NOT OR col='value'
+    /\bor\s+(['"]?)(\w+)\1\s*=\s*\1\2\1/i,
+    /\bor\s+true\b/i,
+    /\bor\s+false\b/i,
+    // AND tautologies (less common but still used in attacks)
+    /\band\s+(['"]?)(\w+)\1\s*=\s*\1\2\1/i,
+    /\band\s+true\b/i,
+    /\band\s+false\b/i,
+    // Time-based blind injection
+    /\bsleep\s*\(/i,
+    /\bbenchmark\s*\(/i,
+    /\bwaitfor\s+delay/i,
+    // Stacked queries (any statement after semicolon)
+    /;\s*\w+/,
+    // Information schema queries
+    /information_schema/i,
+    /mysql\./i,
+    // System functions and procedures
+    /\bxp_cmdshell/i,
   ]

   for (const pattern of dangerousPatterns) {
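The backreference tautology patterns are the subtle part of this list: `\1` pins the quote style and `\2` pins the value, so only same-value comparisons are flagged. A small standalone check of that behaviour:

```typescript
// Illustration of the backreference-based OR-tautology pattern added above.
// The same quote character and the same token must appear on both sides of '='.
const orTautology = /\bor\s+(['"]?)(\w+)\1\s*=\s*\1\2\1/i

console.log(orTautology.test("status = 1 OR 'x'='x'"))      // true  - classic injection
console.log(orTautology.test('status = 1 OR 1=1'))          // true  - same value both sides
console.log(orTautology.test("status = 1 OR role='admin'")) // false - legitimate comparison
```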
@@ -4,6 +4,7 @@ import { account } from '@sim/db/schema'
 import { eq } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
 import { getSession } from '@/lib/auth'
+import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
 import { createLogger } from '@/lib/logs/console/logger'
 import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'

@@ -36,6 +37,12 @@ export async function GET(request: NextRequest) {
     return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
   }

+  const credentialIdValidation = validateMicrosoftGraphId(credentialId, 'credentialId')
+  if (!credentialIdValidation.isValid) {
+    logger.warn(`[${requestId}] Invalid credential ID`, { error: credentialIdValidation.error })
+    return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 })
+  }
+
   logger.info(`[${requestId}] Fetching credential`, { credentialId })

   const credentials = await db.select().from(account).where(eq(account.id, credentialId)).limit(1)
@@ -4,6 +4,7 @@ import { account } from '@sim/db/schema'
 import { eq } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
 import { getSession } from '@/lib/auth'
+import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
 import { createLogger } from '@/lib/logs/console/logger'
 import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'

@@ -33,6 +34,12 @@ export async function GET(request: NextRequest) {
     return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
   }

+  const credentialIdValidation = validateMicrosoftGraphId(credentialId, 'credentialId')
+  if (!credentialIdValidation.isValid) {
+    logger.warn(`[${requestId}] Invalid credential ID`, { error: credentialIdValidation.error })
+    return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 })
+  }
+
   const credentials = await db.select().from(account).where(eq(account.id, credentialId)).limit(1)
   if (!credentials.length) {
     return NextResponse.json({ error: 'Credential not found' }, { status: 404 })

@@ -48,7 +55,6 @@
     return NextResponse.json({ error: 'Failed to obtain valid access token' }, { status: 401 })
   }

-  // Build URL for OneDrive folders
   let url = `https://graph.microsoft.com/v1.0/me/drive/root/children?$filter=folder ne null&$select=id,name,folder,webUrl,createdDateTime,lastModifiedDateTime&$top=50`

   if (query) {

@@ -71,7 +77,7 @@

   const data = await response.json()
   const folders = (data.value || [])
-    .filter((item: MicrosoftGraphDriveItem) => item.folder) // Only folders
+    .filter((item: MicrosoftGraphDriveItem) => item.folder)
     .map((folder: MicrosoftGraphDriveItem) => ({
       id: folder.id,
       name: folder.name,
@@ -2,6 +2,7 @@ import { type NextRequest, NextResponse } from 'next/server'
 import * as XLSX from 'xlsx'
 import { z } from 'zod'
 import { checkHybridAuth } from '@/lib/auth/hybrid'
+import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { createLogger } from '@/lib/logs/console/logger'
 import {

@@ -28,9 +29,9 @@ const ExcelValuesSchema = z.union([
 const OneDriveUploadSchema = z.object({
   accessToken: z.string().min(1, 'Access token is required'),
   fileName: z.string().min(1, 'File name is required'),
-  file: z.any().optional(), // UserFile object (optional for blank Excel creation)
+  file: z.any().optional(),
   folderId: z.string().optional().nullable(),
-  mimeType: z.string().nullish(), // Accept string, null, or undefined
+  mimeType: z.string().nullish(),
   values: ExcelValuesSchema.optional().nullable(),
 })

@@ -62,24 +63,19 @@ export async function POST(request: NextRequest) {
     let fileBuffer: Buffer
     let mimeType: string

-    // Check if we're creating a blank Excel file
     const isExcelCreation =
       validatedData.mimeType ===
         'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' && !validatedData.file

     if (isExcelCreation) {
-      // Create a blank Excel workbook
       const workbook = XLSX.utils.book_new()
       const worksheet = XLSX.utils.aoa_to_sheet([[]])
       XLSX.utils.book_append_sheet(workbook, worksheet, 'Sheet1')

-      // Generate XLSX file as buffer
       const xlsxBuffer = XLSX.write(workbook, { type: 'buffer', bookType: 'xlsx' })
       fileBuffer = Buffer.from(xlsxBuffer)
       mimeType = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
     } else {
-      // Handle regular file upload
       const rawFile = validatedData.file

       if (!rawFile) {

@@ -108,7 +104,6 @@
       fileToProcess = rawFile
     }

-    // Convert to UserFile format
     let userFile
     try {
       userFile = processSingleFileToUserFile(fileToProcess, requestId, logger)

@@ -138,7 +133,7 @@
       mimeType = userFile.type || 'application/octet-stream'
     }

-    const maxSize = 250 * 1024 * 1024 // 250MB
+    const maxSize = 250 * 1024 * 1024
     if (fileBuffer.length > maxSize) {
       const sizeMB = (fileBuffer.length / (1024 * 1024)).toFixed(2)
       logger.warn(`[${requestId}] File too large: ${sizeMB}MB`)

@@ -151,7 +146,6 @@
       )
     }

-    // Ensure file name has an appropriate extension
     let fileName = validatedData.fileName
     const hasExtension = fileName.includes('.') && fileName.lastIndexOf('.') > 0

@@ -169,6 +163,17 @@
     const folderId = validatedData.folderId?.trim()

     if (folderId && folderId !== '') {
+      const folderIdValidation = validateMicrosoftGraphId(folderId, 'folderId')
+      if (!folderIdValidation.isValid) {
+        logger.warn(`[${requestId}] Invalid folder ID`, { error: folderIdValidation.error })
+        return NextResponse.json(
+          {
+            success: false,
+            error: folderIdValidation.error,
+          },
+          { status: 400 }
+        )
+      }
       uploadUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(folderId)}:/${encodeURIComponent(fileName)}:/content`
     } else {
       uploadUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/root:/${encodeURIComponent(fileName)}:/content`

@@ -197,14 +202,12 @@

     const fileData = await uploadResponse.json()

-    // If this is an Excel creation and values were provided, write them using the Excel API
     let excelWriteResult: any | undefined
     const shouldWriteExcelContent =
       isExcelCreation && Array.isArray(excelValues) && excelValues.length > 0

     if (shouldWriteExcelContent) {
       try {
-        // Create a workbook session to ensure reliability and persistence of changes
         let workbookSessionId: string | undefined
         const sessionResp = await fetch(
           `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(fileData.id)}/workbook/createSession`,

@@ -223,7 +226,6 @@
           workbookSessionId = sessionData?.id
         }

-        // Determine the first worksheet name
         let sheetName = 'Sheet1'
         try {
           const listUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(

@@ -272,7 +274,6 @@
           return paddedRow
         })

-        // Compute concise end range from A1 and matrix size (no network round-trip)
         const indexToColLetters = (index: number): string => {
           let n = index
           let s = ''

@@ -313,7 +314,6 @@
             statusText: excelWriteResponse?.statusText,
             error: errorText,
           })
-          // Do not fail the entire request; return upload success with write error details
           excelWriteResult = {
             success: false,
             error: `Excel write failed: ${excelWriteResponse?.statusText || 'unknown'}`,

@@ -321,7 +321,6 @@
           }
         } else {
           const writeData = await excelWriteResponse.json()
-          // The Range PATCH returns a Range object; log address and values length
           const addr = writeData.address || writeData.addressLocal
           const v = writeData.values || []
           excelWriteResult = {

@@ -333,7 +332,6 @@
           }
         }

-        // Attempt to close the workbook session if one was created
         if (workbookSessionId) {
           try {
             const closeResp = await fetch(
@@ -3,6 +3,7 @@ import { account } from '@sim/db/schema'
 import { eq } from 'drizzle-orm'
 import { NextResponse } from 'next/server'
 import { getSession } from '@/lib/auth'
+import { validateAlphanumericId } from '@/lib/core/security/input-validation'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { createLogger } from '@/lib/logs/console/logger'
 import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'

@@ -29,8 +30,13 @@ export async function GET(request: Request) {
     return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
   }

+  const credentialIdValidation = validateAlphanumericId(credentialId, 'credentialId')
+  if (!credentialIdValidation.isValid) {
+    logger.warn('Invalid credentialId format', { error: credentialIdValidation.error })
+    return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 })
+  }
+
   try {
-    // Ensure we have a session for permission checks
     const sessionUserId = session?.user?.id || ''

     if (!sessionUserId) {

@@ -38,7 +44,6 @@
       return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
     }

-    // Resolve the credential owner to support collaborator-owned credentials
     const creds = await db.select().from(account).where(eq(account.id, credentialId)).limit(1)
     if (!creds.length) {
       logger.warn('Credential not found', { credentialId })

@@ -79,7 +84,6 @@
       endpoint: 'https://graph.microsoft.com/v1.0/me/mailFolders',
     })

-    // Check for auth errors specifically
     if (response.status === 401) {
       return NextResponse.json(
         {

@@ -96,7 +100,6 @@
     const data = await response.json()
     const folders = data.value || []

-    // Transform folders to match the expected format
     const transformedFolders = folders.map((folder: OutlookFolder) => ({
       id: folder.id,
       name: folder.displayName,

@@ -111,7 +114,6 @@
   } catch (innerError) {
     logger.error('Error during API requests:', innerError)

-    // Check if it's an authentication error
     const errorMessage = innerError instanceof Error ? innerError.message : String(innerError)
     if (
       errorMessage.includes('auth') ||
@@ -64,15 +64,46 @@ export function sanitizeIdentifier(identifier: string): string {
   return sanitizeSingleIdentifier(identifier)
 }

+/**
+ * Validates a WHERE clause to prevent SQL injection attacks
+ * @param where - The WHERE clause string to validate
+ * @throws {Error} If the WHERE clause contains potentially dangerous patterns
+ */
 function validateWhereClause(where: string): void {
   const dangerousPatterns = [
+    // DDL and DML injection via stacked queries
     /;\s*(drop|delete|insert|update|create|alter|grant|revoke)/i,
-    /union\s+select/i,
+    // Union-based injection
+    /union\s+(all\s+)?select/i,
+    // File operations
     /into\s+outfile/i,
-    /load_file/i,
+    /load_file\s*\(/i,
+    /pg_read_file/i,
+    // Comment-based injection (can truncate query)
     /--/,
     /\/\*/,
     /\*\//,
+    // Tautologies - always true/false conditions using backreferences
+    // Matches OR 'x'='x' or OR x=x (same value both sides) but NOT OR col='value'
+    /\bor\s+(['"]?)(\w+)\1\s*=\s*\1\2\1/i,
+    /\bor\s+true\b/i,
+    /\bor\s+false\b/i,
+    // AND tautologies (less common but still used in attacks)
+    /\band\s+(['"]?)(\w+)\1\s*=\s*\1\2\1/i,
+    /\band\s+true\b/i,
+    /\band\s+false\b/i,
+    // Time-based blind injection
+    /\bsleep\s*\(/i,
+    /\bwaitfor\s+delay/i,
+    /\bpg_sleep\s*\(/i,
+    /\bbenchmark\s*\(/i,
+    // Stacked queries (any statement after semicolon)
+    /;\s*\w+/,
+    // Information schema / system catalog queries
+    /information_schema/i,
+    /pg_catalog/i,
+    // System functions and procedures
+    /\bxp_cmdshell/i,
   ]

   for (const pattern of dangerousPatterns) {
@@ -4,6 +4,7 @@ import { account } from '@sim/db/schema'
 import { eq } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
 import { getSession } from '@/lib/auth'
+import { validateAlphanumericId } from '@/lib/core/security/input-validation'
 import { createLogger } from '@/lib/logs/console/logger'
 import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
 import type { SharepointSite } from '@/tools/sharepoint/types'

@@ -32,6 +33,12 @@ export async function GET(request: NextRequest) {
     return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
   }

+  const credentialIdValidation = validateAlphanumericId(credentialId, 'credentialId', 255)
+  if (!credentialIdValidation.isValid) {
+    logger.warn(`[${requestId}] Invalid credential ID`, { error: credentialIdValidation.error })
+    return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 })
+  }
+
   const credentials = await db.select().from(account).where(eq(account.id, credentialId)).limit(1)
   if (!credentials.length) {
     return NextResponse.json({ error: 'Credential not found' }, { status: 404 })

@@ -47,8 +54,6 @@
     return NextResponse.json({ error: 'Failed to obtain valid access token' }, { status: 401 })
   }

-  // Build URL for SharePoint sites
-  // Use search=* to get all sites the user has access to, or search for specific query
   const searchQuery = query || '*'
   const url = `https://graph.microsoft.com/v1.0/sites?search=${encodeURIComponent(searchQuery)}&$select=id,name,displayName,webUrl,createdDateTime,lastModifiedDateTime&$top=50`
@@ -1,5 +1,6 @@
 import { NextResponse } from 'next/server'
 import { authorizeCredentialUse } from '@/lib/auth/credential-access'
+import { validateAlphanumericId } from '@/lib/core/security/input-validation'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { createLogger } from '@/lib/logs/console/logger'
 import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'

@@ -93,7 +94,6 @@ export async function POST(request: Request) {
       }
     }

-    // Filter to channels the bot can access and format the response
     const channels = (data.channels || [])
       .filter((channel: SlackChannel) => {
         const canAccess = !channel.is_archived && (channel.is_member || !channel.is_private)

@@ -106,6 +106,28 @@

         return canAccess
       })
+      .filter((channel: SlackChannel) => {
+        const validation = validateAlphanumericId(channel.id, 'channelId', 50)
+
+        if (!validation.isValid) {
+          logger.warn('Invalid channel ID received from Slack API', {
+            channelId: channel.id,
+            channelName: channel.name,
+            error: validation.error,
+          })
+          return false
+        }
+
+        if (!/^[CDG][A-Z0-9]+$/i.test(channel.id)) {
+          logger.warn('Channel ID does not match Slack format', {
+            channelId: channel.id,
+            channelName: channel.name,
+          })
+          return false
+        }
+
+        return true
+      })
       .map((channel: SlackChannel) => ({
         id: channel.id,
         name: channel.name,
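The second filter keeps only IDs that pass the generic alphanumeric check and that also look like Slack conversation IDs (conventionally `C…` for public channels, `D…` for direct messages, `G…` for groups). A standalone illustration of the format test:

```typescript
// Standalone version of the Slack conversation ID format check added above.
const slackChannelIdPattern = /^[CDG][A-Z0-9]+$/i

console.log(slackChannelIdPattern.test('C024BE91L'))   // true  - public channel style ID
console.log(slackChannelIdPattern.test('D0123456789')) // true  - DM style ID
console.log(slackChannelIdPattern.test('../etc'))      // false - filtered out before .map()
```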
@@ -14,7 +14,12 @@ const SlackReadMessagesSchema = z
     accessToken: z.string().min(1, 'Access token is required'),
     channel: z.string().optional().nullable(),
     userId: z.string().optional().nullable(),
-    limit: z.number().optional().nullable(),
+    limit: z.coerce
+      .number()
+      .min(1, 'Limit must be at least 1')
+      .max(15, 'Limit cannot exceed 15')
+      .optional()
+      .nullable(),
     oldest: z.string().optional().nullable(),
     latest: z.string().optional().nullable(),
   })

@@ -62,8 +67,8 @@ export async function POST(request: NextRequest) {

     const url = new URL('https://slack.com/api/conversations.history')
     url.searchParams.append('channel', channel!)
-    const limit = validatedData.limit ? Number(validatedData.limit) : 10
-    url.searchParams.append('limit', String(Math.min(limit, 15)))
+    const limit = validatedData.limit ?? 10
+    url.searchParams.append('limit', String(limit))

     if (validatedData.oldest) {
       url.searchParams.append('oldest', validatedData.oldest)
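Moving the bound into the schema is what lets the handler drop its manual `Math.min` clamp: `z.coerce.number()` converts string input before validating, and `.max(15)` rejects oversized limits at parse time instead of silently capping them. A minimal sketch with the same chain:

```typescript
import { z } from 'zod'

// Same validation chain as the schema above, shown in isolation.
const limitSchema = z.coerce
  .number()
  .min(1, 'Limit must be at least 1')
  .max(15, 'Limit cannot exceed 15')
  .optional()
  .nullable()

console.log(limitSchema.parse('5'))            // 5 - string input coerced to a number
console.log(limitSchema.parse(null))           // null - nullable short-circuits coercion
console.log(limitSchema.safeParse(50).success) // false - rejected instead of clamped
```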
@@ -1,5 +1,6 @@
 import { NextResponse } from 'next/server'
 import { authorizeCredentialUse } from '@/lib/auth/credential-access'
+import { validateAlphanumericId } from '@/lib/core/security/input-validation'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { createLogger } from '@/lib/logs/console/logger'
 import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'

@@ -20,13 +21,21 @@ export async function POST(request: Request) {
   try {
     const requestId = generateRequestId()
     const body = await request.json()
-    const { credential, workflowId } = body
+    const { credential, workflowId, userId } = body

     if (!credential) {
       logger.error('Missing credential in request')
       return NextResponse.json({ error: 'Credential is required' }, { status: 400 })
     }

+    if (userId !== undefined && userId !== null) {
+      const validation = validateAlphanumericId(userId, 'userId', 100)
+      if (!validation.isValid) {
+        logger.warn('Invalid Slack user ID', { userId, error: validation.error })
+        return NextResponse.json({ error: validation.error }, { status: 400 })
+      }
+    }
+
     let accessToken: string
     const isBotToken = credential.startsWith('xoxb-')

@@ -63,6 +72,17 @@
       logger.info('Using OAuth token for Slack API')
     }

+    if (userId) {
+      const userData = await fetchSlackUser(accessToken, userId)
+      const user = {
+        id: userData.user.id,
+        name: userData.user.name,
+        real_name: userData.user.real_name || userData.user.name,
+      }
+      logger.info(`Successfully fetched Slack user: ${userId}`)
+      return NextResponse.json({ user })
+    }
+
     const data = await fetchSlackUsers(accessToken)

     const users = (data.members || [])

@@ -87,6 +107,31 @@
   }
 }

+async function fetchSlackUser(accessToken: string, userId: string) {
+  const url = new URL('https://slack.com/api/users.info')
+  url.searchParams.append('user', userId)
+
+  const response = await fetch(url.toString(), {
+    method: 'GET',
+    headers: {
+      Authorization: `Bearer ${accessToken}`,
+      'Content-Type': 'application/json',
+    },
+  })
+
+  if (!response.ok) {
+    throw new Error(`Slack API error: ${response.status} ${response.statusText}`)
+  }
+
+  const data = await response.json()
+
+  if (!data.ok) {
+    throw new Error(data.error || 'Failed to fetch user')
+  }
+
+  return data
+}
+
 async function fetchSlackUsers(accessToken: string) {
   const url = new URL('https://slack.com/api/users.list')
   url.searchParams.append('limit', '200')
@@ -1,4 +1,7 @@
|
|||||||
import { type Attributes, Client, type ConnectConfig } from 'ssh2'
|
import { type Attributes, Client, type ConnectConfig } from 'ssh2'
|
||||||
|
import { createLogger } from '@/lib/logs/console/logger'
|
||||||
|
|
||||||
|
const logger = createLogger('SSHUtils')
|
||||||
|
|
||||||
// File type constants from POSIX
|
// File type constants from POSIX
|
||||||
const S_IFMT = 0o170000 // bit mask for the file type bit field
|
const S_IFMT = 0o170000 // bit mask for the file type bit field
|
||||||
@@ -32,7 +35,6 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
|
|||||||
const host = config.host
|
const host = config.host
|
||||||
const port = config.port
|
const port = config.port
|
||||||
|
|
||||||
// Connection refused - server not running or wrong port
|
|
||||||
if (errorMessage.includes('econnrefused') || errorMessage.includes('connection refused')) {
|
if (errorMessage.includes('econnrefused') || errorMessage.includes('connection refused')) {
|
||||||
return new Error(
|
return new Error(
|
||||||
`Connection refused to ${host}:${port}. ` +
|
`Connection refused to ${host}:${port}. ` +
|
||||||
@@ -42,7 +44,6 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Connection reset - server closed connection unexpectedly
|
|
||||||
if (errorMessage.includes('econnreset') || errorMessage.includes('connection reset')) {
|
if (errorMessage.includes('econnreset') || errorMessage.includes('connection reset')) {
|
||||||
return new Error(
|
return new Error(
|
||||||
`Connection reset by ${host}:${port}. ` +
|
`Connection reset by ${host}:${port}. ` +
|
||||||
@@ -53,7 +54,6 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Timeout - server unreachable or slow
|
|
||||||
if (errorMessage.includes('etimedout') || errorMessage.includes('timeout')) {
|
if (errorMessage.includes('etimedout') || errorMessage.includes('timeout')) {
|
||||||
return new Error(
|
return new Error(
|
||||||
`Connection timed out to ${host}:${port}. ` +
|
`Connection timed out to ${host}:${port}. ` +
|
||||||
@@ -63,7 +63,6 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
// DNS/hostname resolution
|
|
||||||
if (errorMessage.includes('enotfound') || errorMessage.includes('getaddrinfo')) {
|
if (errorMessage.includes('enotfound') || errorMessage.includes('getaddrinfo')) {
|
||||||
return new Error(
|
return new Error(
|
||||||
`Could not resolve hostname "${host}". ` +
|
`Could not resolve hostname "${host}". ` +
|
||||||
@@ -71,7 +70,6 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Authentication failure
|
|
||||||
if (errorMessage.includes('authentication') || errorMessage.includes('auth')) {
|
if (errorMessage.includes('authentication') || errorMessage.includes('auth')) {
|
||||||
return new Error(
|
return new Error(
|
||||||
`Authentication failed for user on ${host}:${port}. ` +
|
`Authentication failed for user on ${host}:${port}. ` +
|
||||||
@@ -81,7 +79,6 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Private key format issues
|
|
||||||
if (
|
if (
|
||||||
errorMessage.includes('key') &&
|
errorMessage.includes('key') &&
|
||||||
(errorMessage.includes('parse') || errorMessage.includes('invalid'))
|
(errorMessage.includes('parse') || errorMessage.includes('invalid'))
|
||||||
@@ -93,7 +90,6 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Host key verification (first connection)
|
|
||||||
if (errorMessage.includes('host key') || errorMessage.includes('hostkey')) {
|
if (errorMessage.includes('host key') || errorMessage.includes('hostkey')) {
|
||||||
return new Error(
|
return new Error(
|
||||||
`Host key verification issue for ${host}. ` +
|
`Host key verification issue for ${host}. ` +
|
||||||
@@ -101,7 +97,6 @@ function formatSSHError(err: Error, config: { host: string; port: number }): Err
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Return original error with context if no specific match
|
|
||||||
return new Error(`SSH connection to ${host}:${port} failed: ${err.message}`)
|
return new Error(`SSH connection to ${host}:${port} failed: ${err.message}`)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -205,19 +200,119 @@ export function executeSSHCommand(client: Client, command: string): Promise<SSHC
|
|||||||
|
|
||||||
/**
|
/**
|
||||||
* Sanitize command input to prevent command injection
|
* Sanitize command input to prevent command injection
|
||||||
|
*
|
||||||
|
* Removes null bytes and other dangerous control characters while preserving
|
||||||
|
* legitimate shell syntax. Logs warnings for potentially dangerous patterns.
|
||||||
|
*
|
||||||
|
* Note: This function does not block complex shell commands (pipes, redirects, etc.)
|
||||||
|
* as users legitimately need these features for remote command execution.
|
||||||
|
*
|
||||||
|
* @param command - The command to sanitize
|
||||||
|
* @returns The sanitized command string
|
||||||
|
*
|
||||||
|
* @example
|
||||||
|
* ```typescript
|
||||||
|
* const safeCommand = sanitizeCommand(userInput)
|
||||||
|
* // Use safeCommand for SSH execution
|
||||||
|
* ```
|
||||||
*/
|
*/
|
||||||
export function sanitizeCommand(command: string): string {
|
export function sanitizeCommand(command: string): string {
|
||||||
return command.trim()
|
let sanitized = command.replace(/\0/g, '')
|
||||||
|
|
||||||
|
sanitized = sanitized.replace(/[\x0B\x0C]/g, '')
|
||||||
|
|
||||||
|
sanitized = sanitized.trim()
|
||||||
|
|
||||||
|
const dangerousPatterns = [
|
||||||
|
{ pattern: /\$\(.*\)/, name: 'command substitution $()' },
|
||||||
|
{ pattern: /`.*`/, name: 'backtick command substitution' },
|
||||||
|
+    { pattern: /;\s*rm\s+-rf/i, name: 'destructive rm -rf command' },
+    { pattern: /;\s*dd\s+/i, name: 'dd command (disk operations)' },
+    { pattern: /mkfs/i, name: 'filesystem formatting command' },
+    { pattern: />\s*\/dev\/sd[a-z]/i, name: 'direct disk write' },
+  ]
+
+  for (const { pattern, name } of dangerousPatterns) {
+    if (pattern.test(sanitized)) {
+      logger.warn(`Command contains ${name}`, {
+        command: sanitized.substring(0, 100) + (sanitized.length > 100 ? '...' : ''),
+      })
+    }
+  }
+
+  return sanitized
 }

 /**
- * Sanitize file path - removes null bytes and trims whitespace
+ * Sanitize and validate file path to prevent path traversal attacks
+ *
+ * This function validates that a file path does not contain:
+ * - Null bytes
+ * - Path traversal sequences (.. or ../)
+ * - URL-encoded path traversal attempts
+ *
+ * @param path - The file path to sanitize and validate
+ * @returns The sanitized path if valid
+ * @throws Error if path traversal is detected
+ *
+ * @example
+ * ```typescript
+ * try {
+ *   const safePath = sanitizePath(userInput)
+ *   // Use safePath safely
+ * } catch (error) {
+ *   // Handle invalid path
+ * }
+ * ```
 */
 export function sanitizePath(path: string): string {
   let sanitized = path.replace(/\0/g, '')

   sanitized = sanitized.trim()

+  if (sanitized.includes('%00')) {
+    logger.warn('Path contains URL-encoded null bytes', {
+      path: path.substring(0, 100),
+    })
+    throw new Error('Path contains invalid characters')
+  }
+
+  const pathTraversalPatterns = [
+    '../', // Standard Unix path traversal
+    '..\\', // Windows path traversal
+    '/../', // Mid-path traversal
+    '\\..\\', // Windows mid-path traversal
+    '%2e%2e%2f', // Fully encoded ../
+    '%2e%2e/', // Partially encoded ../
+    '%2e%2e%5c', // Fully encoded ..\
+    '%2e%2e\\', // Partially encoded ..\
+    '..%2f', // .. with encoded /
+    '..%5c', // .. with encoded \
+    '%252e%252e', // Double URL encoded ..
+    '..%252f', // .. with double encoded /
+    '..%255c', // .. with double encoded \
+  ]
+
+  const lowerPath = sanitized.toLowerCase()
+  for (const pattern of pathTraversalPatterns) {
+    if (lowerPath.includes(pattern.toLowerCase())) {
+      logger.warn('Path traversal attempt detected', {
+        pattern,
+        path: path.substring(0, 100),
+      })
+      throw new Error('Path contains invalid path traversal sequences')
+    }
+  }
+
+  const segments = sanitized.split(/[/\\]/)
+  for (const segment of segments) {
+    if (segment === '..') {
+      logger.warn('Path traversal attempt detected (.. as path segment)', {
+        path: path.substring(0, 100),
+      })
+      throw new Error('Path contains invalid path traversal sequences')
+    }
+  }
+
   return sanitized
 }
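A minimal usage sketch of the new guard (illustrative only; the import path mirrors the validators referenced elsewhere in this commit and is an assumption, and the input paths are made up):

    import { sanitizePath } from '@/lib/core/security/input-validation' // assumed module path

    const safe = sanitizePath(' uploads/report.pdf ') // trimmed, null bytes stripped
    try {
      sanitizePath('uploads/../../etc/passwd') // contains a '..' segment
    } catch (error) {
      // throws 'Path contains invalid path traversal sequences'
    }
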
@@ -3,6 +3,7 @@ import { account } from '@sim/db/schema'
 import { eq } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
 import { getSession } from '@/lib/auth'
+import { validateEnum, validatePathSegment } from '@/lib/core/security/input-validation'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { createLogger } from '@/lib/logs/console/logger'
 import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -11,7 +12,6 @@ export const dynamic = 'force-dynamic'

 const logger = createLogger('WealthboxItemsAPI')

-// Interface for transformed Wealthbox items
 interface WealthboxItem {
   id: string
   name: string
@@ -45,12 +45,23 @@ export async function GET(request: NextRequest) {
     return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
   }

-  if (type !== 'contact') {
+  const credentialIdValidation = validatePathSegment(credentialId, {
+    paramName: 'credentialId',
+    maxLength: 100,
+    allowHyphens: true,
+    allowUnderscores: true,
+    allowDots: false,
+  })
+  if (!credentialIdValidation.isValid) {
+    logger.warn(`[${requestId}] Invalid credentialId format: ${credentialId}`)
+    return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 })
+  }
+
+  const ALLOWED_TYPES = ['contact'] as const
+  const typeValidation = validateEnum(type, ALLOWED_TYPES, 'type')
+  if (!typeValidation.isValid) {
     logger.warn(`[${requestId}] Invalid item type: ${type}`)
-    return NextResponse.json(
-      { error: 'Invalid item type. Only contact is supported.' },
-      { status: 400 }
-    )
+    return NextResponse.json({ error: typeValidation.error }, { status: 400 })
   }

   const credentials = await db.select().from(account).where(eq(account.id, credentialId)).limit(1)
@@ -1,5 +1,6 @@
 import { NextResponse } from 'next/server'
 import { authorizeCredentialUse } from '@/lib/auth/credential-access'
+import { validateAlphanumericId } from '@/lib/core/security/input-validation'
 import { generateRequestId } from '@/lib/core/utils/request'
 import { createLogger } from '@/lib/logs/console/logger'
 import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -12,13 +13,21 @@ export async function POST(request: Request) {
   try {
     const requestId = generateRequestId()
     const body = await request.json()
-    const { credential, workflowId } = body
+    const { credential, workflowId, siteId } = body

     if (!credential) {
       logger.error('Missing credential in request')
       return NextResponse.json({ error: 'Credential is required' }, { status: 400 })
     }

+    if (siteId) {
+      const siteIdValidation = validateAlphanumericId(siteId, 'siteId')
+      if (!siteIdValidation.isValid) {
+        logger.error('Invalid siteId', { error: siteIdValidation.error })
+        return NextResponse.json({ error: siteIdValidation.error }, { status: 400 })
+      }
+    }
+
     const authz = await authorizeCredentialUse(request as any, {
       credentialId: credential,
       workflowId,
@@ -46,7 +55,11 @@ export async function POST(request: Request) {
       )
     }

-    const response = await fetch('https://api.webflow.com/v2/sites', {
+    const url = siteId
+      ? `https://api.webflow.com/v2/sites/${siteId}`
+      : 'https://api.webflow.com/v2/sites'
+
+    const response = await fetch(url, {
       headers: {
         Authorization: `Bearer ${accessToken}`,
         accept: 'application/json',
@@ -58,6 +71,7 @@ export async function POST(request: Request) {
       logger.error('Failed to fetch Webflow sites', {
         status: response.status,
         error: errorData,
+        siteId: siteId || 'all',
       })
       return NextResponse.json(
         { error: 'Failed to fetch Webflow sites', details: errorData },
@@ -66,7 +80,13 @@ export async function POST(request: Request) {
     }

     const data = await response.json()
-    const sites = data.sites || []
+
+    let sites: any[]
+    if (siteId) {
+      sites = [data]
+    } else {
+      sites = data.sites || []
+    }

     const formattedSites = sites.map((site: any) => ({
       id: site.id,
@@ -32,7 +32,6 @@ export async function GET(req: NextRequest) {
     return NextResponse.json({ error: 'Missing email or token parameter' }, { status: 400 })
   }

-  // Verify token and get email type
   const tokenVerification = verifyUnsubscribeToken(email, token)
   if (!tokenVerification.valid) {
     logger.warn(`[${requestId}] Invalid unsubscribe token for email: ${email}`)
@@ -42,7 +41,6 @@ export async function GET(req: NextRequest) {
   const emailType = tokenVerification.emailType as EmailType
   const isTransactional = isTransactionalEmail(emailType)

-  // Get current preferences
   const preferences = await getEmailPreferences(email)

   logger.info(
@@ -67,22 +65,42 @@ export async function POST(req: NextRequest) {
   const requestId = generateRequestId()

   try {
-    const body = await req.json()
-    const result = unsubscribeSchema.safeParse(body)
+    const { searchParams } = new URL(req.url)
+    const contentType = req.headers.get('content-type') || ''

-    if (!result.success) {
-      logger.warn(`[${requestId}] Invalid unsubscribe POST data`, {
-        errors: result.error.format(),
-      })
-      return NextResponse.json(
-        { error: 'Invalid request data', details: result.error.format() },
-        { status: 400 }
-      )
+    let email: string
+    let token: string
+    let type: 'all' | 'marketing' | 'updates' | 'notifications' = 'all'
+
+    if (contentType.includes('application/x-www-form-urlencoded')) {
+      email = searchParams.get('email') || ''
+      token = searchParams.get('token') || ''
+
+      if (!email || !token) {
+        logger.warn(`[${requestId}] One-click unsubscribe missing email or token in URL`)
+        return NextResponse.json({ error: 'Missing email or token parameter' }, { status: 400 })
+      }
+
+      logger.info(`[${requestId}] Processing one-click unsubscribe for: ${email}`)
+    } else {
+      const body = await req.json()
+      const result = unsubscribeSchema.safeParse(body)
+
+      if (!result.success) {
+        logger.warn(`[${requestId}] Invalid unsubscribe POST data`, {
+          errors: result.error.format(),
+        })
+        return NextResponse.json(
+          { error: 'Invalid request data', details: result.error.format() },
+          { status: 400 }
+        )
+      }
+
+      email = result.data.email
+      token = result.data.token
+      type = result.data.type
     }

-    const { email, token, type } = result.data
-
-    // Verify token and get email type
     const tokenVerification = verifyUnsubscribeToken(email, token)
     if (!tokenVerification.valid) {
       logger.warn(`[${requestId}] Invalid unsubscribe token for email: ${email}`)
@@ -92,7 +110,6 @@ export async function POST(req: NextRequest) {
     const emailType = tokenVerification.emailType as EmailType
     const isTransactional = isTransactionalEmail(emailType)

-    // Prevent unsubscribing from transactional emails
     if (isTransactional) {
       logger.warn(`[${requestId}] Attempted to unsubscribe from transactional email: ${email}`)
       return NextResponse.json(
@@ -106,7 +123,6 @@ export async function POST(req: NextRequest) {
       )
     }

-    // Process unsubscribe based on type
     let success = false
     switch (type) {
       case 'all':
@@ -130,7 +146,6 @@ export async function POST(req: NextRequest) {

     logger.info(`[${requestId}] Successfully unsubscribed ${email} from ${type}`)

-    // Return 200 for one-click unsubscribe compliance
     return NextResponse.json(
       {
         success: true,
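A rough sketch of the request this branch is meant to accept. The endpoint path matches the client fetch later in this diff; the placeholder values and the form-encoded One-Click body are assumptions based on the usual one-click unsubscribe convention, and the handler reads email/token from the query string rather than the body:

    const email = 'user@example.com' // placeholder
    const token = 'signed-unsubscribe-token' // placeholder
    await fetch(
      `/api/users/me/settings/unsubscribe?email=${encodeURIComponent(email)}&token=${encodeURIComponent(token)}`,
      {
        method: 'POST',
        headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
        body: 'List-Unsubscribe=One-Click', // conventional one-click payload; ignored by the handler
      }
    )
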
|||||||
@@ -1,97 +0,0 @@
|
|||||||
import { db } from '@sim/db'
|
|
||||||
import { userStats, workflow } from '@sim/db/schema'
|
|
||||||
import { eq, sql } from 'drizzle-orm'
|
|
||||||
import { type NextRequest, NextResponse } from 'next/server'
|
|
||||||
import { z } from 'zod'
|
|
||||||
import { createLogger } from '@/lib/logs/console/logger'
|
|
||||||
|
|
||||||
const logger = createLogger('WorkflowStatsAPI')
|
|
||||||
|
|
||||||
const queryParamsSchema = z.object({
|
|
||||||
runs: z.coerce.number().int().min(1).max(100).default(1),
|
|
||||||
})
|
|
||||||
|
|
||||||
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
|
|
||||||
const { id } = await params
|
|
||||||
const searchParams = request.nextUrl.searchParams
|
|
||||||
|
|
||||||
const validation = queryParamsSchema.safeParse({
|
|
||||||
runs: searchParams.get('runs'),
|
|
||||||
})
|
|
||||||
|
|
||||||
if (!validation.success) {
|
|
||||||
logger.error(`Invalid query parameters: ${validation.error.message}`)
|
|
||||||
return NextResponse.json(
|
|
||||||
{
|
|
||||||
error:
|
|
||||||
validation.error.errors[0]?.message ||
|
|
||||||
'Invalid number of runs. Must be between 1 and 100.',
|
|
||||||
},
|
|
||||||
{ status: 400 }
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
const { runs } = validation.data
|
|
||||||
|
|
||||||
try {
|
|
||||||
const [workflowRecord] = await db.select().from(workflow).where(eq(workflow.id, id)).limit(1)
|
|
||||||
|
|
||||||
if (!workflowRecord) {
|
|
||||||
return NextResponse.json({ error: `Workflow ${id} not found` }, { status: 404 })
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
await db
|
|
||||||
.update(workflow)
|
|
||||||
.set({
|
|
||||||
runCount: workflowRecord.runCount + runs,
|
|
||||||
lastRunAt: new Date(),
|
|
||||||
})
|
|
||||||
.where(eq(workflow.id, id))
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error updating workflow runCount:', error)
|
|
||||||
throw error
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const userStatsRecords = await db
|
|
||||||
.select()
|
|
||||||
.from(userStats)
|
|
||||||
.where(eq(userStats.userId, workflowRecord.userId))
|
|
||||||
|
|
||||||
if (userStatsRecords.length === 0) {
|
|
||||||
await db.insert(userStats).values({
|
|
||||||
id: crypto.randomUUID(),
|
|
||||||
userId: workflowRecord.userId,
|
|
||||||
totalManualExecutions: 0,
|
|
||||||
totalApiCalls: 0,
|
|
||||||
totalWebhookTriggers: 0,
|
|
||||||
totalScheduledExecutions: 0,
|
|
||||||
totalChatExecutions: 0,
|
|
||||||
totalTokensUsed: 0,
|
|
||||||
totalCost: '0.00',
|
|
||||||
lastActive: sql`now()`,
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
await db
|
|
||||||
.update(userStats)
|
|
||||||
.set({
|
|
||||||
lastActive: sql`now()`,
|
|
||||||
})
|
|
||||||
.where(eq(userStats.userId, workflowRecord.userId))
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.error(`Error ensuring userStats for userId ${workflowRecord.userId}:`, error)
|
|
||||||
// Don't rethrow - we want to continue even if this fails
|
|
||||||
}
|
|
||||||
|
|
||||||
return NextResponse.json({
|
|
||||||
success: true,
|
|
||||||
runsAdded: runs,
|
|
||||||
newTotal: workflowRecord.runCount + runs,
|
|
||||||
})
|
|
||||||
} catch (error) {
|
|
||||||
logger.error('Error updating workflow stats:', error)
|
|
||||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -173,7 +173,7 @@ export async function GET(

 // DELETE /api/workspaces/invitations/[invitationId] - Delete a workspace invitation
 export async function DELETE(
-  _req: NextRequest,
+  _request: NextRequest,
   { params }: { params: Promise<{ invitationId: string }> }
 ) {
   const { invitationId } = await params
@@ -221,7 +221,7 @@ export async function DELETE(

 // POST /api/workspaces/invitations/[invitationId] - Resend a workspace invitation
 export async function POST(
-  _req: NextRequest,
+  _request: NextRequest,
   { params }: { params: Promise<{ invitationId: string }> }
 ) {
   const { invitationId } = await params
@@ -29,30 +29,24 @@ export const metadata: Metadata = {
     locale: 'en_US',
     images: [
       {
-        url: '/social/og-image.png',
-        width: 1200,
-        height: 630,
-        alt: 'Sim - Visual AI Workflow Builder',
+        url: '/logo/primary/rounded.png',
+        width: 512,
+        height: 512,
+        alt: 'Sim - AI Agent Workflow Builder',
         type: 'image/png',
       },
-      {
-        url: '/social/og-image-square.png',
-        width: 600,
-        height: 600,
-        alt: 'Sim Logo',
-      },
     ],
   },
   twitter: {
-    card: 'summary_large_image',
+    card: 'summary',
     site: '@simdotai',
     creator: '@simdotai',
     title: 'Sim - AI Agent Workflow Builder | Open Source',
     description:
       'Open-source platform for agentic workflows. 60,000+ developers. Visual builder. 100+ integrations. SOC2 & HIPAA compliant.',
     images: {
-      url: '/social/twitter-image.png',
-      alt: 'Sim - Visual AI Workflow Builder',
+      url: '/logo/primary/rounded.png',
+      alt: 'Sim - AI Agent Workflow Builder',
     },
   },
   alternates: {
@@ -77,7 +71,6 @@ export const metadata: Metadata = {
   category: 'technology',
   classification: 'AI Development Tools',
   referrer: 'origin-when-cross-origin',
-  // LLM SEO optimizations
   other: {
     'llm:content-type': 'AI workflow builder, visual programming, no-code AI development',
     'llm:use-cases':
|||||||
@@ -1,5 +1,88 @@
|
|||||||
|
import { db } from '@sim/db'
|
||||||
|
import { templateCreators, templates } from '@sim/db/schema'
|
||||||
|
import { eq } from 'drizzle-orm'
|
||||||
|
import type { Metadata } from 'next'
|
||||||
|
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||||
|
import { createLogger } from '@/lib/logs/console/logger'
|
||||||
import TemplateDetails from '@/app/templates/[id]/template'
|
import TemplateDetails from '@/app/templates/[id]/template'
|
||||||
|
|
||||||
|
const logger = createLogger('TemplateMetadata')
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate dynamic metadata for template pages.
|
||||||
|
* This provides OpenGraph images for social media sharing.
|
||||||
|
*/
|
||||||
|
export async function generateMetadata({
|
||||||
|
params,
|
||||||
|
}: {
|
||||||
|
params: Promise<{ id: string }>
|
||||||
|
}): Promise<Metadata> {
|
||||||
|
const { id } = await params
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = await db
|
||||||
|
.select({
|
||||||
|
template: templates,
|
||||||
|
creator: templateCreators,
|
||||||
|
})
|
||||||
|
.from(templates)
|
||||||
|
.leftJoin(templateCreators, eq(templates.creatorId, templateCreators.id))
|
||||||
|
.where(eq(templates.id, id))
|
||||||
|
.limit(1)
|
||||||
|
|
||||||
|
if (result.length === 0) {
|
||||||
|
return {
|
||||||
|
title: 'Template Not Found',
|
||||||
|
description: 'The requested template could not be found.',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const { template, creator } = result[0]
|
||||||
|
const baseUrl = getBaseUrl()
|
||||||
|
|
||||||
|
const details = template.details as { tagline?: string; about?: string } | null
|
||||||
|
const description = details?.tagline || 'AI workflow template on Sim'
|
||||||
|
|
||||||
|
const hasOgImage = !!template.ogImageUrl
|
||||||
|
const ogImageUrl = template.ogImageUrl || `${baseUrl}/logo/primary/rounded.png`
|
||||||
|
|
||||||
|
return {
|
||||||
|
title: template.name,
|
||||||
|
description,
|
||||||
|
openGraph: {
|
||||||
|
title: template.name,
|
||||||
|
description,
|
||||||
|
type: 'website',
|
||||||
|
url: `${baseUrl}/templates/${id}`,
|
||||||
|
siteName: 'Sim',
|
||||||
|
images: [
|
||||||
|
{
|
||||||
|
url: ogImageUrl,
|
||||||
|
width: hasOgImage ? 1200 : 512,
|
||||||
|
height: hasOgImage ? 630 : 512,
|
||||||
|
alt: `${template.name} - Workflow Preview`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
twitter: {
|
||||||
|
card: hasOgImage ? 'summary_large_image' : 'summary',
|
||||||
|
title: template.name,
|
||||||
|
description,
|
||||||
|
images: [ogImageUrl],
|
||||||
|
creator: creator?.details
|
||||||
|
? ((creator.details as Record<string, unknown>).xHandle as string) || undefined
|
||||||
|
: undefined,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to generate template metadata:', error)
|
||||||
|
return {
|
||||||
|
title: 'Template',
|
||||||
|
description: 'AI workflow template on Sim',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Public template detail page for unauthenticated users.
|
* Public template detail page for unauthenticated users.
|
||||||
* Authenticated-user redirect is handled in templates/[id]/layout.tsx.
|
* Authenticated-user redirect is handled in templates/[id]/layout.tsx.
|
||||||
|
|||||||
@@ -39,7 +39,6 @@ function UnsubscribeContent() {
       return
     }

-    // Validate the unsubscribe link
     fetch(
       `/api/users/me/settings/unsubscribe?email=${encodeURIComponent(email)}&token=${encodeURIComponent(token)}`
     )
@@ -81,9 +80,7 @@ function UnsubscribeContent() {

       if (result.success) {
         setUnsubscribed(true)
-        // Update the data to reflect the change
         if (data) {
-          // Type-safe property construction with validation
           const validTypes = ['all', 'marketing', 'updates', 'notifications'] as const
           if (validTypes.includes(type)) {
             if (type === 'all') {
@@ -192,7 +189,6 @@ function UnsubscribeContent() {
     )
   }

-  // Handle transactional emails
   if (data?.isTransactional) {
     return (
       <div className='flex min-h-screen items-center justify-center bg-background p-4'>
|||||||
@@ -1,8 +1,16 @@
|
|||||||
|
import { db } from '@sim/db'
|
||||||
|
import { templateCreators, templates } from '@sim/db/schema'
|
||||||
|
import { eq } from 'drizzle-orm'
|
||||||
|
import type { Metadata } from 'next'
|
||||||
import { redirect } from 'next/navigation'
|
import { redirect } from 'next/navigation'
|
||||||
import { getSession } from '@/lib/auth'
|
import { getSession } from '@/lib/auth'
|
||||||
|
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||||
|
import { createLogger } from '@/lib/logs/console/logger'
|
||||||
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
|
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
|
||||||
import TemplateDetails from '@/app/templates/[id]/template'
|
import TemplateDetails from '@/app/templates/[id]/template'
|
||||||
|
|
||||||
|
const logger = createLogger('WorkspaceTemplateMetadata')
|
||||||
|
|
||||||
interface TemplatePageProps {
|
interface TemplatePageProps {
|
||||||
params: Promise<{
|
params: Promise<{
|
||||||
workspaceId: string
|
workspaceId: string
|
||||||
@@ -10,6 +18,81 @@ interface TemplatePageProps {
|
|||||||
}>
|
}>
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate dynamic metadata for workspace template pages.
|
||||||
|
* This provides OpenGraph images for social media sharing.
|
||||||
|
*/
|
||||||
|
export async function generateMetadata({
|
||||||
|
params,
|
||||||
|
}: {
|
||||||
|
params: Promise<{ workspaceId: string; id: string }>
|
||||||
|
}): Promise<Metadata> {
|
||||||
|
const { workspaceId, id } = await params
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = await db
|
||||||
|
.select({
|
||||||
|
template: templates,
|
||||||
|
creator: templateCreators,
|
||||||
|
})
|
||||||
|
.from(templates)
|
||||||
|
.leftJoin(templateCreators, eq(templates.creatorId, templateCreators.id))
|
||||||
|
.where(eq(templates.id, id))
|
||||||
|
.limit(1)
|
||||||
|
|
||||||
|
if (result.length === 0) {
|
||||||
|
return {
|
||||||
|
title: 'Template Not Found',
|
||||||
|
description: 'The requested template could not be found.',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const { template, creator } = result[0]
|
||||||
|
const baseUrl = getBaseUrl()
|
||||||
|
|
||||||
|
const details = template.details as { tagline?: string; about?: string } | null
|
||||||
|
const description = details?.tagline || 'AI workflow template on Sim'
|
||||||
|
|
||||||
|
const hasOgImage = !!template.ogImageUrl
|
||||||
|
const ogImageUrl = template.ogImageUrl || `${baseUrl}/logo/primary/rounded.png`
|
||||||
|
|
||||||
|
return {
|
||||||
|
title: template.name,
|
||||||
|
description,
|
||||||
|
openGraph: {
|
||||||
|
title: template.name,
|
||||||
|
description,
|
||||||
|
type: 'website',
|
||||||
|
url: `${baseUrl}/workspace/${workspaceId}/templates/${id}`,
|
||||||
|
siteName: 'Sim',
|
||||||
|
images: [
|
||||||
|
{
|
||||||
|
url: ogImageUrl,
|
||||||
|
width: hasOgImage ? 1200 : 512,
|
||||||
|
height: hasOgImage ? 630 : 512,
|
||||||
|
alt: `${template.name} - Workflow Preview`,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
twitter: {
|
||||||
|
card: hasOgImage ? 'summary_large_image' : 'summary',
|
||||||
|
title: template.name,
|
||||||
|
description,
|
||||||
|
images: [ogImageUrl],
|
||||||
|
creator: creator?.details
|
||||||
|
? ((creator.details as Record<string, unknown>).xHandle as string) || undefined
|
||||||
|
: undefined,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
logger.error('Failed to generate workspace template metadata:', error)
|
||||||
|
return {
|
||||||
|
title: 'Template',
|
||||||
|
description: 'AI workflow template on Sim',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Workspace-scoped template detail page.
|
* Workspace-scoped template detail page.
|
||||||
* Requires authentication and workspace membership to access.
|
* Requires authentication and workspace membership to access.
|
||||||
@@ -19,12 +102,10 @@ export default async function TemplatePage({ params }: TemplatePageProps) {
|
|||||||
const { workspaceId, id } = await params
|
const { workspaceId, id } = await params
|
||||||
const session = await getSession()
|
const session = await getSession()
|
||||||
|
|
||||||
// Redirect unauthenticated users to public template detail page
|
|
||||||
if (!session?.user?.id) {
|
if (!session?.user?.id) {
|
||||||
redirect(`/templates/${id}`)
|
redirect(`/templates/${id}`)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Verify workspace membership
|
|
||||||
const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId)
|
const hasPermission = await verifyWorkspaceMembership(session.user.id, workspaceId)
|
||||||
if (!hasPermission) {
|
if (!hasPermission) {
|
||||||
redirect('/')
|
redirect('/')
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
'use client'
|
'use client'
|
||||||
|
|
||||||
import { useEffect, useState } from 'react'
|
import React, { useEffect, useRef, useState } from 'react'
|
||||||
import { Loader2 } from 'lucide-react'
|
import { Loader2 } from 'lucide-react'
|
||||||
import {
|
import {
|
||||||
Button,
|
Button,
|
||||||
@@ -18,6 +18,7 @@ import { Skeleton, TagInput } from '@/components/ui'
|
|||||||
import { useSession } from '@/lib/auth/auth-client'
|
import { useSession } from '@/lib/auth/auth-client'
|
||||||
import { cn } from '@/lib/core/utils/cn'
|
import { cn } from '@/lib/core/utils/cn'
|
||||||
import { createLogger } from '@/lib/logs/console/logger'
|
import { createLogger } from '@/lib/logs/console/logger'
|
||||||
|
import { captureAndUploadOGImage, OG_IMAGE_HEIGHT, OG_IMAGE_WIDTH } from '@/lib/og'
|
||||||
import { WorkflowPreview } from '@/app/workspace/[workspaceId]/w/components/workflow-preview/workflow-preview'
|
import { WorkflowPreview } from '@/app/workspace/[workspaceId]/w/components/workflow-preview/workflow-preview'
|
||||||
import {
|
import {
|
||||||
useCreateTemplate,
|
useCreateTemplate,
|
||||||
@@ -25,6 +26,7 @@ import {
|
|||||||
useTemplateByWorkflow,
|
useTemplateByWorkflow,
|
||||||
useUpdateTemplate,
|
useUpdateTemplate,
|
||||||
} from '@/hooks/queries/templates'
|
} from '@/hooks/queries/templates'
|
||||||
|
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||||
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||||
|
|
||||||
const logger = createLogger('TemplateDeploy')
|
const logger = createLogger('TemplateDeploy')
|
||||||
@@ -79,6 +81,9 @@ export function TemplateDeploy({
|
|||||||
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
|
const [showDeleteDialog, setShowDeleteDialog] = useState(false)
|
||||||
const [creatorOptions, setCreatorOptions] = useState<CreatorOption[]>([])
|
const [creatorOptions, setCreatorOptions] = useState<CreatorOption[]>([])
|
||||||
const [loadingCreators, setLoadingCreators] = useState(false)
|
const [loadingCreators, setLoadingCreators] = useState(false)
|
||||||
|
const [isCapturing, setIsCapturing] = useState(false)
|
||||||
|
const previewContainerRef = useRef<HTMLDivElement>(null)
|
||||||
|
const ogCaptureRef = useRef<HTMLDivElement>(null)
|
||||||
|
|
||||||
const [formData, setFormData] = useState<TemplateFormData>(initialFormData)
|
const [formData, setFormData] = useState<TemplateFormData>(initialFormData)
|
||||||
|
|
||||||
@@ -208,6 +213,8 @@ export function TemplateDeploy({
|
|||||||
tags: formData.tags,
|
tags: formData.tags,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let templateId: string
|
||||||
|
|
||||||
if (existingTemplate) {
|
if (existingTemplate) {
|
||||||
await updateMutation.mutateAsync({
|
await updateMutation.mutateAsync({
|
||||||
id: existingTemplate.id,
|
id: existingTemplate.id,
|
||||||
@@ -216,11 +223,32 @@ export function TemplateDeploy({
|
|||||||
updateState: true,
|
updateState: true,
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
templateId = existingTemplate.id
|
||||||
} else {
|
} else {
|
||||||
await createMutation.mutateAsync({ ...templateData, workflowId })
|
const result = await createMutation.mutateAsync({ ...templateData, workflowId })
|
||||||
|
templateId = result.id
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.info(`Template ${existingTemplate ? 'updated' : 'created'} successfully`)
|
logger.info(`Template ${existingTemplate ? 'updated' : 'created'} successfully`)
|
||||||
|
|
||||||
|
setIsCapturing(true)
|
||||||
|
requestAnimationFrame(() => {
|
||||||
|
requestAnimationFrame(async () => {
|
||||||
|
try {
|
||||||
|
if (ogCaptureRef.current) {
|
||||||
|
const ogUrl = await captureAndUploadOGImage(ogCaptureRef.current, templateId)
|
||||||
|
if (ogUrl) {
|
||||||
|
logger.info(`OG image uploaded for template ${templateId}: ${ogUrl}`)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (ogError) {
|
||||||
|
logger.warn('Failed to capture/upload OG image:', ogError)
|
||||||
|
} finally {
|
||||||
|
setIsCapturing(false)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
|
||||||
onDeploymentComplete?.()
|
onDeploymentComplete?.()
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('Failed to save template:', error)
|
logger.error('Failed to save template:', error)
|
||||||
@@ -275,6 +303,7 @@ export function TemplateDeploy({
|
|||||||
Live Template
|
Live Template
|
||||||
</Label>
|
</Label>
|
||||||
<div
|
<div
|
||||||
|
ref={previewContainerRef}
|
||||||
className='[&_*]:!cursor-default relative h-[260px] w-full cursor-default overflow-hidden rounded-[4px] border border-[var(--border)]'
|
className='[&_*]:!cursor-default relative h-[260px] w-full cursor-default overflow-hidden rounded-[4px] border border-[var(--border)]'
|
||||||
onWheelCapture={(e) => {
|
onWheelCapture={(e) => {
|
||||||
if (e.ctrlKey || e.metaKey) return
|
if (e.ctrlKey || e.metaKey) return
|
||||||
@@ -423,10 +452,65 @@ export function TemplateDeploy({
|
|||||||
</ModalFooter>
|
</ModalFooter>
|
||||||
</ModalContent>
|
</ModalContent>
|
||||||
</Modal>
|
</Modal>
|
||||||
|
|
||||||
|
{/* Hidden container for OG image capture */}
|
||||||
|
{isCapturing && <OGCaptureContainer ref={ogCaptureRef} />}
|
||||||
</div>
|
</div>
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Hidden container for OG image capture.
|
||||||
|
* Lazy-rendered only when capturing - gets workflow state from store on mount.
|
||||||
|
*/
|
||||||
|
const OGCaptureContainer = React.forwardRef<HTMLDivElement>((_, ref) => {
|
||||||
|
const blocks = useWorkflowStore((state) => state.blocks)
|
||||||
|
const edges = useWorkflowStore((state) => state.edges)
|
||||||
|
const loops = useWorkflowStore((state) => state.loops)
|
||||||
|
const parallels = useWorkflowStore((state) => state.parallels)
|
||||||
|
|
||||||
|
if (!blocks || Object.keys(blocks).length === 0) {
|
||||||
|
return null
|
||||||
|
}
|
||||||
|
|
||||||
|
const workflowState: WorkflowState = {
|
||||||
|
blocks,
|
||||||
|
edges: edges ?? [],
|
||||||
|
loops: loops ?? {},
|
||||||
|
parallels: parallels ?? {},
|
||||||
|
lastSaved: Date.now(),
|
||||||
|
}
|
||||||
|
|
||||||
|
return (
|
||||||
|
<div
|
||||||
|
ref={ref}
|
||||||
|
style={{
|
||||||
|
position: 'absolute',
|
||||||
|
left: '-9999px',
|
||||||
|
top: '-9999px',
|
||||||
|
width: OG_IMAGE_WIDTH,
|
||||||
|
height: OG_IMAGE_HEIGHT,
|
||||||
|
backgroundColor: '#0c0c0c',
|
||||||
|
overflow: 'hidden',
|
||||||
|
}}
|
||||||
|
aria-hidden='true'
|
||||||
|
>
|
||||||
|
<WorkflowPreview
|
||||||
|
workflowState={workflowState}
|
||||||
|
showSubBlocks={false}
|
||||||
|
height='100%'
|
||||||
|
width='100%'
|
||||||
|
isPannable={false}
|
||||||
|
defaultZoom={0.8}
|
||||||
|
fitPadding={0.2}
|
||||||
|
lightweight
|
||||||
|
/>
|
||||||
|
</div>
|
||||||
|
)
|
||||||
|
})
|
||||||
|
|
||||||
|
OGCaptureContainer.displayName = 'OGCaptureContainer'
|
||||||
|
|
||||||
interface TemplatePreviewContentProps {
|
interface TemplatePreviewContentProps {
|
||||||
existingTemplate:
|
existingTemplate:
|
||||||
| {
|
| {
|
||||||
|
|||||||
@@ -844,8 +844,13 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
       if (!accessibleBlock) continue

       // Skip the current block - blocks cannot reference their own outputs
-      // Exception: approval blocks can reference their own outputs
-      if (accessibleBlockId === blockId && accessibleBlock.type !== 'approval') continue
+      // Exception: approval and human_in_the_loop blocks can reference their own outputs
+      if (
+        accessibleBlockId === blockId &&
+        accessibleBlock.type !== 'approval' &&
+        accessibleBlock.type !== 'human_in_the_loop'
+      )
+        continue

       const blockConfig = getBlock(accessibleBlock.type)

@@ -972,6 +977,8 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({
           const allTags = outputPaths.map((path) => `${normalizedBlockName}.${path}`)
           blockTags = isSelfReference ? allTags.filter((tag) => tag.endsWith('.url')) : allTags
         }
+      } else if (accessibleBlock.type === 'human_in_the_loop') {
+        blockTags = [`${normalizedBlockName}.url`]
       } else {
         const operationValue =
           mergedSubBlocks?.operation?.value ?? getSubBlockValue(accessibleBlockId, 'operation')
@@ -1214,31 +1221,25 @@ export const TagDropdown: React.FC<TagDropdownProps> = ({

     let processedTag = tag

-    // Check if this is a file property and add [0] automatically
-    // Only include user-accessible fields (matches UserFile interface)
-    const fileProperties = ['id', 'name', 'url', 'size', 'type']
     const parts = tag.split('.')
-    if (parts.length >= 2 && fileProperties.includes(parts[parts.length - 1])) {
-      const fieldName = parts[parts.length - 2]
-
-      if (blockGroup) {
-        const block = useWorkflowStore.getState().blocks[blockGroup.blockId]
-        const blockConfig = block ? (getBlock(block.type) ?? null) : null
-        const mergedSubBlocks = getMergedSubBlocks(blockGroup.blockId)
-
-        const fieldType = getOutputTypeForPath(
-          block,
-          blockConfig,
-          blockGroup.blockId,
-          fieldName,
-          mergedSubBlocks
-        )
-
-        if (fieldType === 'files') {
-          const blockAndField = parts.slice(0, -1).join('.')
-          const property = parts[parts.length - 1]
-          processedTag = `${blockAndField}[0].${property}`
-        }
+    if (parts.length >= 3 && blockGroup) {
+      const arrayFieldName = parts[1] // e.g., "channels", "files", "users"
+      const block = useWorkflowStore.getState().blocks[blockGroup.blockId]
+      const blockConfig = block ? (getBlock(block.type) ?? null) : null
+      const mergedSubBlocks = getMergedSubBlocks(blockGroup.blockId)
+
+      const fieldType = getOutputTypeForPath(
+        block,
+        blockConfig,
+        blockGroup.blockId,
+        arrayFieldName,
+        mergedSubBlocks
+      )
+
+      if (fieldType === 'files' || fieldType === 'array') {
+        const blockName = parts[0]
+        const remainingPath = parts.slice(2).join('.')
+        processedTag = `${blockName}.${arrayFieldName}[0].${remainingPath}`
       }
     }

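To illustrate the rewrite above (block and field names here are made up): for a reference like slack1.channels.id, parts[1] is channels, and when getOutputTypeForPath reports 'files' or 'array' the tag gains an index. A hypothetical standalone version of just that step:

    // Sketch of the index-insertion step; the real code only applies it after the type check.
    function indexArrayTag(tag: string): string {
      const parts = tag.split('.')
      if (parts.length < 3) return tag
      const [blockName, arrayFieldName, ...restPath] = parts
      return `${blockName}.${arrayFieldName}[0].${restPath.join('.')}`
    }

    indexArrayTag('slack1.channels.id') // -> 'slack1.channels[0].id'
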
|
|||||||
@@ -43,6 +43,7 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
|||||||
{ label: 'Delete Issue Link', id: 'delete_link' },
|
{ label: 'Delete Issue Link', id: 'delete_link' },
|
||||||
{ label: 'Add Watcher', id: 'add_watcher' },
|
{ label: 'Add Watcher', id: 'add_watcher' },
|
||||||
{ label: 'Remove Watcher', id: 'remove_watcher' },
|
{ label: 'Remove Watcher', id: 'remove_watcher' },
|
||||||
|
{ label: 'Get Users', id: 'get_users' },
|
||||||
],
|
],
|
||||||
value: () => 'read',
|
value: () => 'read',
|
||||||
},
|
},
|
||||||
@@ -194,6 +195,71 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
|||||||
dependsOn: ['projectId'],
|
dependsOn: ['projectId'],
|
||||||
condition: { field: 'operation', value: ['update', 'write'] },
|
condition: { field: 'operation', value: ['update', 'write'] },
|
||||||
},
|
},
|
||||||
|
// Write Issue additional fields
|
||||||
|
{
|
||||||
|
id: 'assignee',
|
||||||
|
title: 'Assignee Account ID',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'Assignee account ID (e.g., 5b109f2e9729b51b54dc274d)',
|
||||||
|
dependsOn: ['projectId'],
|
||||||
|
condition: { field: 'operation', value: 'write' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'priority',
|
||||||
|
title: 'Priority',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'Priority ID or name (e.g., "10000" or "High")',
|
||||||
|
dependsOn: ['projectId'],
|
||||||
|
condition: { field: 'operation', value: 'write' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'labels',
|
||||||
|
title: 'Labels',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'Comma-separated labels (e.g., bug, urgent)',
|
||||||
|
dependsOn: ['projectId'],
|
||||||
|
condition: { field: 'operation', value: 'write' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'duedate',
|
||||||
|
title: 'Due Date',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'YYYY-MM-DD (e.g., 2024-12-31)',
|
||||||
|
dependsOn: ['projectId'],
|
||||||
|
condition: { field: 'operation', value: 'write' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'reporter',
|
||||||
|
title: 'Reporter Account ID',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'Reporter account ID',
|
||||||
|
dependsOn: ['projectId'],
|
||||||
|
condition: { field: 'operation', value: 'write' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'environment',
|
||||||
|
title: 'Environment',
|
||||||
|
type: 'long-input',
|
||||||
|
placeholder: 'Environment information (e.g., Production, Staging)',
|
||||||
|
dependsOn: ['projectId'],
|
||||||
|
condition: { field: 'operation', value: 'write' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'customFieldId',
|
||||||
|
title: 'Custom Field ID',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'e.g., customfield_10001 or 10001',
|
||||||
|
dependsOn: ['projectId'],
|
||||||
|
condition: { field: 'operation', value: 'write' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'teamUuid',
|
||||||
|
title: 'Team UUID',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'e.g., b3aa307a-76ea-462d-b6f1-a6e89ce9858a',
|
||||||
|
dependsOn: ['projectId'],
|
||||||
|
condition: { field: 'operation', value: 'write' },
|
||||||
|
},
|
||||||
// Delete Issue fields
|
// Delete Issue fields
|
||||||
{
|
{
|
||||||
id: 'deleteSubtasks',
|
id: 'deleteSubtasks',
|
||||||
@@ -351,6 +417,28 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
|||||||
placeholder: 'Enter link ID to delete',
|
placeholder: 'Enter link ID to delete',
|
||||||
condition: { field: 'operation', value: 'delete_link' },
|
condition: { field: 'operation', value: 'delete_link' },
|
||||||
},
|
},
|
||||||
|
// Get Users fields
|
||||||
|
{
|
||||||
|
id: 'userAccountId',
|
||||||
|
title: 'Account ID',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'Enter account ID for specific user',
|
||||||
|
condition: { field: 'operation', value: 'get_users' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'usersStartAt',
|
||||||
|
title: 'Start At',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'Pagination start index (default: 0)',
|
||||||
|
condition: { field: 'operation', value: 'get_users' },
|
||||||
|
},
|
||||||
|
{
|
||||||
|
id: 'usersMaxResults',
|
||||||
|
title: 'Max Results',
|
||||||
|
type: 'short-input',
|
||||||
|
placeholder: 'Maximum users to return (default: 50)',
|
||||||
|
condition: { field: 'operation', value: 'get_users' },
|
||||||
|
},
|
||||||
// Trigger SubBlocks
|
// Trigger SubBlocks
|
||||||
...getTrigger('jira_issue_created').subBlocks,
|
...getTrigger('jira_issue_created').subBlocks,
|
||||||
...getTrigger('jira_issue_updated').subBlocks,
|
...getTrigger('jira_issue_updated').subBlocks,
|
||||||
@@ -383,6 +471,7 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
|||||||
'jira_delete_issue_link',
|
'jira_delete_issue_link',
|
||||||
'jira_add_watcher',
|
'jira_add_watcher',
|
||||||
'jira_remove_watcher',
|
'jira_remove_watcher',
|
||||||
|
'jira_get_users',
|
||||||
],
|
],
|
||||||
config: {
|
config: {
|
||||||
tool: (params) => {
|
tool: (params) => {
|
||||||
@@ -438,6 +527,8 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
|||||||
return 'jira_add_watcher'
|
return 'jira_add_watcher'
|
||||||
case 'remove_watcher':
|
case 'remove_watcher':
|
||||||
return 'jira_remove_watcher'
|
return 'jira_remove_watcher'
|
||||||
|
case 'get_users':
|
||||||
|
return 'jira_get_users'
|
||||||
default:
|
default:
|
||||||
return 'jira_retrieve'
|
return 'jira_retrieve'
|
||||||
}
|
}
|
||||||
@@ -461,12 +552,29 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
|||||||
'Project ID is required. Please select a project or enter a project ID manually.'
|
'Project ID is required. Please select a project or enter a project ID manually.'
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
// Parse comma-separated strings into arrays
|
||||||
|
const parseCommaSeparated = (value: string | undefined): string[] | undefined => {
|
||||||
|
if (!value || value.trim() === '') return undefined
|
||||||
|
return value
|
||||||
|
.split(',')
|
||||||
|
.map((item) => item.trim())
|
||||||
|
.filter((item) => item !== '')
|
||||||
|
}
|
||||||
|
|
||||||
const writeParams = {
|
const writeParams = {
|
||||||
projectId: effectiveProjectId,
|
projectId: effectiveProjectId,
|
||||||
summary: params.summary || '',
|
summary: params.summary || '',
|
||||||
description: params.description || '',
|
description: params.description || '',
|
||||||
issueType: params.issueType || 'Task',
|
issueType: params.issueType || 'Task',
|
||||||
parent: params.parentIssue ? { key: params.parentIssue } : undefined,
|
parent: params.parentIssue ? { key: params.parentIssue } : undefined,
|
||||||
|
assignee: params.assignee || undefined,
|
||||||
|
priority: params.priority || undefined,
|
||||||
|
labels: parseCommaSeparated(params.labels),
|
||||||
|
duedate: params.duedate || undefined,
|
||||||
|
reporter: params.reporter || undefined,
|
||||||
|
environment: params.environment || undefined,
|
||||||
|
customFieldId: params.customFieldId || undefined,
|
||||||
|
customFieldValue: params.customFieldValue || undefined,
|
||||||
}
|
}
|
||||||
return {
|
return {
|
||||||
...baseParams,
|
...baseParams,
|
||||||
@@ -704,6 +812,16 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
|||||||
accountId: params.accountId,
|
accountId: params.accountId,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
case 'get_users': {
|
||||||
|
return {
|
||||||
|
...baseParams,
|
||||||
|
accountId: params.userAccountId || undefined,
|
||||||
|
startAt: params.usersStartAt ? Number.parseInt(params.usersStartAt) : undefined,
|
||||||
|
maxResults: params.usersMaxResults
|
||||||
|
? Number.parseInt(params.usersMaxResults)
|
||||||
|
: undefined,
|
||||||
|
}
|
||||||
|
}
|
||||||
default:
|
default:
|
||||||
return baseParams
|
return baseParams
|
||||||
}
|
}
|
||||||
@@ -722,6 +840,15 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
|||||||
summary: { type: 'string', description: 'Issue summary' },
|
summary: { type: 'string', description: 'Issue summary' },
|
||||||
description: { type: 'string', description: 'Issue description' },
|
description: { type: 'string', description: 'Issue description' },
|
||||||
issueType: { type: 'string', description: 'Issue type' },
|
issueType: { type: 'string', description: 'Issue type' },
|
||||||
|
// Write operation additional inputs
|
||||||
|
assignee: { type: 'string', description: 'Assignee account ID' },
|
||||||
|
priority: { type: 'string', description: 'Priority ID or name' },
|
||||||
|
labels: { type: 'string', description: 'Comma-separated labels for the issue' },
|
||||||
|
duedate: { type: 'string', description: 'Due date in YYYY-MM-DD format' },
|
||||||
|
reporter: { type: 'string', description: 'Reporter account ID' },
|
||||||
|
environment: { type: 'string', description: 'Environment information' },
|
||||||
|
customFieldId: { type: 'string', description: 'Custom field ID (e.g., customfield_10001)' },
|
||||||
|
customFieldValue: { type: 'string', description: 'Value for the custom field' },
|
||||||
// Delete operation inputs
|
// Delete operation inputs
|
||||||
deleteSubtasks: { type: 'string', description: 'Whether to delete subtasks (true/false)' },
|
deleteSubtasks: { type: 'string', description: 'Whether to delete subtasks (true/false)' },
|
||||||
// Assign/Watcher operation inputs
|
// Assign/Watcher operation inputs
|
||||||
@@ -758,6 +885,13 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
|||||||
linkType: { type: 'string', description: 'Type of link (e.g., "Blocks", "Relates")' },
|
linkType: { type: 'string', description: 'Type of link (e.g., "Blocks", "Relates")' },
|
||||||
linkComment: { type: 'string', description: 'Optional comment for issue link' },
|
linkComment: { type: 'string', description: 'Optional comment for issue link' },
|
||||||
linkId: { type: 'string', description: 'Link ID for delete operation' },
|
linkId: { type: 'string', description: 'Link ID for delete operation' },
|
||||||
|
// Get Users operation inputs
|
||||||
|
userAccountId: {
|
||||||
|
type: 'string',
|
||||||
|
description: 'Account ID for specific user lookup (optional)',
|
||||||
|
},
|
||||||
|
usersStartAt: { type: 'string', description: 'Pagination start index for users' },
|
||||||
|
usersMaxResults: { type: 'string', description: 'Maximum users to return' },
|
||||||
},
|
},
|
||||||
outputs: {
|
outputs: {
|
||||||
// Common outputs across all Jira operations
|
// Common outputs across all Jira operations
|
||||||
@@ -834,6 +968,12 @@ export const JiraBlock: BlockConfig<JiraResponse> = {
|
|||||||
// jira_add_watcher, jira_remove_watcher outputs
|
// jira_add_watcher, jira_remove_watcher outputs
|
||||||
watcherAccountId: { type: 'string', description: 'Watcher account ID' },
|
watcherAccountId: { type: 'string', description: 'Watcher account ID' },
|
||||||
|
|
||||||
|
// jira_get_users outputs
|
||||||
|
users: {
|
||||||
|
type: 'json',
|
||||||
|
description: 'Array of users with accountId, displayName, emailAddress, active status',
|
||||||
|
},
|
||||||
|
|
||||||
// jira_bulk_read outputs
|
// jira_bulk_read outputs
|
||||||
// Note: bulk_read returns an array in the output field, each item contains:
|
// Note: bulk_read returns an array in the output field, each item contains:
|
||||||
// ts, issueKey, summary, description, status, assignee, created, updated
|
// ts, issueKey, summary, description, status, assignee, created, updated
|
||||||
|
|||||||
@@ -134,7 +134,6 @@ export const MicrosoftPlannerBlock: BlockConfig<MicrosoftPlannerResponse> = {
       placeholder: 'Enter the bucket ID',
       condition: { field: 'operation', value: ['read_bucket', 'update_bucket', 'delete_bucket'] },
       dependsOn: ['credential'],
-      canonicalParamId: 'bucketId',
     },

     // ETag for update/delete operations
|||||||
@@ -181,7 +181,6 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
|||||||
id: 'threadTs',
|
id: 'threadTs',
|
||||||
title: 'Thread Timestamp',
|
title: 'Thread Timestamp',
|
||||||
type: 'short-input',
|
type: 'short-input',
|
||||||
canonicalParamId: 'thread_ts',
|
|
||||||
placeholder: 'Reply to thread (e.g., 1405894322.002768)',
|
placeholder: 'Reply to thread (e.g., 1405894322.002768)',
|
||||||
condition: {
|
condition: {
|
||||||
field: 'operation',
|
field: 'operation',
|
||||||
@@ -263,7 +262,6 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
|||||||
id: 'channelLimit',
|
id: 'channelLimit',
|
||||||
title: 'Channel Limit',
|
title: 'Channel Limit',
|
||||||
type: 'short-input',
|
type: 'short-input',
|
||||||
canonicalParamId: 'limit',
|
|
||||||
placeholder: '100',
|
placeholder: '100',
|
||||||
condition: {
|
condition: {
|
||||||
field: 'operation',
|
field: 'operation',
|
||||||
@@ -275,7 +273,6 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
|||||||
id: 'memberLimit',
|
id: 'memberLimit',
|
||||||
title: 'Member Limit',
|
title: 'Member Limit',
|
||||||
type: 'short-input',
|
type: 'short-input',
|
||||||
canonicalParamId: 'limit',
|
|
||||||
placeholder: '100',
|
placeholder: '100',
|
||||||
condition: {
|
condition: {
|
||||||
field: 'operation',
|
field: 'operation',
|
||||||
@@ -301,7 +298,6 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
|||||||
id: 'userLimit',
|
id: 'userLimit',
|
||||||
title: 'User Limit',
|
title: 'User Limit',
|
||||||
type: 'short-input',
|
type: 'short-input',
|
||||||
canonicalParamId: 'limit',
|
|
||||||
placeholder: '100',
|
placeholder: '100',
|
||||||
condition: {
|
condition: {
|
||||||
field: 'operation',
|
field: 'operation',
|
||||||
@@ -358,7 +354,6 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
|||||||
id: 'updateTimestamp',
|
id: 'updateTimestamp',
|
||||||
title: 'Message Timestamp',
|
title: 'Message Timestamp',
|
||||||
        type: 'short-input',
-       canonicalParamId: 'timestamp',
        placeholder: 'Message timestamp (e.g., 1405894322.002768)',
        condition: {
          field: 'operation',
@@ -382,7 +377,6 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
      id: 'deleteTimestamp',
      title: 'Message Timestamp',
      type: 'short-input',
-     canonicalParamId: 'timestamp',
      placeholder: 'Message timestamp (e.g., 1405894322.002768)',
      condition: {
        field: 'operation',
@@ -395,7 +389,6 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
      id: 'reactionTimestamp',
      title: 'Message Timestamp',
      type: 'short-input',
-     canonicalParamId: 'timestamp',
      placeholder: 'Message timestamp (e.g., 1405894322.002768)',
      condition: {
        field: 'operation',
@@ -407,7 +400,6 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
      id: 'emojiName',
      title: 'Emoji Name',
      type: 'short-input',
-     canonicalParamId: 'name',
      placeholder: 'Emoji name without colons (e.g., thumbsup, heart, eyes)',
      condition: {
        field: 'operation',
@@ -554,47 +546,35 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
          baseParams.content = content
          break

-       case 'read':
-         if (limit) {
-           const parsedLimit = Number.parseInt(limit, 10)
-           baseParams.limit = !Number.isNaN(parsedLimit) ? parsedLimit : 10
-         } else {
-           baseParams.limit = 10
+       case 'read': {
+         const parsedLimit = limit ? Number.parseInt(limit, 10) : 10
+         if (Number.isNaN(parsedLimit) || parsedLimit < 1 || parsedLimit > 15) {
+           throw new Error('Message limit must be between 1 and 15')
          }
+         baseParams.limit = parsedLimit
          if (oldest) {
            baseParams.oldest = oldest
          }
          break
+       }

-       case 'list_channels':
+       case 'list_channels': {
          baseParams.includePrivate = includePrivate !== 'false'
          baseParams.excludeArchived = true
-         if (channelLimit) {
-           const parsedLimit = Number.parseInt(channelLimit, 10)
-           baseParams.limit = !Number.isNaN(parsedLimit) ? parsedLimit : 100
-         } else {
-           baseParams.limit = 100
-         }
+         baseParams.limit = channelLimit ? Number.parseInt(channelLimit, 10) : 100
          break
+       }

-       case 'list_members':
-         if (memberLimit) {
-           const parsedLimit = Number.parseInt(memberLimit, 10)
-           baseParams.limit = !Number.isNaN(parsedLimit) ? parsedLimit : 100
-         } else {
-           baseParams.limit = 100
-         }
+       case 'list_members': {
+         baseParams.limit = memberLimit ? Number.parseInt(memberLimit, 10) : 100
          break
+       }

-       case 'list_users':
+       case 'list_users': {
          baseParams.includeDeleted = includeDeleted === 'true'
-         if (userLimit) {
-           const parsedLimit = Number.parseInt(userLimit, 10)
-           baseParams.limit = !Number.isNaN(parsedLimit) ? parsedLimit : 100
-         } else {
-           baseParams.limit = 100
-         }
+         baseParams.limit = userLimit ? Number.parseInt(userLimit, 10) : 100
          break
+       }

        case 'get_user':
          if (!userId) {

@@ -70,17 +70,6 @@ export const WealthboxBlock: BlockConfig<WealthboxResponse> = {
      title: 'Task ID',
      type: 'short-input',
      placeholder: 'Enter Task ID',
-     mode: 'basic',
-     canonicalParamId: 'taskId',
-     condition: { field: 'operation', value: ['read_task'] },
-   },
-   {
-     id: 'manualTaskId',
-     title: 'Task ID',
-     type: 'short-input',
-     canonicalParamId: 'taskId',
-     placeholder: 'Enter Task ID',
-     mode: 'advanced',
      condition: { field: 'operation', value: ['read_task'] },
    },
    {
@@ -167,12 +156,9 @@ export const WealthboxBlock: BlockConfig<WealthboxResponse> = {
        }
      },
      params: (params) => {
-       const { credential, operation, contactId, manualContactId, taskId, manualTaskId, ...rest } =
-         params
+       const { credential, operation, contactId, manualContactId, taskId, ...rest } = params

-       // Handle both selector and manual inputs
        const effectiveContactId = (contactId || manualContactId || '').trim()
-       const effectiveTaskId = (taskId || manualTaskId || '').trim()

        const baseParams = {
          ...rest,
@@ -225,7 +211,6 @@ export const WealthboxBlock: BlockConfig<WealthboxResponse> = {
    contactId: { type: 'string', description: 'Contact identifier' },
    manualContactId: { type: 'string', description: 'Manual contact identifier' },
    taskId: { type: 'string', description: 'Task identifier' },
-   manualTaskId: { type: 'string', description: 'Manual task identifier' },
    content: { type: 'string', description: 'Content text' },
    firstName: { type: 'string', description: 'First name' },
    lastName: { type: 'string', description: 'Last name' },

@@ -138,6 +138,7 @@ export const env = createEnv({
    S3_CHAT_BUCKET_NAME: z.string().optional(), // S3 bucket for chat logos
    S3_COPILOT_BUCKET_NAME: z.string().optional(), // S3 bucket for copilot files
    S3_PROFILE_PICTURES_BUCKET_NAME: z.string().optional(), // S3 bucket for profile pictures
+   S3_OG_IMAGES_BUCKET_NAME: z.string().optional(), // S3 bucket for OpenGraph images

    // Cloud Storage - Azure Blob
    AZURE_ACCOUNT_NAME: z.string().optional(), // Azure storage account name
@@ -149,6 +150,7 @@ export const env = createEnv({
    AZURE_STORAGE_CHAT_CONTAINER_NAME: z.string().optional(), // Azure container for chat logos
    AZURE_STORAGE_COPILOT_CONTAINER_NAME: z.string().optional(), // Azure container for copilot files
    AZURE_STORAGE_PROFILE_PICTURES_CONTAINER_NAME: z.string().optional(), // Azure container for profile pictures
+   AZURE_STORAGE_OG_IMAGES_CONTAINER_NAME: z.string().optional(), // Azure container for OpenGraph images

    // Data Retention
    FREE_PLAN_LOG_RETENTION_DAYS: z.string().optional(), // Log retention days for free plan users

@@ -1,4 +1,5 @@
 import { env, getEnv } from '../config/env'
+import { isDev } from '../config/feature-flags'

 /**
  * Content Security Policy (CSP) configuration builder
@@ -79,10 +80,16 @@ export const buildTimeCSPDirectives: CSPDirectives = {
  'connect-src': [
    "'self'",
    env.NEXT_PUBLIC_APP_URL || '',
-   env.OLLAMA_URL || 'http://localhost:11434',
-   env.NEXT_PUBLIC_SOCKET_URL || 'http://localhost:3002',
-   env.NEXT_PUBLIC_SOCKET_URL?.replace('http://', 'ws://').replace('https://', 'wss://') ||
-     'ws://localhost:3002',
+   // Only include localhost fallbacks in development mode
+   ...(env.OLLAMA_URL ? [env.OLLAMA_URL] : isDev ? ['http://localhost:11434'] : []),
+   ...(env.NEXT_PUBLIC_SOCKET_URL
+     ? [
+         env.NEXT_PUBLIC_SOCKET_URL,
+         env.NEXT_PUBLIC_SOCKET_URL.replace('http://', 'ws://').replace('https://', 'wss://'),
+       ]
+     : isDev
+       ? ['http://localhost:3002', 'ws://localhost:3002']
+       : []),
    'https://api.browser-use.com',
    'https://api.exa.ai',
    'https://api.firecrawl.dev',
@@ -128,11 +135,16 @@ export function buildCSPString(directives: CSPDirectives): string {
 * This maintains compatibility with existing inline scripts while fixing Docker env var issues
 */
 export function generateRuntimeCSP(): string {
-  const socketUrl = getEnv('NEXT_PUBLIC_SOCKET_URL') || 'http://localhost:3002'
-  const socketWsUrl =
-    socketUrl.replace('http://', 'ws://').replace('https://', 'wss://') || 'ws://localhost:3002'
   const appUrl = getEnv('NEXT_PUBLIC_APP_URL') || ''
-  const ollamaUrl = getEnv('OLLAMA_URL') || 'http://localhost:11434'
+  // Only include localhost URLs in development or when explicitly configured
+  const socketUrl = getEnv('NEXT_PUBLIC_SOCKET_URL') || (isDev ? 'http://localhost:3002' : '')
+  const socketWsUrl = socketUrl
+    ? socketUrl.replace('http://', 'ws://').replace('https://', 'wss://')
+    : isDev
+      ? 'ws://localhost:3002'
+      : ''
+  const ollamaUrl = getEnv('OLLAMA_URL') || (isDev ? 'http://localhost:11434' : '')

   const brandLogoDomains = getHostnameFromUrl(getEnv('NEXT_PUBLIC_BRAND_LOGO_URL'))
   const brandFaviconDomains = getHostnameFromUrl(getEnv('NEXT_PUBLIC_BRAND_FAVICON_URL'))

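For orientation only, not part of this commit: a minimal sketch of how the runtime CSP string is typically attached to responses. The middleware location, import path, and header wiring here are assumptions, not something this diff establishes.

// Hypothetical Next.js middleware usage (illustrative sketch)
import { NextResponse } from 'next/server'
import { generateRuntimeCSP } from '@/lib/core/security/csp' // assumed import path

export function middleware() {
  const response = NextResponse.next()
  // With this change, localhost socket/Ollama fallbacks appear only when isDev is true
  // or when NEXT_PUBLIC_SOCKET_URL / OLLAMA_URL are explicitly configured.
  response.headers.set('Content-Security-Policy', generateRuntimeCSP())
  return response
}
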
@@ -958,3 +958,112 @@ export function createPinnedUrl(originalUrl: string, resolvedIP: string): string
   const port = parsed.port ? `:${parsed.port}` : ''
   return `${parsed.protocol}//${resolvedIP}${port}${parsed.pathname}${parsed.search}`
 }
+
+/**
+ * Validates a Google Calendar ID
+ *
+ * Google Calendar IDs can be:
+ * - "primary" (literal string for the user's primary calendar)
+ * - Email addresses (for user calendars)
+ * - Alphanumeric strings with hyphens, underscores, and dots (for other calendars)
+ *
+ * This validator allows these legitimate formats while blocking path traversal and injection attempts.
+ *
+ * @param value - The calendar ID to validate
+ * @param paramName - Name of the parameter for error messages
+ * @returns ValidationResult
+ *
+ * @example
+ * ```typescript
+ * const result = validateGoogleCalendarId(calendarId, 'calendarId')
+ * if (!result.isValid) {
+ *   return NextResponse.json({ error: result.error }, { status: 400 })
+ * }
+ * ```
+ */
+export function validateGoogleCalendarId(
+  value: string | null | undefined,
+  paramName = 'calendarId'
+): ValidationResult {
+  if (value === null || value === undefined || value === '') {
+    return {
+      isValid: false,
+      error: `${paramName} is required`,
+    }
+  }
+
+  if (value === 'primary') {
+    return { isValid: true, sanitized: value }
+  }
+
+  const pathTraversalPatterns = [
+    '../',
+    '..\\',
+    '%2e%2e%2f',
+    '%2e%2e/',
+    '..%2f',
+    '%2e%2e%5c',
+    '%2e%2e\\',
+    '..%5c',
+    '%252e%252e%252f',
+  ]
+
+  const lowerValue = value.toLowerCase()
+  for (const pattern of pathTraversalPatterns) {
+    if (lowerValue.includes(pattern)) {
+      logger.warn('Path traversal attempt in Google Calendar ID', {
+        paramName,
+        value: value.substring(0, 100),
+      })
+      return {
+        isValid: false,
+        error: `${paramName} contains invalid path traversal sequence`,
+      }
+    }
+  }
+
+  if (/[\x00-\x1f\x7f]/.test(value) || value.includes('%00')) {
+    logger.warn('Control characters in Google Calendar ID', { paramName })
+    return {
+      isValid: false,
+      error: `${paramName} contains invalid control characters`,
+    }
+  }
+
+  if (value.includes('\n') || value.includes('\r')) {
+    return {
+      isValid: false,
+      error: `${paramName} contains invalid newline characters`,
+    }
+  }
+
+  const emailPattern = /^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$/
+  if (emailPattern.test(value)) {
+    return { isValid: true, sanitized: value }
+  }
+
+  const calendarIdPattern = /^[a-zA-Z0-9._@%#+-]+$/
+  if (!calendarIdPattern.test(value)) {
+    logger.warn('Invalid Google Calendar ID format', {
+      paramName,
+      value: value.substring(0, 100),
+    })
+    return {
+      isValid: false,
+      error: `${paramName} format is invalid. Must be "primary", an email address, or an alphanumeric ID`,
+    }
+  }
+
+  if (value.length > 255) {
+    logger.warn('Google Calendar ID exceeds maximum length', {
+      paramName,
+      length: value.length,
+    })
+    return {
+      isValid: false,
+      error: `${paramName} exceeds maximum length of 255 characters`,
+    }
+  }
+
+  return { isValid: true, sanitized: value }
+}

@@ -79,10 +79,8 @@ export function hasEmailService(): boolean {

 export async function sendEmail(options: EmailOptions): Promise<SendEmailResult> {
   try {
-    // Check if user has unsubscribed (skip for critical transactional emails)
     if (options.emailType !== 'transactional') {
       const unsubscribeType = options.emailType as 'marketing' | 'updates' | 'notifications'
-      // For arrays, check the first email address (batch emails typically go to similar recipients)
       const primaryEmail = Array.isArray(options.to) ? options.to[0] : options.to
       const hasUnsubscribed = await isUnsubscribed(primaryEmail, unsubscribeType)
       if (hasUnsubscribed) {
@@ -99,10 +97,8 @@ export async function sendEmail(options: EmailOptions): Promise<SendEmailResult>
       }
     }

-    // Process email data with unsubscribe tokens and headers
     const processedData = await processEmailData(options)

-    // Try Resend first if configured
     if (resend) {
       try {
         return await sendWithResend(processedData)
@@ -111,7 +107,6 @@ export async function sendEmail(options: EmailOptions): Promise<SendEmailResult>
       }
     }

-    // Fallback to Azure Communication Services if configured
     if (azureEmailClient) {
       try {
         return await sendWithAzure(processedData)
@@ -124,7 +119,6 @@ export async function sendEmail(options: EmailOptions): Promise<SendEmailResult>
       }
     }

-    // No email service configured
     logger.info('Email not sent (no email service configured):', {
       to: options.to,
       subject: options.subject,
@@ -144,6 +138,32 @@ export async function sendEmail(options: EmailOptions): Promise<SendEmailResult>
   }
 }

+interface UnsubscribeData {
+  headers: Record<string, string>
+  html?: string
+  text?: string
+}
+
+function addUnsubscribeData(
+  recipientEmail: string,
+  emailType: string,
+  html?: string,
+  text?: string
+): UnsubscribeData {
+  const unsubscribeToken = generateUnsubscribeToken(recipientEmail, emailType)
+  const baseUrl = getBaseUrl()
+  const unsubscribeUrl = `${baseUrl}/unsubscribe?token=${unsubscribeToken}&email=${encodeURIComponent(recipientEmail)}`
+
+  return {
+    headers: {
+      'List-Unsubscribe': `<${unsubscribeUrl}>`,
+      'List-Unsubscribe-Post': 'List-Unsubscribe=One-Click',
+    },
+    html: html?.replace(/\{\{UNSUBSCRIBE_TOKEN\}\}/g, unsubscribeToken),
+    text: text?.replace(/\{\{UNSUBSCRIBE_TOKEN\}\}/g, unsubscribeToken),
+  }
+}
+
 async function processEmailData(options: EmailOptions): Promise<ProcessedEmailData> {
   const {
     to,
@@ -159,27 +179,16 @@ async function processEmailData(options: EmailOptions): Promise<ProcessedEmailDa

   const senderEmail = from || getFromEmailAddress()

-  // Generate unsubscribe token and add to content
   let finalHtml = html
   let finalText = text
-  const headers: Record<string, string> = {}
+  let headers: Record<string, string> = {}

   if (includeUnsubscribe && emailType !== 'transactional') {
-    // For arrays, use the first email for unsubscribe (batch emails typically go to similar recipients)
     const primaryEmail = Array.isArray(to) ? to[0] : to
-    const unsubscribeToken = generateUnsubscribeToken(primaryEmail, emailType)
-    const baseUrl = getBaseUrl()
-    const unsubscribeUrl = `${baseUrl}/unsubscribe?token=${unsubscribeToken}&email=${encodeURIComponent(primaryEmail)}`
-
-    headers['List-Unsubscribe'] = `<${unsubscribeUrl}>`
-    headers['List-Unsubscribe-Post'] = 'List-Unsubscribe=One-Click'
-
-    if (html) {
-      finalHtml = html.replace(/\{\{UNSUBSCRIBE_TOKEN\}\}/g, unsubscribeToken)
-    }
-    if (text) {
-      finalText = text.replace(/\{\{UNSUBSCRIBE_TOKEN\}\}/g, unsubscribeToken)
-    }
+    const unsubData = addUnsubscribeData(primaryEmail, emailType, html, text)
+    headers = unsubData.headers
+    finalHtml = unsubData.html
+    finalText = unsubData.text
   }

   return {
@@ -234,13 +243,10 @@ async function sendWithResend(data: ProcessedEmailData): Promise<SendEmailResult
 async function sendWithAzure(data: ProcessedEmailData): Promise<SendEmailResult> {
   if (!azureEmailClient) throw new Error('Azure Communication Services not configured')

-  // Azure Communication Services requires at least one content type
   if (!data.html && !data.text) {
     throw new Error('Azure Communication Services requires either HTML or text content')
   }

-  // For Azure, use just the email address part (no display name)
-  // Azure will use the display name configured in the portal for the sender address
   const senderEmailOnly = data.senderEmail.includes('<')
     ? data.senderEmail.match(/<(.+)>/)?.[1] || data.senderEmail
     : data.senderEmail
@@ -281,7 +287,6 @@ export async function sendBatchEmails(options: BatchEmailOptions): Promise<Batch
   try {
     const results: SendEmailResult[] = []

-    // Try Resend first for batch emails if available
     if (resend) {
       try {
         return await sendBatchWithResend(options.emails)
@@ -290,7 +295,6 @@ export async function sendBatchEmails(options: BatchEmailOptions): Promise<Batch
       }
     }

-    // Fallback to individual sends (works with both Azure and Resend)
     logger.info('Sending batch emails individually')
     for (const email of options.emails) {
       try {
@@ -328,17 +332,57 @@ async function sendBatchWithResend(emails: EmailOptions[]): Promise<BatchSendEma
   if (!resend) throw new Error('Resend not configured')

   const results: SendEmailResult[] = []
-  const batchEmails = emails.map((email) => {
+  const skippedIndices: number[] = []
+  const batchEmails: any[] = []
+
+  for (let i = 0; i < emails.length; i++) {
+    const email = emails[i]
+    const { emailType = 'transactional', includeUnsubscribe = true } = email
+
+    if (emailType !== 'transactional') {
+      const unsubscribeType = emailType as 'marketing' | 'updates' | 'notifications'
+      const primaryEmail = Array.isArray(email.to) ? email.to[0] : email.to
+      const hasUnsubscribed = await isUnsubscribed(primaryEmail, unsubscribeType)
+      if (hasUnsubscribed) {
+        skippedIndices.push(i)
+        results.push({
+          success: true,
+          message: 'Email skipped (user unsubscribed)',
+          data: { id: 'skipped-unsubscribed' },
+        })
+        continue
+      }
+    }
+
     const senderEmail = email.from || getFromEmailAddress()
     const emailData: any = {
       from: senderEmail,
       to: email.to,
       subject: email.subject,
     }

     if (email.html) emailData.html = email.html
     if (email.text) emailData.text = email.text
-    return emailData
-  })
+    if (includeUnsubscribe && emailType !== 'transactional') {
+      const primaryEmail = Array.isArray(email.to) ? email.to[0] : email.to
+      const unsubData = addUnsubscribeData(primaryEmail, emailType, email.html, email.text)
+      emailData.headers = unsubData.headers
+      if (unsubData.html) emailData.html = unsubData.html
+      if (unsubData.text) emailData.text = unsubData.text
+    }
+
+    batchEmails.push(emailData)
+  }
+
+  if (batchEmails.length === 0) {
+    return {
+      success: true,
+      message: 'All batch emails skipped (users unsubscribed)',
+      results,
+      data: { count: 0 },
+    }
+  }
+
   try {
     const response = await resend.batch.send(batchEmails as any)
@@ -347,7 +391,6 @@ async function sendBatchWithResend(emails: EmailOptions[]): Promise<BatchSendEma
       throw new Error(response.error.message || 'Resend batch API error')
     }

-    // Success - create results for each email
     batchEmails.forEach((_, index) => {
       results.push({
         success: true,
@@ -358,12 +401,15 @@ async function sendBatchWithResend(emails: EmailOptions[]): Promise<BatchSendEma

     return {
       success: true,
-      message: 'All batch emails sent successfully via Resend',
+      message:
+        skippedIndices.length > 0
+          ? `${batchEmails.length} emails sent, ${skippedIndices.length} skipped (unsubscribed)`
+          : 'All batch emails sent successfully via Resend',
       results,
-      data: { count: results.length },
+      data: { count: batchEmails.length },
     }
   } catch (error) {
     logger.error('Resend batch send failed:', error)
-    throw error // Let the caller handle fallback
+    throw error
   }
 }

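As an illustrative sketch, not part of the commit: how a caller would exercise the new unsubscribe path. The import path is an assumption; the option names come from the diff above. Any non-transactional emailType now gets one-click unsubscribe headers and token substitution before sending.

// Illustrative only; import path is an assumption
import { sendEmail } from '@/lib/email/mailer'

await sendEmail({
  to: 'user@example.com',
  subject: 'Product updates',
  html: '<p>News...</p><a href="/unsubscribe?token={{UNSUBSCRIBE_TOKEN}}">Unsubscribe</a>',
  emailType: 'updates', // anything other than 'transactional' triggers the unsubscribe check and headers
})
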
apps/sim/lib/og/capture-preview.ts (new file, 130 lines)
@@ -0,0 +1,130 @@
+import { toPng } from 'html-to-image'
+import { createLogger } from '@/lib/logs/console/logger'
+
+const logger = createLogger('OGCapturePreview')
+
+/**
+ * OG image dimensions following social media best practices
+ */
+export const OG_IMAGE_WIDTH = 1200
+export const OG_IMAGE_HEIGHT = 630
+
+/**
+ * Capture a workflow preview element as a PNG image for OpenGraph.
+ * Returns a base64-encoded data URL.
+ *
+ * @param element - The DOM element containing the workflow preview
+ * @param retries - Number of retry attempts (default: 3)
+ * @returns Base64 data URL of the captured image, or null if capture fails
+ */
+export async function captureWorkflowPreview(
+  element: HTMLElement,
+  retries = 3
+): Promise<string | null> {
+  if (!element || element.children.length === 0) {
+    logger.warn('Cannot capture empty element')
+    return null
+  }
+
+  for (let attempt = 1; attempt <= retries; attempt++) {
+    try {
+      logger.info(`Capturing workflow preview for OG image (attempt ${attempt}/${retries})`)
+
+      const dataUrl = await toPng(element, {
+        width: OG_IMAGE_WIDTH,
+        height: OG_IMAGE_HEIGHT,
+        pixelRatio: 2, // Higher quality for crisp rendering
+        backgroundColor: '#0c0c0c', // Dark background matching the app theme
+        style: {
+          transform: 'scale(1)',
+          transformOrigin: 'top left',
+        },
+        filter: (node) => {
+          const className = node.className?.toString() || ''
+          if (
+            className.includes('tooltip') ||
+            className.includes('popover') ||
+            className.includes('overlay') ||
+            className.includes('react-flow__controls') ||
+            className.includes('react-flow__minimap')
+          ) {
+            return false
+          }
+          return true
+        },
+      })
+
+      if (dataUrl && dataUrl.length > 1000) {
+        logger.info('Workflow preview captured successfully')
+        return dataUrl
+      }
+
+      logger.warn(`Captured image appears to be empty (attempt ${attempt})`)
+    } catch (error) {
+      logger.error(`Failed to capture workflow preview (attempt ${attempt}):`, error)
+    }
+
+    if (attempt < retries) {
+      await new Promise((resolve) => setTimeout(resolve, 500 * attempt))
+    }
+  }
+
+  logger.error('All capture attempts failed')
+  return null
+}
+
+/**
+ * Upload a captured OG image to the server.
+ *
+ * @param templateId - The ID of the template to associate the image with
+ * @param imageData - Base64-encoded image data URL
+ * @returns The public URL of the uploaded image, or null if upload fails
+ */
+export async function uploadOGImage(templateId: string, imageData: string): Promise<string | null> {
+  try {
+    logger.info(`Uploading OG image for template: ${templateId}`)
+
+    const response = await fetch(`/api/templates/${templateId}/og-image`, {
+      method: 'PUT',
+      headers: {
+        'Content-Type': 'application/json',
+      },
+      body: JSON.stringify({ imageData }),
+    })
+
+    if (!response.ok) {
+      const errorData = await response.json().catch(() => ({}))
+      throw new Error(errorData.error || `Upload failed with status ${response.status}`)
+    }
+
+    const data = await response.json()
+    logger.info(`OG image uploaded successfully: ${data.ogImageUrl}`)
+
+    return data.ogImageUrl
+  } catch (error) {
+    logger.error('Failed to upload OG image:', error)
+    return null
+  }
+}
+
+/**
+ * Capture and upload a workflow preview as an OG image.
+ * This is a convenience function that combines capture and upload.
+ *
+ * @param element - The DOM element containing the workflow preview
+ * @param templateId - The ID of the template
+ * @returns The public URL of the uploaded image, or null if either step fails
+ */
+export async function captureAndUploadOGImage(
+  element: HTMLElement,
+  templateId: string
+): Promise<string | null> {
+  const imageData = await captureWorkflowPreview(element)
+
+  if (!imageData) {
+    logger.warn('Skipping OG image upload - capture failed')
+    return null
+  }
+
+  return uploadOGImage(templateId, imageData)
+}

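For orientation only, not part of this commit: a minimal sketch of calling the new capture helper from a client component once a template preview is on screen. The component name, ref wiring, and publish trigger are assumptions; only the exported helper and its signature come from the file above.

// Hypothetical client-side usage (illustrative sketch)
import { useRef } from 'react'
import { captureAndUploadOGImage } from '@/lib/og'

export function TemplatePreview({ templateId }: { templateId: string }) {
  const previewRef = useRef<HTMLDivElement>(null)

  const handlePublish = async () => {
    if (!previewRef.current) return
    // Returns the public image URL, or null if capture or upload fails; failure is non-fatal
    await captureAndUploadOGImage(previewRef.current, templateId)
  }

  return (
    <div>
      <div ref={previewRef}>{/* workflow preview rendered here */}</div>
      <button onClick={handlePublish}>Publish</button>
    </div>
  )
}
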
apps/sim/lib/og/index.ts (new file, 7 lines)
@@ -0,0 +1,7 @@
+export {
+  captureAndUploadOGImage,
+  captureWorkflowPreview,
+  OG_IMAGE_HEIGHT,
+  OG_IMAGE_WIDTH,
+  uploadOGImage,
+} from './capture-preview'

apps/sim/lib/templates/permissions.ts (new file, 121 lines)
@@ -0,0 +1,121 @@
+import { db } from '@sim/db'
+import { member, templateCreators, templates, user } from '@sim/db/schema'
+import { and, eq, or } from 'drizzle-orm'
+
+export type CreatorPermissionLevel = 'member' | 'admin'
+
+/**
+ * Verifies if a user is a super user.
+ *
+ * @param userId - The ID of the user to check
+ * @returns Object with isSuperUser boolean
+ */
+export async function verifySuperUser(userId: string): Promise<{ isSuperUser: boolean }> {
+  const [currentUser] = await db.select().from(user).where(eq(user.id, userId)).limit(1)
+  return { isSuperUser: currentUser?.isSuperUser || false }
+}
+
+/**
+ * Fetches a template and verifies the user has permission to modify it.
+ * Combines template existence check and creator permission check in one call.
+ *
+ * @param templateId - The ID of the template
+ * @param userId - The ID of the user to check
+ * @param requiredLevel - The permission level required ('member' or 'admin')
+ * @returns Object with template data if authorized, or error information
+ */
+export async function verifyTemplateOwnership(
+  templateId: string,
+  userId: string,
+  requiredLevel: CreatorPermissionLevel = 'admin'
+): Promise<{
+  authorized: boolean
+  template?: typeof templates.$inferSelect
+  error?: string
+  status?: number
+}> {
+  const [template] = await db.select().from(templates).where(eq(templates.id, templateId)).limit(1)
+
+  if (!template) {
+    return { authorized: false, error: 'Template not found', status: 404 }
+  }
+
+  if (!template.creatorId) {
+    return { authorized: false, error: 'Access denied', status: 403 }
+  }
+
+  const { hasPermission, error } = await verifyCreatorPermission(
+    userId,
+    template.creatorId,
+    requiredLevel
+  )
+
+  if (!hasPermission) {
+    return { authorized: false, error: error || 'Access denied', status: 403 }
+  }
+
+  return { authorized: true, template }
+}
+
+/**
+ * Verifies if a user has permission to act on behalf of a creator profile.
+ *
+ * @param userId - The ID of the user to check
+ * @param creatorId - The ID of the creator profile
+ * @param requiredLevel - The permission level required ('member' for any org member, 'admin' for admin/owner only)
+ * @returns Object with hasPermission boolean and optional error message
+ */
+export async function verifyCreatorPermission(
+  userId: string,
+  creatorId: string,
+  requiredLevel: CreatorPermissionLevel = 'admin'
+): Promise<{ hasPermission: boolean; error?: string }> {
+  const creatorProfile = await db
+    .select()
+    .from(templateCreators)
+    .where(eq(templateCreators.id, creatorId))
+    .limit(1)
+
+  if (creatorProfile.length === 0) {
+    return { hasPermission: false, error: 'Creator profile not found' }
+  }
+
+  const creator = creatorProfile[0]
+
+  if (creator.referenceType === 'user') {
+    const hasPermission = creator.referenceId === userId
+    return {
+      hasPermission,
+      error: hasPermission ? undefined : 'You do not have permission to use this creator profile',
+    }
+  }
+
+  if (creator.referenceType === 'organization') {
+    const membershipConditions = [
+      eq(member.userId, userId),
+      eq(member.organizationId, creator.referenceId),
+    ]
+
+    if (requiredLevel === 'admin') {
+      membershipConditions.push(or(eq(member.role, 'admin'), eq(member.role, 'owner'))!)
+    }
+
+    const membership = await db
+      .select()
+      .from(member)
+      .where(and(...membershipConditions))
+      .limit(1)
+
+    if (membership.length === 0) {
+      const error =
+        requiredLevel === 'admin'
+          ? 'You must be an admin or owner of the organization to perform this action'
+          : 'You must be a member of the organization to use its creator profile'
+      return { hasPermission: false, error }
+    }
+
+    return { hasPermission: true }
+  }
+
+  return { hasPermission: false, error: 'Unknown creator profile type' }
+}

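For orientation only, not part of this commit: a minimal sketch of how the new ownership check would be used to guard a template API route. The route shape and response handling are assumptions; the helper, its signature, and its return fields come from the file above.

// Hypothetical route handler usage (illustrative sketch; route path is an assumption)
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { verifyTemplateOwnership } from '@/lib/templates/permissions'

export async function PUT(
  _request: NextRequest,
  { params }: { params: Promise<{ id: string }> }
) {
  const { id } = await params
  const session = await getSession()
  if (!session?.user?.id) {
    return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
  }

  // Requires admin/owner rights on the template's creator profile by default
  const check = await verifyTemplateOwnership(id, session.user.id)
  if (!check.authorized) {
    return NextResponse.json({ error: check.error }, { status: check.status ?? 403 })
  }

  // ...proceed with the update using check.template
  return NextResponse.json({ success: true })
}
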
@@ -85,6 +85,18 @@ export const BLOB_PROFILE_PICTURES_CONFIG = {
   containerName: env.AZURE_STORAGE_PROFILE_PICTURES_CONTAINER_NAME || '',
 }

+export const S3_OG_IMAGES_CONFIG = {
+  bucket: env.S3_OG_IMAGES_BUCKET_NAME || '',
+  region: env.AWS_REGION || '',
+}
+
+export const BLOB_OG_IMAGES_CONFIG = {
+  accountName: env.AZURE_ACCOUNT_NAME || '',
+  accountKey: env.AZURE_ACCOUNT_KEY || '',
+  connectionString: env.AZURE_CONNECTION_STRING || '',
+  containerName: env.AZURE_STORAGE_OG_IMAGES_CONTAINER_NAME || '',
+}
+
 /**
  * Get the current storage provider as a human-readable string
  */
@@ -151,6 +163,11 @@ function getS3Config(context: StorageContext): StorageConfig {
        bucket: S3_PROFILE_PICTURES_CONFIG.bucket,
        region: S3_PROFILE_PICTURES_CONFIG.region,
      }
+    case 'og-images':
+      return {
+        bucket: S3_OG_IMAGES_CONFIG.bucket || S3_CONFIG.bucket,
+        region: S3_OG_IMAGES_CONFIG.region || S3_CONFIG.region,
+      }
    default:
      return {
        bucket: S3_CONFIG.bucket,
@@ -206,6 +223,13 @@ function getBlobConfig(context: StorageContext): StorageConfig {
        connectionString: BLOB_PROFILE_PICTURES_CONFIG.connectionString,
        containerName: BLOB_PROFILE_PICTURES_CONFIG.containerName,
      }
+    case 'og-images':
+      return {
+        accountName: BLOB_OG_IMAGES_CONFIG.accountName || BLOB_CONFIG.accountName,
+        accountKey: BLOB_OG_IMAGES_CONFIG.accountKey || BLOB_CONFIG.accountKey,
+        connectionString: BLOB_OG_IMAGES_CONFIG.connectionString || BLOB_CONFIG.connectionString,
+        containerName: BLOB_OG_IMAGES_CONFIG.containerName || BLOB_CONFIG.containerName,
+      }
    default:
      return {
        accountName: BLOB_CONFIG.accountName,

@@ -5,6 +5,7 @@ export type StorageContext =
  | 'execution'
  | 'workspace'
  | 'profile-pictures'
+ | 'og-images'
  | 'logs'

 export interface FileInfo {

@@ -192,6 +192,15 @@ export function isSupportedVideoExtension(extension: string): extension is Suppo
 /**
  * Validate if an audio/video file type is supported for STT processing
  */
+const PNG_MAGIC_BYTES = Buffer.from([0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a])
+
+/**
+ * Validate that a buffer contains valid PNG data by checking magic bytes
+ */
+export function isValidPng(buffer: Buffer): boolean {
+  return buffer.length >= 8 && buffer.subarray(0, 8).equals(PNG_MAGIC_BYTES)
+}
+
 export function validateMediaFileType(
   fileName: string,
   mimeType: string

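For orientation only, not part of this commit: a minimal sketch of using isValidPng to verify an uploaded OG image before it is stored. The import path and decoding step are assumptions; the helper itself is defined above.

// Illustrative sketch; import path is an assumption
import { isValidPng } from '@/lib/uploads/validation'

function decodeAndValidatePng(imageData: string): Buffer {
  // Strip the data-URL prefix produced by html-to-image, then verify the PNG signature
  const base64 = imageData.replace(/^data:image\/png;base64,/, '')
  const buffer = Buffer.from(base64, 'base64')
  if (!isValidPng(buffer)) {
    throw new Error('Uploaded data is not a valid PNG')
  }
  return buffer
}
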
@@ -4,6 +4,7 @@ import { and, eq, sql } from 'drizzle-orm'
 import { nanoid } from 'nanoid'
 import Parser from 'rss-parser'
 import { pollingIdempotency } from '@/lib/core/idempotency/service'
+import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
 import { getBaseUrl } from '@/lib/core/utils/urls'
 import { createLogger } from '@/lib/logs/console/logger'

@@ -156,7 +157,7 @@ export async function pollRssWebhooks() {
      const { feed, items: newItems } = await fetchNewRssItems(config, requestId)

      if (!newItems.length) {
-       await updateWebhookConfig(webhookId, config, now.toISOString(), [])
+       await updateWebhookConfig(webhookId, now.toISOString(), [])
        await markWebhookSuccess(webhookId)
        logger.info(`[${requestId}] No new items found for webhook ${webhookId}`)
        successCount++
@@ -172,12 +173,11 @@ export async function pollRssWebhooks() {
        requestId
      )

-     // Collect guids from processed items
      const newGuids = newItems
        .map((item) => item.guid || item.link || '')
        .filter((guid) => guid.length > 0)

-     await updateWebhookConfig(webhookId, config, now.toISOString(), newGuids)
+     await updateWebhookConfig(webhookId, now.toISOString(), newGuids)

      if (itemFailedCount > 0 && processedCount === 0) {
        await markWebhookFailed(webhookId)
@@ -245,15 +245,36 @@ async function fetchNewRssItems(
  try {
    logger.debug(`[${requestId}] Fetching RSS feed: ${config.feedUrl}`)

-   // Parse the RSS feed
-   const feed = await parser.parseURL(config.feedUrl)
+   const urlValidation = await validateUrlWithDNS(config.feedUrl, 'feedUrl')
+   if (!urlValidation.isValid) {
+     logger.error(`[${requestId}] Invalid RSS feed URL: ${urlValidation.error}`)
+     throw new Error(`Invalid RSS feed URL: ${urlValidation.error}`)
+   }
+
+   const pinnedUrl = createPinnedUrl(config.feedUrl, urlValidation.resolvedIP!)
+
+   const response = await fetch(pinnedUrl, {
+     headers: {
+       Host: urlValidation.originalHostname!,
+       'User-Agent': 'SimStudio/1.0 RSS Poller',
+       Accept: 'application/rss+xml, application/xml, text/xml, */*',
+     },
+     signal: AbortSignal.timeout(30000),
+   })
+
+   if (!response.ok) {
+     throw new Error(`Failed to fetch RSS feed: ${response.status} ${response.statusText}`)
+   }
+
+   const xmlContent = await response.text()
+
+   const feed = await parser.parseString(xmlContent)

    if (!feed.items || !feed.items.length) {
      logger.debug(`[${requestId}] No items in feed`)
      return { feed: feed as RssFeed, items: [] }
    }

-   // Filter new items based on timestamp and guids
    const lastCheckedTime = config.lastCheckedTimestamp
      ? new Date(config.lastCheckedTimestamp)
      : null
@@ -262,12 +283,10 @@ async function fetchNewRssItems(
    const newItems = feed.items.filter((item) => {
      const itemGuid = item.guid || item.link || ''

-     // Check if we've already seen this item by guid
      if (itemGuid && lastSeenGuids.has(itemGuid)) {
        return false
      }

-     // Check if the item is newer than our last check
      if (lastCheckedTime && item.isoDate) {
        const itemDate = new Date(item.isoDate)
        if (itemDate <= lastCheckedTime) {
@@ -278,14 +297,12 @@ async function fetchNewRssItems(
      return true
    })

-   // Sort by date, newest first
    newItems.sort((a, b) => {
      const dateA = a.isoDate ? new Date(a.isoDate).getTime() : 0
      const dateB = b.isoDate ? new Date(b.isoDate).getTime() : 0
      return dateB - dateA
    })

-   // Limit to 25 items per poll to prevent overwhelming the system
    const limitedItems = newItems.slice(0, 25)

    logger.info(
@@ -383,17 +400,11 @@ async function processRssItems(
  return { processedCount, failedCount }
 }

-async function updateWebhookConfig(
-  webhookId: string,
-  _config: RssWebhookConfig,
-  timestamp: string,
-  newGuids: string[]
-) {
+async function updateWebhookConfig(webhookId: string, timestamp: string, newGuids: string[]) {
  try {
    const result = await db.select().from(webhook).where(eq(webhook.id, webhookId))
    const existingConfig = (result[0]?.providerConfig as Record<string, any>) || {}

-   // Merge new guids with existing ones, keeping only the most recent
    const existingGuids = existingConfig.lastSeenGuids || []
    const allGuids = [...newGuids, ...existingGuids].slice(0, MAX_GUIDS_TO_TRACK)

@@ -2,6 +2,7 @@ import { db } from '@sim/db'
 import { account, webhook } from '@sim/db/schema'
 import { and, eq } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
+import { createPinnedUrl, validateUrlWithDNS } from '@/lib/core/security/input-validation'
 import { createLogger } from '@/lib/logs/console/logger'
 import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'

@@ -18,7 +19,6 @@ export async function handleWhatsAppVerification(
  challenge: string | null
 ): Promise<NextResponse | null> {
  if (mode && token && challenge) {
-   // This is a WhatsApp verification request
    logger.info(`[${requestId}] WhatsApp verification request received for path: ${path}`)

    if (mode !== 'subscribe') {
@@ -26,13 +26,11 @@ export async function handleWhatsAppVerification(
      return new NextResponse('Invalid mode', { status: 400 })
    }

-   // Find all active WhatsApp webhooks
    const webhooks = await db
      .select()
      .from(webhook)
      .where(and(eq(webhook.provider, 'whatsapp'), eq(webhook.isActive, true)))

-   // Check if any webhook has a matching verification token
    for (const wh of webhooks) {
      const providerConfig = (wh.providerConfig as Record<string, any>) || {}
      const verificationToken = providerConfig.verificationToken
@@ -44,7 +42,6 @@ export async function handleWhatsAppVerification(

      if (token === verificationToken) {
        logger.info(`[${requestId}] WhatsApp verification successful for webhook ${wh.id}`)
-       // Return ONLY the challenge as plain text (exactly as WhatsApp expects)
        return new NextResponse(challenge, {
          status: 200,
          headers: {
@@ -72,6 +69,52 @@ export function handleSlackChallenge(body: any): NextResponse | null {
  return null
 }

+/**
+ * Fetches a URL with DNS pinning to prevent DNS rebinding attacks
+ * @param url - The URL to fetch
+ * @param accessToken - Authorization token (optional for pre-signed URLs)
+ * @param requestId - Request ID for logging
+ * @returns The fetch Response or null if validation fails
+ */
+async function fetchWithDNSPinning(
+  url: string,
+  accessToken: string,
+  requestId: string
+): Promise<Response | null> {
+  try {
+    const urlValidation = await validateUrlWithDNS(url, 'contentUrl')
+    if (!urlValidation.isValid) {
+      logger.warn(`[${requestId}] Invalid content URL: ${urlValidation.error}`, {
+        url: url.substring(0, 100),
+      })
+      return null
+    }
+
+    const pinnedUrl = createPinnedUrl(url, urlValidation.resolvedIP!)
+
+    const headers: Record<string, string> = {
+      Host: urlValidation.originalHostname!,
+    }
+
+    if (accessToken) {
+      headers.Authorization = `Bearer ${accessToken}`
+    }
+
+    const response = await fetch(pinnedUrl, {
+      headers,
+      redirect: 'follow',
+    })
+
+    return response
+  } catch (error) {
+    logger.error(`[${requestId}] Error fetching URL with DNS pinning`, {
+      error: error instanceof Error ? error.message : String(error),
+      url: url.substring(0, 100),
+    })
+    return null
+  }
+}
+
 /**
  * Format Microsoft Teams Graph change notification
  */
@@ -90,7 +133,6 @@ async function formatTeamsGraphNotification(
  const resource = notification.resource || ''
  const subscriptionId = notification.subscriptionId || ''

- // Extract chatId and messageId from resource path
  let chatId: string | null = null
  let messageId: string | null = null

@@ -159,7 +201,6 @@ async function formatTeamsGraphNotification(
    []
  let accessToken: string | null = null

- // Teams chat subscriptions require credentials
  if (!credentialId) {
    logger.error('Missing credentialId for Teams chat subscription', {
      chatId: resolvedChatId,
@@ -170,11 +211,9 @@ async function formatTeamsGraphNotification(
    })
  } else {
    try {
-     // Get userId from credential
      const rows = await db.select().from(account).where(eq(account.id, credentialId)).limit(1)
      if (rows.length === 0) {
        logger.error('Teams credential not found', { credentialId, chatId: resolvedChatId })
-       // Continue without message data
      } else {
        const effectiveUserId = rows[0].userId
        accessToken = await refreshAccessTokenIfNeeded(
@@ -207,19 +246,20 @@ async function formatTeamsGraphNotification(

        if (contentUrl.includes('sharepoint.com') || contentUrl.includes('onedrive')) {
          try {
-           const directRes = await fetch(contentUrl, {
-             headers: { Authorization: `Bearer ${accessToken}` },
-             redirect: 'follow',
-           })
+           const directRes = await fetchWithDNSPinning(
+             contentUrl,
+             accessToken,
+             'teams-attachment'
+           )

-           if (directRes.ok) {
+           if (directRes?.ok) {
              const arrayBuffer = await directRes.arrayBuffer()
              buffer = Buffer.from(arrayBuffer)
              mimeType =
                directRes.headers.get('content-type') ||
                contentTypeHint ||
                'application/octet-stream'
-           } else {
+           } else if (directRes) {
              const encodedUrl = Buffer.from(contentUrl)
                .toString('base64')
                .replace(/\+/g, '-')
@@ -310,9 +350,13 @@ async function formatTeamsGraphNotification(
            const downloadUrl = metadata['@microsoft.graph.downloadUrl']

            if (downloadUrl) {
-             const downloadRes = await fetch(downloadUrl)
+             const downloadRes = await fetchWithDNSPinning(
+               downloadUrl,
+               '', // downloadUrl is a pre-signed URL, no auth needed
+               'teams-onedrive-download'
+             )

-             if (downloadRes.ok) {
+             if (downloadRes?.ok) {
                const arrayBuffer = await downloadRes.arrayBuffer()
                buffer = Buffer.from(arrayBuffer)
                mimeType =
@@ -336,10 +380,12 @@ async function formatTeamsGraphNotification(
            }
          } else {
            try {
-             const ares = await fetch(contentUrl, {
-               headers: { Authorization: `Bearer ${accessToken}` },
-             })
-             if (ares.ok) {
+             const ares = await fetchWithDNSPinning(
+               contentUrl,
+               accessToken,
+               'teams-attachment-generic'
+             )
+             if (ares?.ok) {
                const arrayBuffer = await ares.arrayBuffer()
                buffer = Buffer.from(arrayBuffer)
                mimeType =
@@ -377,7 +423,6 @@ async function formatTeamsGraphNotification(
      }
    }

-   // If no message was fetched, return minimal data
    if (!message) {
      logger.warn('No message data available for Teams notification', {
        chatId: resolvedChatId,
@@ -413,8 +458,6 @@ async function formatTeamsGraphNotification(
    }
  }

- // Extract data from message - we know it exists now
- // body.content is the HTML/text content, summary is a plain text preview (max 280 chars)
  const messageText = message.body?.content || ''
  const from = message.from?.user || {}
  const createdAt = message.createdDateTime || ''

@@ -225,6 +225,13 @@ export function getBlockOutputs(
    return getUnifiedStartOutputs(subBlocks)
  }

+ if (blockType === 'human_in_the_loop') {
+   // For human_in_the_loop, only expose url (inputFormat fields are only available after resume)
+   return {
+     url: { type: 'string', description: 'Resume UI URL' },
+   }
+ }
+
  if (blockType === 'approval') {
    // Start with only url (apiUrl commented out - not accessible as output)
    const pauseResumeOutputs: Record<string, any> = {

@@ -1,10 +1,9 @@
 import { db } from '@sim/db'
-import { permissions, workflow as workflowTable, workspace } from '@sim/db/schema'
+import { permissions, userStats, workflow as workflowTable, workspace } from '@sim/db/schema'
 import type { InferSelectModel } from 'drizzle-orm'
 import { and, eq } from 'drizzle-orm'
 import { NextResponse } from 'next/server'
 import { getSession } from '@/lib/auth'
-import { getBaseUrl } from '@/lib/core/utils/urls'
 import { createLogger } from '@/lib/logs/console/logger'
 import type { PermissionType } from '@/lib/workspaces/permissions/utils'
 import type { ExecutionResult } from '@/executor/types'

@@ -93,17 +92,44 @@ export async function updateWorkflowRunCounts(workflowId: string, runs = 1) {
      throw new Error(`Workflow ${workflowId} not found`)
    }

-    // Use the API to update stats
-    const response = await fetch(`${getBaseUrl()}/api/workflows/${workflowId}/stats?runs=${runs}`, {
-      method: 'POST',
-    })
+    await db
+      .update(workflowTable)
+      .set({
+        runCount: workflow.runCount + runs,
+        lastRunAt: new Date(),
+      })
+      .where(eq(workflowTable.id, workflowId))

-    if (!response.ok) {
-      const error = await response.json()
-      throw new Error(error.error || 'Failed to update workflow stats')
+    try {
+      const existing = await db
+        .select()
+        .from(userStats)
+        .where(eq(userStats.userId, workflow.userId))
+        .limit(1)
+
+      if (existing.length === 0) {
+        logger.warn('User stats record not found - should be created during onboarding', {
+          userId: workflow.userId,
+          workflowId,
+        })
+      } else {
+        await db
+          .update(userStats)
+          .set({
+            lastActive: new Date(),
+          })
+          .where(eq(userStats.userId, workflow.userId))
+      }
+    } catch (error) {
+      logger.error(`Error updating userStats lastActive for userId ${workflow.userId}:`, error)
+      // Don't rethrow - we want to continue even if this fails
    }

-    return response.json()
+    return {
+      success: true,
+      runsAdded: runs,
+      newTotal: workflow.runCount + runs,
+    }
  } catch (error) {
    logger.error(`Error updating workflow stats for ${workflowId}`, error)
    throw error
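updateWorkflowRunCounts now bumps runCount/lastRunAt directly through Drizzle, touches userStats.lastActive on a best-effort basis, and returns a plain result object instead of round-tripping through the internal stats API. A minimal caller sketch of the new return shape; the call site itself is assumed, not shown in this diff:

// Hedged sketch: consuming the { success, runsAdded, newTotal } object returned above.
async function recordRun(workflowId: string) {
  const result = await updateWorkflowRunCounts(workflowId, 1)
  if (result.success) {
    console.log(`Workflow ${workflowId} now has ${result.newTotal} runs (+${result.runsAdded})`)
  }
}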
@@ -121,7 +147,6 @@ function sanitizeToolsForComparison(tools: any[] | undefined): any[] {
  }

  return tools.map((tool) => {
-    // Remove UI-only field: isExpanded
    const { isExpanded, ...cleanTool } = tool
    return cleanTool
  })

@@ -138,7 +163,6 @@ function sanitizeInputFormatForComparison(inputFormat: any[] | undefined): any[]
  }

  return inputFormat.map((field) => {
-    // Remove test-only field: value (used only for manual testing)
    const { value, collapsed, ...cleanField } = field
    return cleanField
  })
@@ -88,6 +88,7 @@
    "fuse.js": "7.1.0",
    "gray-matter": "^4.0.3",
    "groq-sdk": "^0.15.0",
+    "html-to-image": "1.11.13",
    "html-to-text": "^9.0.5",
    "input-otp": "^1.4.2",
    "ioredis": "^5.6.0",

apps/sim/tools/jira/get_users.ts (new file, 217 lines)
@@ -0,0 +1,217 @@
+import { getJiraCloudId } from '@/tools/jira/utils'
+import type { ToolConfig, ToolResponse } from '@/tools/types'
+
+export interface JiraGetUsersParams {
+  accessToken: string
+  domain: string
+  accountId?: string
+  startAt?: number
+  maxResults?: number
+  cloudId?: string
+}
+
+export interface JiraUser {
+  accountId: string
+  accountType?: string
+  active: boolean
+  displayName: string
+  emailAddress?: string
+  avatarUrls?: {
+    '16x16'?: string
+    '24x24'?: string
+    '32x32'?: string
+    '48x48'?: string
+  }
+  timeZone?: string
+  self?: string
+}
+
+export interface JiraGetUsersResponse extends ToolResponse {
+  output: {
+    ts: string
+    users: JiraUser[]
+    total?: number
+    startAt?: number
+    maxResults?: number
+  }
+}
+
+export const jiraGetUsersTool: ToolConfig<JiraGetUsersParams, JiraGetUsersResponse> = {
+  id: 'jira_get_users',
+  name: 'Jira Get Users',
+  description:
+    'Get Jira users. If an account ID is provided, returns a single user. Otherwise, returns a list of all users.',
+  version: '1.0.0',
+
+  oauth: {
+    required: true,
+    provider: 'jira',
+  },
+
+  params: {
+    accessToken: {
+      type: 'string',
+      required: true,
+      visibility: 'hidden',
+      description: 'OAuth access token for Jira',
+    },
+    domain: {
+      type: 'string',
+      required: true,
+      visibility: 'user-only',
+      description: 'Your Jira domain (e.g., yourcompany.atlassian.net)',
+    },
+    accountId: {
+      type: 'string',
+      required: false,
+      visibility: 'user-or-llm',
+      description:
+        'Optional account ID to get a specific user. If not provided, returns all users.',
+    },
+    startAt: {
+      type: 'number',
+      required: false,
+      visibility: 'user-or-llm',
+      description: 'The index of the first user to return (for pagination, default: 0)',
+    },
+    maxResults: {
+      type: 'number',
+      required: false,
+      visibility: 'user-or-llm',
+      description: 'Maximum number of users to return (default: 50)',
+    },
+    cloudId: {
+      type: 'string',
+      required: false,
+      visibility: 'hidden',
+      description:
+        'Jira Cloud ID for the instance. If not provided, it will be fetched using the domain.',
+    },
+  },
+
+  request: {
+    url: (params: JiraGetUsersParams) => {
+      if (params.cloudId) {
+        if (params.accountId) {
+          return `https://api.atlassian.com/ex/jira/${params.cloudId}/rest/api/3/user?accountId=${encodeURIComponent(params.accountId)}`
+        }
+        const queryParams = new URLSearchParams()
+        if (params.startAt !== undefined) queryParams.append('startAt', String(params.startAt))
+        if (params.maxResults !== undefined)
+          queryParams.append('maxResults', String(params.maxResults))
+        const queryString = queryParams.toString()
+        return `https://api.atlassian.com/ex/jira/${params.cloudId}/rest/api/3/users/search${queryString ? `?${queryString}` : ''}`
+      }
+      return 'https://api.atlassian.com/oauth/token/accessible-resources'
+    },
+    method: 'GET',
+    headers: (params: JiraGetUsersParams) => ({
+      Accept: 'application/json',
+      Authorization: `Bearer ${params.accessToken}`,
+    }),
+  },
+
+  transformResponse: async (response: Response, params?: JiraGetUsersParams) => {
+    if (!params?.cloudId) {
+      const cloudId = await getJiraCloudId(params!.domain, params!.accessToken)
+
+      let usersUrl: string
+      if (params!.accountId) {
+        usersUrl = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/user?accountId=${encodeURIComponent(params!.accountId)}`
+      } else {
+        const queryParams = new URLSearchParams()
+        if (params!.startAt !== undefined) queryParams.append('startAt', String(params!.startAt))
+        if (params!.maxResults !== undefined)
+          queryParams.append('maxResults', String(params!.maxResults))
+        const queryString = queryParams.toString()
+        usersUrl = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/users/search${queryString ? `?${queryString}` : ''}`
+      }
+
+      const usersResponse = await fetch(usersUrl, {
+        method: 'GET',
+        headers: {
+          Accept: 'application/json',
+          Authorization: `Bearer ${params!.accessToken}`,
+        },
+      })
+
+      if (!usersResponse.ok) {
+        let message = `Failed to get Jira users (${usersResponse.status})`
+        try {
+          const err = await usersResponse.json()
+          message = err?.errorMessages?.join(', ') || err?.message || message
+        } catch (_e) {}
+        throw new Error(message)
+      }
+
+      const data = await usersResponse.json()
+
+      const users = params!.accountId ? [data] : data
+
+      return {
+        success: true,
+        output: {
+          ts: new Date().toISOString(),
+          users: users.map((user: any) => ({
+            accountId: user.accountId,
+            accountType: user.accountType,
+            active: user.active,
+            displayName: user.displayName,
+            emailAddress: user.emailAddress,
+            avatarUrls: user.avatarUrls,
+            timeZone: user.timeZone,
+            self: user.self,
+          })),
+          total: params!.accountId ? 1 : users.length,
+          startAt: params!.startAt || 0,
+          maxResults: params!.maxResults || 50,
+        },
+      }
+    }
+
+    if (!response.ok) {
+      let message = `Failed to get Jira users (${response.status})`
+      try {
+        const err = await response.json()
+        message = err?.errorMessages?.join(', ') || err?.message || message
+      } catch (_e) {}
+      throw new Error(message)
+    }
+
+    const data = await response.json()
+
+    const users = params?.accountId ? [data] : data
+
+    return {
+      success: true,
+      output: {
+        ts: new Date().toISOString(),
+        users: users.map((user: any) => ({
+          accountId: user.accountId,
+          accountType: user.accountType,
+          active: user.active,
+          displayName: user.displayName,
+          emailAddress: user.emailAddress,
+          avatarUrls: user.avatarUrls,
+          timeZone: user.timeZone,
+          self: user.self,
+        })),
+        total: params?.accountId ? 1 : users.length,
+        startAt: params?.startAt || 0,
+        maxResults: params?.maxResults || 50,
+      },
+    }
+  },
+
+  outputs: {
+    ts: { type: 'string', description: 'Timestamp of the operation' },
+    users: {
+      type: 'json',
+      description:
+        'Array of users with accountId, displayName, emailAddress, active status, and avatarUrls',
+    },
+    total: { type: 'number', description: 'Total number of users returned' },
+    startAt: { type: 'number', description: 'Pagination start index' },
+    maxResults: { type: 'number', description: 'Maximum results per page' },
+  },
+}
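The new tool builds one of two Atlassian URLs from its params: a single-user lookup when accountId is provided, otherwise a paginated users/search listing (falling back to the accessible-resources endpoint when no cloudId is known yet). Illustrative only, with hypothetical cloudId and accountId values, exercising the request.url builder defined above:

import { jiraGetUsersTool, type JiraGetUsersParams } from '@/tools/jira/get_users'

// Cast because ToolConfig may type request.url as string | function; this tool uses a function.
const buildUrl = jiraGetUsersTool.request.url as (p: JiraGetUsersParams) => string

buildUrl({ accessToken: 'token', domain: 'acme.atlassian.net', cloudId: 'abc123', accountId: '5b10a2844c20165700ede21g' })
// -> https://api.atlassian.com/ex/jira/abc123/rest/api/3/user?accountId=5b10a2844c20165700ede21g

buildUrl({ accessToken: 'token', domain: 'acme.atlassian.net', cloudId: 'abc123', startAt: 0, maxResults: 50 })
// -> https://api.atlassian.com/ex/jira/abc123/rest/api/3/users/search?startAt=0&maxResults=50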
@@ -11,6 +11,7 @@ import { jiraDeleteIssueLinkTool } from '@/tools/jira/delete_issue_link'
 import { jiraDeleteWorklogTool } from '@/tools/jira/delete_worklog'
 import { jiraGetAttachmentsTool } from '@/tools/jira/get_attachments'
 import { jiraGetCommentsTool } from '@/tools/jira/get_comments'
+import { jiraGetUsersTool } from '@/tools/jira/get_users'
 import { jiraGetWorklogsTool } from '@/tools/jira/get_worklogs'
 import { jiraRemoveWatcherTool } from '@/tools/jira/remove_watcher'
 import { jiraRetrieveTool } from '@/tools/jira/retrieve'

@@ -44,4 +45,5 @@ export {
  jiraDeleteIssueLinkTool,
  jiraAddWatcherTool,
  jiraRemoveWatcherTool,
+  jiraGetUsersTool,
}

@@ -69,6 +69,12 @@ export interface JiraWriteParams {
  cloudId?: string
  issueType: string
  parent?: { key: string }
+  labels?: string[]
+  duedate?: string
+  reporter?: string
+  environment?: string
+  customFieldId?: string
+  customFieldValue?: string
}

export interface JiraWriteResponse extends ToolResponse {
@@ -46,14 +46,14 @@ export const jiraWriteTool: ToolConfig<JiraWriteParams, JiraWriteResponse> = {
    priority: {
      type: 'string',
      required: false,
-      visibility: 'hidden',
-      description: 'Priority for the issue',
+      visibility: 'user-or-llm',
+      description: 'Priority ID or name for the issue (e.g., "10000" or "High")',
    },
    assignee: {
      type: 'string',
      required: false,
-      visibility: 'hidden',
-      description: 'Assignee for the issue',
+      visibility: 'user-or-llm',
+      description: 'Assignee account ID for the issue',
    },
    cloudId: {
      type: 'string',

@@ -68,6 +68,42 @@ export const jiraWriteTool: ToolConfig<JiraWriteParams, JiraWriteResponse> = {
      visibility: 'hidden',
      description: 'Type of issue to create (e.g., Task, Story)',
    },
+    labels: {
+      type: 'array',
+      required: false,
+      visibility: 'user-or-llm',
+      description: 'Labels for the issue (array of label names)',
+    },
+    duedate: {
+      type: 'string',
+      required: false,
+      visibility: 'user-or-llm',
+      description: 'Due date for the issue (format: YYYY-MM-DD)',
+    },
+    reporter: {
+      type: 'string',
+      required: false,
+      visibility: 'user-or-llm',
+      description: 'Reporter account ID for the issue',
+    },
+    environment: {
+      type: 'string',
+      required: false,
+      visibility: 'user-or-llm',
+      description: 'Environment information for the issue',
+    },
+    customFieldId: {
+      type: 'string',
+      required: false,
+      visibility: 'user-or-llm',
+      description: 'Custom field ID (e.g., customfield_10001)',
+    },
+    customFieldValue: {
+      type: 'string',
+      required: false,
+      visibility: 'user-or-llm',
+      description: 'Value for the custom field',
+    },
  },

  request: {

@@ -89,6 +125,12 @@ export const jiraWriteTool: ToolConfig<JiraWriteParams, JiraWriteResponse> = {
        cloudId: params.cloudId,
        issueType: params.issueType,
        parent: params.parent,
+        labels: params.labels,
+        duedate: params.duedate,
+        reporter: params.reporter,
+        environment: params.environment,
+        customFieldId: params.customFieldId,
+        customFieldValue: params.customFieldValue,
      }
    },
  },

@@ -134,5 +176,6 @@ export const jiraWriteTool: ToolConfig<JiraWriteParams, JiraWriteResponse> = {
    issueKey: { type: 'string', description: 'Created issue key (e.g., PROJ-123)' },
    summary: { type: 'string', description: 'Issue summary' },
    url: { type: 'string', description: 'URL to the created issue' },
+    assigneeId: { type: 'string', description: 'Account ID of the assigned user (if assigned)' },
  },
}
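The write tool now accepts labels, duedate, reporter, environment, and a single custom field, and forwards them in its request body (previous hunk). An example params object using the new optional fields; the values are hypothetical, and the fields outside this hunk (token, domain, summary) are assumptions about the rest of JiraWriteParams:

const writeParams = {
  accessToken: 'token',
  domain: 'acme.atlassian.net',
  summary: 'Checkout page intermittently returns 500', // assumed existing field
  issueType: 'Bug',
  labels: ['checkout', 'regression'],
  duedate: '2025-01-31',
  reporter: '5b10a2844c20165700ede21g',
  environment: 'production, eu-west-1',
  customFieldId: 'customfield_10001',
  customFieldValue: 'Payments',
}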
@@ -463,6 +463,7 @@ import {
  jiraDeleteWorklogTool,
  jiraGetAttachmentsTool,
  jiraGetCommentsTool,
+  jiraGetUsersTool,
  jiraGetWorklogsTool,
  jiraRemoveWatcherTool,
  jiraRetrieveTool,

@@ -1478,6 +1479,7 @@ export const tools: Record<string, ToolConfig> = {
  jira_delete_issue_link: jiraDeleteIssueLinkTool,
  jira_add_watcher: jiraAddWatcherTool,
  jira_remove_watcher: jiraRemoveWatcherTool,
+  jira_get_users: jiraGetUsersTool,
  kalshi_get_markets: kalshiGetMarketsTool,
  kalshi_get_market: kalshiGetMarketTool,
  kalshi_get_events: kalshiGetEventsTool,
@@ -110,10 +110,15 @@ export const slackListChannelsTool: ToolConfig<SlackListChannelsParams, SlackLis
      creator: channel.creator,
    }))

+    const ids = channels.map((channel: { id: string }) => channel.id)
+    const names = channels.map((channel: { name: string }) => channel.name)
+
    return {
      success: true,
      output: {
        channels,
+        ids,
+        names,
        count: channels.length,
      },
    }

@@ -142,6 +147,14 @@ export const slackListChannelsTool: ToolConfig<SlackListChannelsParams, SlackLis
        },
      },
    },
+    ids: {
+      type: 'array',
+      description: 'Array of channel IDs for easy access',
+    },
+    names: {
+      type: 'array',
+      description: 'Array of channel names for easy access',
+    },
    count: {
      type: 'number',
      description: 'Total number of channels returned',
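The channel list tool now exposes flat ids/names arrays alongside the full channel objects, so downstream steps can grab an ID without mapping over the list themselves (the user list tool below gets the same treatment). A hedged helper sketch of that lookup; the call site producing the output is assumed:

// `output` matches the extended SlackListChannelsResponse['output'] shape.
function findChannelId(output: { ids: string[]; names: string[] }, name: string): string | undefined {
  const i = output.names.indexOf(name)
  return i === -1 ? undefined : output.ids[i]
}
// e.g. findChannelId(response.output, 'general') -> 'C0123456789' (hypothetical ID)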
@@ -102,10 +102,15 @@ export const slackListUsersTool: ToolConfig<SlackListUsersParams, SlackListUsers
      status_emoji: user.profile?.status_emoji || '',
    }))

+    const ids = users.map((user: { id: string }) => user.id)
+    const names = users.map((user: { name: string }) => user.name)
+
    return {
      success: true,
      output: {
        users,
+        ids,
+        names,
        count: users.length,
      },
    }

@@ -133,6 +138,14 @@ export const slackListUsersTool: ToolConfig<SlackListUsersParams, SlackListUsers
        },
      },
    },
+    ids: {
+      type: 'array',
+      description: 'Array of user IDs for easy access',
+    },
+    names: {
+      type: 'array',
+      description: 'Array of usernames for easy access',
+    },
    count: {
      type: 'number',
      description: 'Total number of users returned',
@@ -51,7 +51,7 @@ export const slackMessageReaderTool: ToolConfig<
      type: 'number',
      required: false,
      visibility: 'user-or-llm',
-      description: 'Number of messages to retrieve (default: 10, max: 100)',
+      description: 'Number of messages to retrieve (default: 10, max: 15)',
    },
    oldest: {
      type: 'string',

@@ -245,6 +245,8 @@ export interface SlackChannel {
export interface SlackListChannelsResponse extends ToolResponse {
  output: {
    channels: SlackChannel[]
+    ids: string[]
+    names: string[]
    count: number
  }
}

@@ -291,6 +293,8 @@ export interface SlackUser {
export interface SlackListUsersResponse extends ToolResponse {
  output: {
    users: SlackUser[]
+    ids: string[]
+    names: string[]
    count: number
  }
}
bun.lock
@@ -139,6 +139,7 @@
    "fuse.js": "7.1.0",
    "gray-matter": "^4.0.3",
    "groq-sdk": "^0.15.0",
+    "html-to-image": "1.11.13",
    "html-to-text": "^9.0.5",
    "input-otp": "^1.4.2",
    "ioredis": "^5.6.0",

@@ -2212,6 +2213,8 @@

    "html-escaper": ["html-escaper@2.0.2", "", {}, "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg=="],

+    "html-to-image": ["html-to-image@1.11.13", "", {}, "sha512-cuOPoI7WApyhBElTTb9oqsawRvZ0rHhaHwghRLlTuffoD1B2aDemlCruLeZrUIIdvG7gs9xeELEPm6PhuASqrg=="],
+
    "html-to-text": ["html-to-text@9.0.5", "", { "dependencies": { "@selderee/plugin-htmlparser2": "^0.11.0", "deepmerge": "^4.3.1", "dom-serializer": "^2.0.0", "htmlparser2": "^8.0.2", "selderee": "^0.11.0" } }, "sha512-qY60FjREgVZL03vJU6IfMV4GDjGBIoOyvuFdpBDIX9yTlDw0TjxVBQp+P8NvpdIXNJvfWBTNul7fsAQJq2FNpg=="],

    "html-url-attributes": ["html-url-attributes@3.0.1", "", {}, "sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ=="],
@@ -56,6 +56,7 @@ app:
  S3_CHAT_BUCKET_NAME: "chat-files" # Deployed chat assets
  S3_COPILOT_BUCKET_NAME: "copilot-files" # Copilot attachments
  S3_PROFILE_PICTURES_BUCKET_NAME: "profile-pictures" # User avatars
+  S3_OG_IMAGES_BUCKET_NAME: "og-images" # OpenGraph preview images

# Realtime service
realtime:

@@ -58,6 +58,7 @@ app:
  AZURE_STORAGE_CHAT_CONTAINER_NAME: "chat-files" # Deployed chat assets container
  AZURE_STORAGE_COPILOT_CONTAINER_NAME: "copilot-files" # Copilot attachments container
  AZURE_STORAGE_PROFILE_PICTURES_CONTAINER_NAME: "profile-pictures" # User avatars container
+  AZURE_STORAGE_OG_IMAGES_CONTAINER_NAME: "og-images" # OpenGraph preview images container

# Realtime service
realtime:

@@ -133,6 +133,7 @@ app:
  S3_CHAT_BUCKET_NAME: "" # S3 bucket for deployed chat files
  S3_COPILOT_BUCKET_NAME: "" # S3 bucket for copilot files
  S3_PROFILE_PICTURES_BUCKET_NAME: "" # S3 bucket for user profile pictures
+  S3_OG_IMAGES_BUCKET_NAME: "" # S3 bucket for OpenGraph preview images

# Azure Blob Storage Configuration (optional - for file storage)
# If configured, files will be stored in Azure Blob instead of local storage

@@ -146,6 +147,7 @@ app:
  AZURE_STORAGE_CHAT_CONTAINER_NAME: "" # Azure container for deployed chat files
  AZURE_STORAGE_COPILOT_CONTAINER_NAME: "" # Azure container for copilot files
  AZURE_STORAGE_PROFILE_PICTURES_CONTAINER_NAME: "" # Azure container for user profile pictures
+  AZURE_STORAGE_OG_IMAGES_CONTAINER_NAME: "" # Azure container for OpenGraph preview images

# Service configuration
service:
packages/db/migrations/0124_blushing_colonel_america.sql (new file, 1 line)
@@ -0,0 +1 @@
+ALTER TABLE "templates" ADD COLUMN "og_image_url" text;

packages/db/migrations/0125_eager_lily_hollister.sql (new file, 4 lines)
@@ -0,0 +1,4 @@
+CREATE INDEX "api_key_workspace_type_idx" ON "api_key" USING btree ("workspace_id","type");--> statement-breakpoint
+CREATE INDEX "api_key_user_type_idx" ON "api_key" USING btree ("user_id","type");--> statement-breakpoint
+CREATE INDEX "verification_expires_at_idx" ON "verification" USING btree ("expires_at");--> statement-breakpoint
+CREATE INDEX "workflow_blocks_type_idx" ON "workflow_blocks" USING btree ("type");

packages/db/migrations/meta/0124_snapshot.json (new file, 7728 lines; diff suppressed because it is too large)

packages/db/migrations/meta/0125_snapshot.json (new file, 7801 lines; diff suppressed because it is too large)

@@ -862,6 +862,20 @@
      "when": 1765932898404,
      "tag": "0123_windy_lockheed",
      "breakpoints": true
+    },
+    {
+      "idx": 124,
+      "version": "7",
+      "when": 1766108872186,
+      "tag": "0124_blushing_colonel_america",
+      "breakpoints": true
+    },
+    {
+      "idx": 125,
+      "version": "7",
+      "when": 1766133598113,
+      "tag": "0125_eager_lily_hollister",
+      "breakpoints": true
    }
  ]
}
@@ -108,6 +108,7 @@ export const verification = pgTable(
  },
  (table) => ({
    identifierIdx: index('verification_identifier_idx').on(table.identifier),
+    expiresAtIdx: index('verification_expires_at_idx').on(table.expiresAt),
  })
)

@@ -197,6 +198,7 @@ export const workflowBlocks = pgTable(
  },
  (table) => ({
    workflowIdIdx: index('workflow_blocks_workflow_id_idx').on(table.workflowId),
+    typeIdx: index('workflow_blocks_type_idx').on(table.type),
  })
)

@@ -618,6 +620,8 @@ export const apiKey = pgTable(
      'workspace_type_check',
      sql`(type = 'workspace' AND workspace_id IS NOT NULL) OR (type = 'personal' AND workspace_id IS NULL)`
    ),
+    workspaceTypeIdx: index('api_key_workspace_type_idx').on(table.workspaceId, table.type),
+    userTypeIdx: index('api_key_user_type_idx').on(table.userId, table.type),
  })
)

@@ -1381,6 +1385,7 @@ export const templates = pgTable(
    tags: text('tags').array().notNull().default(sql`'{}'::text[]`), // Array of tags
    requiredCredentials: jsonb('required_credentials').notNull().default('[]'), // Array of credential requirements
    state: jsonb('state').notNull(), // Store the workflow state directly
+    ogImageUrl: text('og_image_url'), // Pre-generated OpenGraph image URL
    createdAt: timestamp('created_at').notNull().defaultNow(),
    updatedAt: timestamp('updated_at').notNull().defaultNow(),
  },
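The new composite indexes on api_key line up with lookups that filter by workspace or user plus key type, while verification and workflow_blocks get single-column indexes for expiry sweeps and block-type queries. A hedged Drizzle sketch of the kind of query the (workspace_id, type) index serves; the actual call sites are not part of this diff:

import { db } from '@sim/db'
import { apiKey } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'

// Served by api_key_workspace_type_idx (workspace_id, type).
async function listWorkspaceApiKeys(workspaceId: string) {
  return db
    .select()
    .from(apiKey)
    .where(and(eq(apiKey.workspaceId, workspaceId), eq(apiKey.type, 'workspace')))
}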
@@ -71,7 +71,7 @@ async function generateIconMapping(): Promise<Record<string, string>> {
  console.log('Generating icon mapping from block definitions...')

  const iconMapping: Record<string, string> = {}
-  const blockFiles = await glob(`${BLOCKS_PATH}/*.ts`)
+  const blockFiles = (await glob(`${BLOCKS_PATH}/*.ts`)).sort()

  for (const blockFile of blockFiles) {
    const fileContent = fs.readFileSync(blockFile, 'utf-8')

@@ -132,6 +132,7 @@ function writeIconMapping(iconMapping: Record<string, string>): void {

  // Generate mapping with direct references (no dynamic access for tree shaking)
  const mappingEntries = Object.entries(iconMapping)
+    .sort(([a], [b]) => a.localeCompare(b))
    .map(([blockType, iconName]) => `  ${blockType}: ${iconName},`)
    .join('\n')

@@ -1165,7 +1166,7 @@ async function generateAllBlockDocs() {
  const iconMapping = await generateIconMapping()
  writeIconMapping(iconMapping)

-  const blockFiles = await glob(`${BLOCKS_PATH}/*.ts`)
+  const blockFiles = (await glob(`${BLOCKS_PATH}/*.ts`)).sort()

  for (const blockFile of blockFiles) {
    await generateBlockDoc(blockFile)