Compare commits


11 Commits

| Author | SHA1 | Message | Date |
| ------ | ---- | ------- | ---- |
| priyanshu.solanki | f81c0ba9bf | fix- adding the useWebhookUrl check becfore calling loadWebhookOrGenerateUrl function: | 2025-12-18 12:20:13 -07:00 |
| priyanshu.solanki | 6c10f31a40 | using official mcp sdk and added description fields | 2025-12-17 21:20:30 -07:00 |
| priyanshu.solanki | 896e9674c2 | removing unecessary auth | 2025-12-17 18:58:51 -07:00 |
| priyanshu.solanki | f2450d3c26 | refactored code to use hasstartblock from the tirgger utils | 2025-12-17 18:03:01 -07:00 |
| priyanshu.solanki | cfbe4a4790 | fix lint errors | 2025-12-17 17:40:07 -07:00 |
| priyanshu.solanki | 1f22d7a9ec | fix | 2025-12-17 17:37:31 -07:00 |
| priyanshu.solanki | 2259bfcb8f | fixing merge conflicts | 2025-12-17 17:25:28 -07:00 |
| priyanshu.solanki | 85af046754 | using mcn components | 2025-12-17 17:25:27 -07:00 |
| priyanshu.solanki | 57f3697dd5 | fixing lint issues | 2025-12-17 17:25:27 -07:00 |
| priyanshu.solanki | a15ac7360d | fixed the issue of UI rendering for deleted mcp servers | 2025-12-17 17:25:27 -07:00 |
| priyanshu.solanki | 93217438ef | added a workflow as mcp | 2025-12-17 17:24:16 -07:00 |
342 changed files with 10199 additions and 28730 deletions

View File

@@ -48,19 +48,6 @@ jobs:
ENCRYPTION_KEY: '7cf672e460e430c1fba707575c2b0e2ad5a99dddf9b7b7e3b5646e630861db1c' # dummy key for CI only
run: bun run test
- name: Check schema and migrations are in sync
working-directory: packages/db
run: |
bunx drizzle-kit generate --config=./drizzle.config.ts
if [ -n "$(git status --porcelain ./migrations)" ]; then
echo "❌ Schema and migrations are out of sync!"
echo "Run 'cd packages/db && bunx drizzle-kit generate' and commit the new migrations."
git status --porcelain ./migrations
git diff ./migrations
exit 1
fi
echo "✅ Schema and migrations are in sync"
- name: Build application
env:
NODE_OPTIONS: '--no-warnings'

View File

@@ -188,7 +188,6 @@ DATABASE_URL="postgresql://postgres:your_password@localhost:5432/simstudio"
Then run the migrations:
```bash
cd packages/db # Required so drizzle picks correct .env file
bunx drizzle-kit migrate --config=./drizzle.config.ts
```

View File

@@ -1,23 +0,0 @@
import { DocsBody, DocsPage } from 'fumadocs-ui/page'
export const metadata = {
title: 'Page Not Found',
}
export default function NotFound() {
return (
<DocsPage>
<DocsBody>
<div className='flex min-h-[60vh] flex-col items-center justify-center text-center'>
<h1 className='mb-4 bg-gradient-to-b from-[#8357FF] to-[#6F3DFA] bg-clip-text font-bold text-8xl text-transparent'>
404
</h1>
<h2 className='mb-2 font-semibold text-2xl text-foreground'>Page Not Found</h2>
<p className='text-muted-foreground'>
The page you're looking for doesn't exist or has been moved.
</p>
</div>
</DocsBody>
</DocsPage>
)
}

View File

@@ -6,10 +6,7 @@ import { source } from '@/lib/source'
export const revalidate = false
export async function GET(
_request: NextRequest,
{ params }: { params: Promise<{ slug?: string[] }> }
) {
export async function GET(_req: NextRequest, { params }: { params: Promise<{ slug?: string[] }> }) {
const { slug } = await params
let lang: (typeof i18n.languages)[number] = i18n.defaultLanguage
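The collapsed signature above still receives `params` as a Promise in this style of route handler. A minimal sketch of how the awaited value is typically consumed, assuming the same slug shape (the response body is illustrative, not taken from the diff):
```ts
import type { NextRequest } from 'next/server'

// Sketch of the same handler shape: `params` arrives as a Promise and must be
// awaited before the slug segments can be read.
export async function GET(
  _req: NextRequest,
  { params }: { params: Promise<{ slug?: string[] }> }
) {
  const { slug } = await params
  // Illustrative only: echo the resolved path segments back as JSON.
  return Response.json({ path: (slug ?? []).join('/') })
}
```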

View File

@@ -2452,56 +2452,6 @@ export const GeminiIcon = (props: SVGProps<SVGSVGElement>) => (
</svg>
)
export const VertexIcon = (props: SVGProps<SVGSVGElement>) => (
<svg
{...props}
id='standard_product_icon'
xmlns='http://www.w3.org/2000/svg'
version='1.1'
viewBox='0 0 512 512'
>
<g id='bounding_box'>
<rect width='512' height='512' fill='none' />
</g>
<g id='art'>
<path
d='M128,244.99c-8.84,0-16-7.16-16-16v-95.97c0-8.84,7.16-16,16-16s16,7.16,16,16v95.97c0,8.84-7.16,16-16,16Z'
fill='#ea4335'
/>
<path
d='M256,458c-2.98,0-5.97-.83-8.59-2.5l-186-122c-7.46-4.74-9.65-14.63-4.91-22.09,4.75-7.46,14.64-9.65,22.09-4.91l177.41,116.53,177.41-116.53c7.45-4.74,17.34-2.55,22.09,4.91,4.74,7.46,2.55,17.34-4.91,22.09l-186,122c-2.62,1.67-5.61,2.5-8.59,2.5Z'
fill='#fbbc04'
/>
<path
d='M256,388.03c-8.84,0-16-7.16-16-16v-73.06c0-8.84,7.16-16,16-16s16,7.16,16,16v73.06c0,8.84-7.16,16-16,16Z'
fill='#34a853'
/>
<circle cx='128' cy='70' r='16' fill='#ea4335' />
<circle cx='128' cy='292' r='16' fill='#ea4335' />
<path
d='M384.23,308.01c-8.82,0-15.98-7.14-16-15.97l-.23-94.01c-.02-8.84,7.13-16.02,15.97-16.03h.04c8.82,0,15.98,7.14,16,15.97l.23,94.01c.02,8.84-7.13,16.02-15.97,16.03h-.04Z'
fill='#4285f4'
/>
<circle cx='384' cy='70' r='16' fill='#4285f4' />
<circle cx='384' cy='134' r='16' fill='#4285f4' />
<path
d='M320,220.36c-8.84,0-16-7.16-16-16v-103.02c0-8.84,7.16-16,16-16s16,7.16,16,16v103.02c0,8.84-7.16,16-16,16Z'
fill='#fbbc04'
/>
<circle cx='256' cy='171' r='16' fill='#34a853' />
<circle cx='256' cy='235' r='16' fill='#34a853' />
<circle cx='320' cy='265' r='16' fill='#fbbc04' />
<circle cx='320' cy='329' r='16' fill='#fbbc04' />
<path
d='M192,217.36c-8.84,0-16-7.16-16-16v-100.02c0-8.84,7.16-16,16-16s16,7.16,16,16v100.02c0,8.84-7.16,16-16,16Z'
fill='#fbbc04'
/>
<circle cx='192' cy='265' r='16' fill='#fbbc04' />
<circle cx='192' cy='329' r='16' fill='#fbbc04' />
</g>
</svg>
)
export const CerebrasIcon = (props: SVGProps<SVGSVGElement>) => (
<svg
{...props}
@@ -3387,14 +3337,17 @@ export function SalesforceIcon(props: SVGProps<SVGSVGElement>) {
export function ServiceNowIcon(props: SVGProps<SVGSVGElement>) {
return (
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 71.1 63.6'>
<svg
{...props}
xmlns='http://www.w3.org/2000/svg'
viewBox='0 0 1570 1403'
width='48'
height='48'
>
<path
fill='#62d84e'
fillRule='evenodd'
clipRule='evenodd'
fill='#62D84E'
d='M35.8,0C16.1,0,0,15.9,0,35.6c0,9.8,4,19.3,11.2,26c2.5,2.4,6.4,2.6,9.2,0.5c9-6.7,21.4-6.7,30.4,0
c2.8,2.1,6.7,1.9,9.2-0.5C74.3,48,74.9,25.4,61.3,11.1C54.7,4.1,45.4,0.1,35.8,0 M35.6,53.5C26,53.8,18,46.2,17.8,36.7
c0-0.3,0-0.6,0-0.9c0-9.8,8-17.8,17.8-17.8s17.8,8,17.8,17.8c0.3,9.6-7.3,17.5-16.8,17.8C36.2,53.5,35.9,53.5,35.6,53.5'
d='M1228.4 138.9c129.2 88.9 228.9 214.3 286.3 360.2 57.5 145.8 70 305.5 36 458.5S1437.8 1250 1324 1357.9c-13.3 12.9-28.8 23.4-45.8 30.8-17 7.5-35.2 11.9-53.7 12.9-18.5 1.1-37.1-1.1-54.8-6.6-17.7-5.4-34.3-13.9-49.1-25.2-48.2-35.9-101.8-63.8-158.8-82.6-57.1-18.9-116.7-28.5-176.8-28.5s-119.8 9.6-176.8 28.5c-57 18.8-110.7 46.7-158.9 82.6-14.6 11.2-31 19.8-48.6 25.3s-36 7.8-54.4 6.8c-18.4-.9-36.5-5.1-53.4-12.4s-32.4-17.5-45.8-30.2C132.5 1251 53 1110.8 19 956.8s-20.9-314.6 37.6-461c58.5-146.5 159.6-272 290.3-360.3S631.8.1 789.6.5c156.8 1.3 309.6 49.6 438.8 138.4m-291.8 1014c48.2-19.2 92-48 128.7-84.6 36.7-36.7 65.5-80.4 84.7-128.6 19.2-48.1 28.4-99.7 27-151.5 0-103.9-41.3-203.5-114.8-277S889 396.4 785 396.4s-203.7 41.3-277.2 114.8S393 684.3 393 788.2c-1.4 51.8 7.8 103.4 27 151.5 19.2 48.2 48 91.9 84.7 128.6 36.7 36.6 80.5 65.4 128.6 84.6 48.2 19.2 99.8 28.4 151.7 27 51.8 1.4 103.4-7.8 151.6-27'
/>
</svg>
)

View File

@@ -120,117 +120,117 @@ import {
type IconComponent = ComponentType<SVGProps<SVGSVGElement>>
export const blockTypeToIconMap: Record<string, IconComponent> = {
ahrefs: AhrefsIcon,
airtable: AirtableIcon,
apify: ApifyIcon,
apollo: ApolloIcon,
arxiv: ArxivIcon,
asana: AsanaIcon,
browser_use: BrowserUseIcon,
calendly: CalendlyIcon,
clay: ClayIcon,
confluence: ConfluenceIcon,
cursor: CursorIcon,
datadog: DatadogIcon,
discord: DiscordIcon,
dropbox: DropboxIcon,
duckduckgo: DuckDuckGoIcon,
dynamodb: DynamoDBIcon,
elasticsearch: ElasticsearchIcon,
elevenlabs: ElevenLabsIcon,
exa: ExaAIIcon,
file: DocumentIcon,
firecrawl: FirecrawlIcon,
github: GithubIcon,
gitlab: GitLabIcon,
gmail: GmailIcon,
google_calendar: GoogleCalendarIcon,
google_docs: GoogleDocsIcon,
google_drive: GoogleDriveIcon,
google_forms: GoogleFormsIcon,
google_groups: GoogleGroupsIcon,
google_search: GoogleIcon,
google_sheets: GoogleSheetsIcon,
google_slides: GoogleSlidesIcon,
google_vault: GoogleVaultIcon,
grafana: GrafanaIcon,
hubspot: HubspotIcon,
huggingface: HuggingFaceIcon,
hunter: HunterIOIcon,
image_generator: ImageIcon,
incidentio: IncidentioIcon,
intercom: IntercomIcon,
jina: JinaAIIcon,
jira: JiraIcon,
kalshi: KalshiIcon,
knowledge: PackageSearchIcon,
linear: LinearIcon,
linkedin: LinkedInIcon,
linkup: LinkupIcon,
mailchimp: MailchimpIcon,
mailgun: MailgunIcon,
mem0: Mem0Icon,
postgresql: PostgresIcon,
twilio_voice: TwilioIcon,
elasticsearch: ElasticsearchIcon,
rds: RDSIcon,
translate: TranslateIcon,
dynamodb: DynamoDBIcon,
wordpress: WordpressIcon,
tavily: TavilyIcon,
zendesk: ZendeskIcon,
youtube: YouTubeIcon,
supabase: SupabaseIcon,
vision: EyeIcon,
zoom: ZoomIcon,
confluence: ConfluenceIcon,
arxiv: ArxivIcon,
webflow: WebflowIcon,
pinecone: PineconeIcon,
apollo: ApolloIcon,
servicenow: ServiceNowIcon,
whatsapp: WhatsAppIcon,
typeform: TypeformIcon,
qdrant: QdrantIcon,
shopify: ShopifyIcon,
asana: AsanaIcon,
sqs: SQSIcon,
apify: ApifyIcon,
memory: BrainIcon,
microsoft_excel: MicrosoftExcelIcon,
microsoft_planner: MicrosoftPlannerIcon,
microsoft_teams: MicrosoftTeamsIcon,
mistral_parse: MistralIcon,
gitlab: GitLabIcon,
polymarket: PolymarketIcon,
serper: SerperIcon,
linear: LinearIcon,
exa: ExaAIIcon,
telegram: TelegramIcon,
salesforce: SalesforceIcon,
hubspot: HubspotIcon,
hunter: HunterIOIcon,
linkup: LinkupIcon,
mongodb: MongoDBIcon,
mysql: MySQLIcon,
airtable: AirtableIcon,
discord: DiscordIcon,
ahrefs: AhrefsIcon,
neo4j: Neo4jIcon,
notion: NotionIcon,
onedrive: MicrosoftOneDriveIcon,
tts: TTSIcon,
jina: JinaAIIcon,
google_docs: GoogleDocsIcon,
perplexity: PerplexityIcon,
google_search: GoogleIcon,
x: xIcon,
kalshi: KalshiIcon,
google_calendar: GoogleCalendarIcon,
zep: ZepIcon,
posthog: PosthogIcon,
grafana: GrafanaIcon,
google_slides: GoogleSlidesIcon,
microsoft_planner: MicrosoftPlannerIcon,
thinking: BrainIcon,
pipedrive: PipedriveIcon,
dropbox: DropboxIcon,
stagehand: StagehandIcon,
google_forms: GoogleFormsIcon,
file: DocumentIcon,
mistral_parse: MistralIcon,
gmail: GmailIcon,
openai: OpenAIIcon,
outlook: OutlookIcon,
parallel_ai: ParallelIcon,
perplexity: PerplexityIcon,
pinecone: PineconeIcon,
pipedrive: PipedriveIcon,
polymarket: PolymarketIcon,
postgresql: PostgresIcon,
posthog: PosthogIcon,
qdrant: QdrantIcon,
rds: RDSIcon,
reddit: RedditIcon,
incidentio: IncidentioIcon,
onedrive: MicrosoftOneDriveIcon,
resend: ResendIcon,
s3: S3Icon,
salesforce: SalesforceIcon,
search: SearchIcon,
sendgrid: SendgridIcon,
sentry: SentryIcon,
serper: SerperIcon,
servicenow: ServiceNowIcon,
sftp: SftpIcon,
google_vault: GoogleVaultIcon,
sharepoint: MicrosoftSharepointIcon,
shopify: ShopifyIcon,
slack: SlackIcon,
smtp: SmtpIcon,
spotify: SpotifyIcon,
sqs: SQSIcon,
ssh: SshIcon,
stagehand: StagehandIcon,
stripe: StripeIcon,
stt: STTIcon,
supabase: SupabaseIcon,
tavily: TavilyIcon,
telegram: TelegramIcon,
thinking: BrainIcon,
translate: TranslateIcon,
trello: TrelloIcon,
tts: TTSIcon,
twilio_sms: TwilioIcon,
twilio_voice: TwilioIcon,
typeform: TypeformIcon,
huggingface: HuggingFaceIcon,
sendgrid: SendgridIcon,
video_generator: VideoIcon,
vision: EyeIcon,
smtp: SmtpIcon,
google_groups: GoogleGroupsIcon,
mailgun: MailgunIcon,
clay: ClayIcon,
jira: JiraIcon,
search: SearchIcon,
linkedin: LinkedInIcon,
wealthbox: WealthboxIcon,
webflow: WebflowIcon,
whatsapp: WhatsAppIcon,
notion: NotionIcon,
elevenlabs: ElevenLabsIcon,
microsoft_teams: MicrosoftTeamsIcon,
github: GithubIcon,
sftp: SftpIcon,
ssh: SshIcon,
google_drive: GoogleDriveIcon,
sentry: SentryIcon,
reddit: RedditIcon,
parallel_ai: ParallelIcon,
spotify: SpotifyIcon,
stripe: StripeIcon,
s3: S3Icon,
trello: TrelloIcon,
mem0: Mem0Icon,
knowledge: PackageSearchIcon,
intercom: IntercomIcon,
twilio_sms: TwilioIcon,
duckduckgo: DuckDuckGoIcon,
slack: SlackIcon,
datadog: DatadogIcon,
microsoft_excel: MicrosoftExcelIcon,
image_generator: ImageIcon,
google_sheets: GoogleSheetsIcon,
wikipedia: WikipediaIcon,
wordpress: WordpressIcon,
x: xIcon,
youtube: YouTubeIcon,
zendesk: ZendeskIcon,
zep: ZepIcon,
zoom: ZoomIcon,
cursor: CursorIcon,
firecrawl: FirecrawlIcon,
mysql: MySQLIcon,
browser_use: BrowserUseIcon,
stt: STTIcon,
}
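The map above is reordered alphabetically; its entries and lookups are unchanged. A minimal sketch of how such a record is typically consumed (the helper name and the fallback icon are assumptions, not part of the diff):
```ts
import type { ComponentType, SVGProps } from 'react'

type IconComponent = ComponentType<SVGProps<SVGSVGElement>>

// Hypothetical lookup helper: returns the mapped icon for a block type, or a
// caller-supplied fallback when the type has no entry.
function getBlockIcon(
  map: Record<string, IconComponent>,
  blockType: string,
  fallback: IconComponent
): IconComponent {
  return map[blockType] ?? fallback
}
```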

View File

@@ -111,24 +111,26 @@ Verschiedene Blocktypen erzeugen unterschiedliche Ausgabestrukturen. Hier ist, w
```json
{
"content": "Original content passed through",
"conditionResult": true,
"selectedPath": {
"blockId": "2acd9007-27e8-4510-a487-73d3b825e7c1",
"blockType": "agent",
"blockTitle": "Follow-up Agent"
},
"selectedOption": "condition-1"
"selectedConditionId": "condition-1"
}
```
### Ausgabefelder des Condition-Blocks
- **content**: Der ursprüngliche, durchgeleitete Inhalt
- **conditionResult**: Boolesches Ergebnis der Bedingungsauswertung
- **selectedPath**: Informationen über den ausgewählten Pfad
- **blockId**: ID des nächsten Blocks im ausgewählten Pfad
- **blockType**: Typ des nächsten Blocks
- **blockTitle**: Titel des nächsten Blocks
- **selectedOption**: ID der ausgewählten Bedingung
- **selectedConditionId**: ID der ausgewählten Bedingung
</Tab>
<Tab>

View File

@@ -90,20 +90,14 @@ Ein Jira-Issue erstellen
| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Ja | Ihre Jira-Domain \(z.B. ihrfirma.atlassian.net\) |
| `domain` | string | Ja | Ihre Jira-Domain (z.B. ihrfirma.atlassian.net) |
| `projectId` | string | Ja | Projekt-ID für das Issue |
| `summary` | string | Ja | Zusammenfassung für das Issue |
| `description` | string | Nein | Beschreibung für das Issue |
| `priority` | string | Nein | Prioritäts-ID oder -Name für das Issue \(z.B. "10000" oder "High"\) |
| `assignee` | string | Nein | Account-ID des Bearbeiters für das Issue |
| `cloudId` | string | Nein | Jira Cloud-ID für die Instanz. Wenn nicht angegeben, wird sie über die Domain abgerufen. |
| `issueType` | string | Ja | Typ des zu erstellenden Issues \(z.B. Task, Story\) |
| `labels` | array | Nein | Labels für das Issue \(Array von Label-Namen\) |
| `duedate` | string | Nein | Fälligkeitsdatum für das Issue \(Format: YYYY-MM-DD\) |
| `reporter` | string | Nein | Account-ID des Melders für das Issue |
| `environment` | string | Nein | Umgebungsinformationen für das Issue |
| `customFieldId` | string | Nein | Benutzerdefinierte Feld-ID \(z.B. customfield_10001\) |
| `customFieldValue` | string | Nein | Wert für das benutzerdefinierte Feld |
| `priority` | string | Nein | Priorität für das Issue |
| `assignee` | string | Nein | Bearbeiter für das Issue |
| `cloudId` | string | Nein | Jira Cloud-ID für die Instanz. Wenn nicht angegeben, wird sie anhand der Domain abgerufen. |
| `issueType` | string | Ja | Art des zu erstellenden Issues (z.B. Task, Story) |
#### Ausgabe
@@ -113,7 +107,6 @@ Ein Jira-Issue erstellen
| `issueKey` | string | Erstellter Issue-Key \(z.B. PROJ-123\) |
| `summary` | string | Issue-Zusammenfassung |
| `url` | string | URL zum erstellten Issue |
| `assigneeId` | string | Account-ID des zugewiesenen Benutzers \(falls zugewiesen\) |
### `jira_bulk_read`
@@ -527,30 +520,6 @@ Einen Beobachter von einem Jira-Issue entfernen
| `issueKey` | string | Issue-Key |
| `watcherAccountId` | string | Account-ID des entfernten Beobachters |
### `jira_get_users`
Jira-Benutzer abrufen. Wenn eine Account-ID angegeben wird, wird ein einzelner Benutzer zurückgegeben. Andernfalls wird eine Liste aller Benutzer zurückgegeben.
#### Eingabe
| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Ja | Ihre Jira-Domain \(z.B. ihrfirma.atlassian.net\) |
| `accountId` | string | Nein | Optionale Account-ID, um einen bestimmten Benutzer abzurufen. Wenn nicht angegeben, werden alle Benutzer zurückgegeben. |
| `startAt` | number | Nein | Der Index des ersten zurückzugebenden Benutzers \(für Paginierung, Standard: 0\) |
| `maxResults` | number | Nein | Maximale Anzahl der zurückzugebenden Benutzer \(Standard: 50\) |
| `cloudId` | string | Nein | Jira Cloud-ID für die Instanz. Wenn nicht angegeben, wird sie anhand der Domain abgerufen. |
#### Ausgabe
| Parameter | Typ | Beschreibung |
| --------- | ---- | ----------- |
| `ts` | string | Zeitstempel der Operation |
| `users` | json | Array von Benutzern mit accountId, displayName, emailAddress, active-Status und avatarUrls |
| `total` | number | Gesamtanzahl der zurückgegebenen Benutzer |
| `startAt` | number | Startindex für Paginierung |
| `maxResults` | number | Maximale Ergebnisse pro Seite |
## Hinweise
- Kategorie: `tools`

View File

@@ -1,6 +1,7 @@
---
title: ServiceNow
description: ServiceNow-Datensätze erstellen, lesen, aktualisieren und löschen
description: Erstellen, lesen, aktualisieren, löschen und Massenimport von
ServiceNow-Datensätzen
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
@@ -10,36 +11,22 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
color="#032D42"
/>
{/* MANUAL-CONTENT-START:intro */}
[ServiceNow](https://www.servicenow.com/) ist eine leistungsstarke Cloud-Plattform zur Optimierung und Automatisierung von IT-Service-Management (ITSM), Workflows und Geschäftsprozessen in Ihrem Unternehmen. ServiceNow ermöglicht Ihnen die Verwaltung von Vorfällen, Anfragen, Aufgaben, Benutzern und mehr über seine umfangreiche API.
## Nutzungsanleitung
Mit ServiceNow können Sie:
- **IT-Workflows automatisieren**: Datensätze in jeder ServiceNow-Tabelle erstellen, lesen, aktualisieren und löschen, z. B. Vorfälle, Aufgaben, Änderungsanfragen und Benutzer.
- **Systeme integrieren**: ServiceNow mit Ihren anderen Tools und Prozessen für nahtlose Automatisierung verbinden.
- **Eine einzige Informationsquelle pflegen**: Alle Ihre Service- und Betriebsdaten organisiert und zugänglich halten.
- **Betriebliche Effizienz steigern**: Manuelle Arbeit reduzieren und Servicequalität mit anpassbaren Workflows und Automatisierung verbessern.
In Sim ermöglicht die ServiceNow-Integration Ihren Agenten, direkt mit Ihrer ServiceNow-Instanz als Teil ihrer Workflows zu interagieren. Agenten können Datensätze in jeder ServiceNow-Tabelle erstellen, lesen, aktualisieren oder löschen und Ticket- oder Benutzerdaten für ausgefeilte Automatisierung und Entscheidungsfindung nutzen. Diese Integration verbindet Ihre Workflow-Automatisierung und IT-Betrieb und befähigt Ihre Agenten, Serviceanfragen, Vorfälle, Benutzer und Assets ohne manuelle Eingriffe zu verwalten. Durch die Verbindung von Sim mit ServiceNow können Sie Service-Management-Aufgaben automatisieren, Reaktionszeiten verbessern und konsistenten, sicheren Zugriff auf die wichtigen Servicedaten Ihres Unternehmens gewährleisten.
{/* MANUAL-CONTENT-END */}
## Nutzungsanweisungen
Integrieren Sie ServiceNow in Ihren Workflow. Erstellen, lesen, aktualisieren und löschen Sie Datensätze in jeder ServiceNow-Tabelle, einschließlich Vorfälle, Aufgaben, Änderungsanfragen, Benutzer und mehr.
Integrieren Sie ServiceNow in Ihren Workflow. Kann Datensätze in jeder ServiceNow-Tabelle erstellen, lesen, aktualisieren und löschen (Vorfälle, Aufgaben, Benutzer usw.). Unterstützt Massenimport-Operationen für Datenmigration und ETL.
## Tools
### `servicenow_create_record`
Einen neuen Datensatz in einer ServiceNow-Tabelle erstellen
Erstellen eines neuen Datensatzes in einer ServiceNow-Tabelle
#### Eingabe
| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Ja | ServiceNow-Instanz-URL \(z. B. https://instance.service-now.com\) |
| `username` | string | Ja | ServiceNow-Benutzername |
| `password` | string | Ja | ServiceNow-Passwort |
| `credential` | string | Nein | ServiceNow OAuth-Anmeldeinformations-ID |
| `tableName` | string | Ja | Tabellenname \(z. B. incident, task, sys_user\) |
| `fields` | json | Ja | Felder, die für den Datensatz festgelegt werden sollen \(JSON-Objekt\) |
@@ -52,15 +39,14 @@ Einen neuen Datensatz in einer ServiceNow-Tabelle erstellen
### `servicenow_read_record`
Datensätze aus einer ServiceNow-Tabelle lesen
Lesen von Datensätzen aus einer ServiceNow-Tabelle
#### Eingabe
| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Ja | ServiceNow-Instanz-URL \(z. B. https://instance.service-now.com\) |
| `username` | string | Ja | ServiceNow-Benutzername |
| `password` | string | Ja | ServiceNow-Passwort |
| `instanceUrl` | string | Nein | ServiceNow-Instanz-URL \(automatisch aus OAuth erkannt, falls nicht angegeben\) |
| `credential` | string | Nein | ServiceNow OAuth-Anmeldeinformations-ID |
| `tableName` | string | Ja | Tabellenname |
| `sysId` | string | Nein | Spezifische Datensatz-sys_id |
| `number` | string | Nein | Datensatznummer \(z. B. INC0010001\) |
@@ -83,11 +69,10 @@ Einen bestehenden Datensatz in einer ServiceNow-Tabelle aktualisieren
| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Ja | ServiceNow-Instanz-URL \(z. B. https://instance.service-now.com\) |
| `username` | string | Ja | ServiceNow-Benutzername |
| `password` | string | Ja | ServiceNow-Passwort |
| `instanceUrl` | string | Nein | ServiceNow-Instanz-URL \(wird automatisch aus OAuth erkannt, falls nicht angegeben\) |
| `credential` | string | Nein | ServiceNow-OAuth-Credential-ID |
| `tableName` | string | Ja | Tabellenname |
| `sysId` | string | Ja | Datensatz-sys_id zum Aktualisieren |
| `sysId` | string | Ja | Sys_id des zu aktualisierenden Datensatzes |
| `fields` | json | Ja | Zu aktualisierende Felder \(JSON-Objekt\) |
#### Ausgabe
@@ -105,11 +90,10 @@ Einen Datensatz aus einer ServiceNow-Tabelle löschen
| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Ja | ServiceNow-Instanz-URL \(z. B. https://instance.service-now.com\) |
| `username` | string | Ja | ServiceNow-Benutzername |
| `password` | string | Ja | ServiceNow-Passwort |
| `instanceUrl` | string | Nein | ServiceNow-Instanz-URL \(wird automatisch aus OAuth erkannt, falls nicht angegeben\) |
| `credential` | string | Nein | ServiceNow-OAuth-Credential-ID |
| `tableName` | string | Ja | Tabellenname |
| `sysId` | string | Ja | Datensatz-sys_id zum Löschen |
| `sysId` | string | Ja | Sys_id des zu löschenden Datensatzes |
#### Ausgabe

View File

@@ -109,12 +109,12 @@ Lesen Sie die neuesten Nachrichten aus Slack-Kanälen. Rufen Sie den Konversatio
| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `authMethod` | string | Nein | Authentifizierungsmethode: oauth oder bot_token |
| `botToken` | string | Nein | Bot-Token für Custom Bot |
| `botToken` | string | Nein | Bot-Token für benutzerdefinierten Bot |
| `channel` | string | Nein | Slack-Kanal, aus dem Nachrichten gelesen werden sollen \(z.B. #general\) |
| `userId` | string | Nein | Benutzer-ID für DM-Konversation \(z.B. U1234567890\) |
| `limit` | number | Nein | Anzahl der abzurufenden Nachrichten \(Standard: 10, max: 15\) |
| `oldest` | string | Nein | Beginn des Zeitbereichs \(Zeitstempel\) |
| `latest` | string | Nein | Ende des Zeitbereichs \(Zeitstempel\) |
| `limit` | number | Nein | Anzahl der abzurufenden Nachrichten \(Standard: 10, max: 100\) |
| `oldest` | string | Nein | Beginn des Zeitraums \(Zeitstempel\) |
| `latest` | string | Nein | Ende des Zeitraums \(Zeitstempel\) |
#### Ausgabe

View File

@@ -39,16 +39,14 @@ Senden Sie eine Chat-Completion-Anfrage an jeden unterstützten LLM-Anbieter
| Parameter | Typ | Erforderlich | Beschreibung |
| --------- | ---- | -------- | ----------- |
| `model` | string | Ja | Das zu verwendende Modell \(z. B. gpt-4o, claude-sonnet-4-5, gemini-2.0-flash\) |
| `systemPrompt` | string | Nein | System-Prompt zur Festlegung des Verhaltens des Assistenten |
| `context` | string | Ja | Die Benutzernachricht oder der Kontext, der an das Modell gesendet werden soll |
| `apiKey` | string | Nein | API-Schlüssel für den Anbieter \(verwendet Plattform-Schlüssel, falls nicht für gehostete Modelle angegeben\) |
| `temperature` | number | Nein | Temperatur für die Antwortgenerierung \(0-2\) |
| `maxTokens` | number | Nein | Maximale Anzahl von Tokens in der Antwort |
| `model` | string | Ja | Das zu verwendende Modell (z.B. gpt-4o, claude-sonnet-4-5, gemini-2.0-flash) |
| `systemPrompt` | string | Nein | System-Prompt zur Festlegung des Assistentenverhaltens |
| `context` | string | Ja | Die Benutzernachricht oder der Kontext, der an das Modell gesendet wird |
| `apiKey` | string | Nein | API-Schlüssel für den Anbieter (verwendet den Plattformschlüssel, wenn für gehostete Modelle nicht angegeben) |
| `temperature` | number | Nein | Temperatur für die Antwortgenerierung (0-2) |
| `maxTokens` | number | Nein | Maximale Tokens in der Antwort |
| `azureEndpoint` | string | Nein | Azure OpenAI-Endpunkt-URL |
| `azureApiVersion` | string | Nein | Azure OpenAI-API-Version |
| `vertexProject` | string | Nein | Google Cloud-Projekt-ID für Vertex AI |
| `vertexLocation` | string | Nein | Google Cloud-Standort für Vertex AI \(Standard: us-central1\) |
| `azureApiVersion` | string | Nein | Azure OpenAI API-Version |
#### Ausgabe

View File

@@ -106,24 +106,26 @@ Different block types produce different output structures. Here's what you can e
<Tab>
```json
{
"content": "Original content passed through",
"conditionResult": true,
"selectedPath": {
"blockId": "2acd9007-27e8-4510-a487-73d3b825e7c1",
"blockType": "agent",
"blockTitle": "Follow-up Agent"
},
"selectedOption": "condition-1"
"selectedConditionId": "condition-1"
}
```
### Condition Block Output Fields
- **content**: The original content passed through
- **conditionResult**: Boolean result of the condition evaluation
- **selectedPath**: Information about the selected path
- **blockId**: ID of the next block in the selected path
- **blockType**: Type of the next block
- **blockTitle**: Title of the next block
- **selectedOption**: ID of the selected condition
- **selectedConditionId**: ID of the selected condition
</Tab>
<Tab>
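This hunk renames `selectedOption` to `selectedConditionId` in the condition block output, so downstream references need the new key. A minimal TypeScript sketch of the documented shape (the type and function names are illustrative):
```ts
// Illustrative type mirroring the condition block output documented above.
interface ConditionBlockOutput {
  content: string
  conditionResult: boolean
  selectedPath: {
    blockId: string
    blockType: string
    blockTitle: string
  }
  // Renamed from `selectedOption`.
  selectedConditionId: string
}

// Example: read the selected condition ID from a block result.
function getSelectedConditionId(output: ConditionBlockOutput): string {
  return output.selectedConditionId
}
```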

View File

@@ -97,16 +97,10 @@ Write a Jira issue
| `projectId` | string | Yes | Project ID for the issue |
| `summary` | string | Yes | Summary for the issue |
| `description` | string | No | Description for the issue |
| `priority` | string | No | Priority ID or name for the issue \(e.g., "10000" or "High"\) |
| `assignee` | string | No | Assignee account ID for the issue |
| `priority` | string | No | Priority for the issue |
| `assignee` | string | No | Assignee for the issue |
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
| `issueType` | string | Yes | Type of issue to create \(e.g., Task, Story\) |
| `labels` | array | No | Labels for the issue \(array of label names\) |
| `duedate` | string | No | Due date for the issue \(format: YYYY-MM-DD\) |
| `reporter` | string | No | Reporter account ID for the issue |
| `environment` | string | No | Environment information for the issue |
| `customFieldId` | string | No | Custom field ID \(e.g., customfield_10001\) |
| `customFieldValue` | string | No | Value for the custom field |
#### Output
@@ -116,7 +110,6 @@ Write a Jira issue
| `issueKey` | string | Created issue key \(e.g., PROJ-123\) |
| `summary` | string | Issue summary |
| `url` | string | URL to the created issue |
| `assigneeId` | string | Account ID of the assigned user \(if assigned\) |
### `jira_bulk_read`
@@ -530,30 +523,6 @@ Remove a watcher from a Jira issue
| `issueKey` | string | Issue key |
| `watcherAccountId` | string | Removed watcher account ID |
### `jira_get_users`
Get Jira users. If an account ID is provided, returns a single user. Otherwise, returns a list of all users.
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
| `accountId` | string | No | Optional account ID to get a specific user. If not provided, returns all users. |
| `startAt` | number | No | The index of the first user to return \(for pagination, default: 0\) |
| `maxResults` | number | No | Maximum number of users to return \(default: 50\) |
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `ts` | string | Timestamp of the operation |
| `users` | json | Array of users with accountId, displayName, emailAddress, active status, and avatarUrls |
| `total` | number | Total number of users returned |
| `startAt` | number | Pagination start index |
| `maxResults` | number | Maximum results per page |
## Notes

View File

@@ -1,6 +1,6 @@
---
title: ServiceNow
description: Create, read, update, and delete ServiceNow records
description: Create, read, update, delete, and bulk import ServiceNow records
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
@@ -10,23 +10,9 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
color="#032D42"
/>
{/* MANUAL-CONTENT-START:intro */}
[ServiceNow](https://www.servicenow.com/) is a powerful cloud platform designed to streamline and automate IT service management (ITSM), workflows, and business processes across your organization. ServiceNow enables you to manage incidents, requests, tasks, users, and more using its extensive API.
With ServiceNow, you can:
- **Automate IT workflows**: Create, read, update, and delete records in any ServiceNow table, such as incidents, tasks, change requests, and users.
- **Integrate systems**: Connect ServiceNow with your other tools and processes for seamless automation.
- **Maintain a single source of truth**: Keep all your service and operations data organized and accessible.
- **Drive operational efficiency**: Reduce manual work and improve service quality with customizable workflows and automation.
In Sim, the ServiceNow integration enables your agents to interact directly with your ServiceNow instance as part of their workflows. Agents can create, read, update, or delete records in any ServiceNow table and leverage ticket or user data for sophisticated automation and decision-making. This integration bridges your workflow automation and IT operations, empowering your agents to manage service requests, incidents, users, and assets without manual intervention. By connecting Sim with ServiceNow, you can automate service management tasks, improve response times, and ensure consistent, secure access to your organization's vital service data.
{/* MANUAL-CONTENT-END */}
## Usage Instructions
Integrate ServiceNow into your workflow. Create, read, update, and delete records in any ServiceNow table including incidents, tasks, change requests, users, and more.
Integrate ServiceNow into your workflow. Can create, read, update, and delete records in any ServiceNow table (incidents, tasks, users, etc.). Supports bulk import operations for data migration and ETL.
@@ -41,8 +27,7 @@ Create a new record in a ServiceNow table
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Yes | ServiceNow instance URL \(e.g., https://instance.service-now.com\) |
| `username` | string | Yes | ServiceNow username |
| `password` | string | Yes | ServiceNow password |
| `credential` | string | No | ServiceNow OAuth credential ID |
| `tableName` | string | Yes | Table name \(e.g., incident, task, sys_user\) |
| `fields` | json | Yes | Fields to set on the record \(JSON object\) |
@@ -61,9 +46,8 @@ Read records from a ServiceNow table
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Yes | ServiceNow instance URL \(e.g., https://instance.service-now.com\) |
| `username` | string | Yes | ServiceNow username |
| `password` | string | Yes | ServiceNow password |
| `instanceUrl` | string | No | ServiceNow instance URL \(auto-detected from OAuth if not provided\) |
| `credential` | string | No | ServiceNow OAuth credential ID |
| `tableName` | string | Yes | Table name |
| `sysId` | string | No | Specific record sys_id |
| `number` | string | No | Record number \(e.g., INC0010001\) |
@@ -86,9 +70,8 @@ Update an existing record in a ServiceNow table
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Yes | ServiceNow instance URL \(e.g., https://instance.service-now.com\) |
| `username` | string | Yes | ServiceNow username |
| `password` | string | Yes | ServiceNow password |
| `instanceUrl` | string | No | ServiceNow instance URL \(auto-detected from OAuth if not provided\) |
| `credential` | string | No | ServiceNow OAuth credential ID |
| `tableName` | string | Yes | Table name |
| `sysId` | string | Yes | Record sys_id to update |
| `fields` | json | Yes | Fields to update \(JSON object\) |
@@ -108,9 +91,8 @@ Delete a record from a ServiceNow table
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Yes | ServiceNow instance URL \(e.g., https://instance.service-now.com\) |
| `username` | string | Yes | ServiceNow username |
| `password` | string | Yes | ServiceNow password |
| `instanceUrl` | string | No | ServiceNow instance URL \(auto-detected from OAuth if not provided\) |
| `credential` | string | No | ServiceNow OAuth credential ID |
| `tableName` | string | Yes | Table name |
| `sysId` | string | Yes | Record sys_id to delete |

View File

@@ -114,7 +114,7 @@ Read the latest messages from Slack channels. Retrieve conversation history with
| `botToken` | string | No | Bot token for Custom Bot |
| `channel` | string | No | Slack channel to read messages from \(e.g., #general\) |
| `userId` | string | No | User ID for DM conversation \(e.g., U1234567890\) |
| `limit` | number | No | Number of messages to retrieve \(default: 10, max: 15\) |
| `limit` | number | No | Number of messages to retrieve \(default: 10, max: 100\) |
| `oldest` | string | No | Start of time range \(timestamp\) |
| `latest` | string | No | End of time range \(timestamp\) |

View File

@@ -50,8 +50,6 @@ Send a chat completion request to any supported LLM provider
| `maxTokens` | number | No | Maximum tokens in the response |
| `azureEndpoint` | string | No | Azure OpenAI endpoint URL |
| `azureApiVersion` | string | No | Azure OpenAI API version |
| `vertexProject` | string | No | Google Cloud project ID for Vertex AI |
| `vertexLocation` | string | No | Google Cloud location for Vertex AI \(defaults to us-central1\) |
#### Output

View File

@@ -111,24 +111,26 @@ Diferentes tipos de bloques producen diferentes estructuras de salida. Esto es l
```json
{
"content": "Original content passed through",
"conditionResult": true,
"selectedPath": {
"blockId": "2acd9007-27e8-4510-a487-73d3b825e7c1",
"blockType": "agent",
"blockTitle": "Follow-up Agent"
},
"selectedOption": "condition-1"
"selectedConditionId": "condition-1"
}
```
### Campos de salida del bloque de condición
- **conditionResult**: resultado booleano de la evaluación de la condición
- **selectedPath**: información sobre la ruta seleccionada
- **content**: El contenido original que se transmite
- **conditionResult**: Resultado booleano de la evaluación de la condición
- **selectedPath**: Información sobre la ruta seleccionada
- **blockId**: ID del siguiente bloque en la ruta seleccionada
- **blockType**: tipo del siguiente bloque
- **blockTitle**: título del siguiente bloque
- **selectedOption**: ID de la condición seleccionada
- **blockType**: Tipo del siguiente bloque
- **blockTitle**: Título del siguiente bloque
- **selectedConditionId**: ID de la condición seleccionada
</Tab>
<Tab>

View File

@@ -89,31 +89,24 @@ Escribir una incidencia de Jira
#### Entrada
| Parámetro | Tipo | Obligatorio | Descripción |
| --------- | ---- | ----------- | ----------- |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Sí | Tu dominio de Jira \(p. ej., tuempresa.atlassian.net\) |
| `projectId` | string | Sí | ID del proyecto para la incidencia |
| `summary` | string | Sí | Resumen de la incidencia |
| `description` | string | No | Descripción de la incidencia |
| `priority` | string | No | ID o nombre de prioridad para la incidencia \(p. ej., "10000" o "Alta"\) |
| `assignee` | string | No | ID de cuenta del asignado para la incidencia |
| `cloudId` | string | No | ID de Jira Cloud para la instancia. Si no se proporciona, se obtendrá usando el dominio. |
| `priority` | string | No | Prioridad de la incidencia |
| `assignee` | string | No | Asignado para la incidencia |
| `cloudId` | string | No | ID de Jira Cloud para la instancia. Si no se proporciona, se obtendrá utilizando el dominio. |
| `issueType` | string | Sí | Tipo de incidencia a crear \(p. ej., Tarea, Historia\) |
| `labels` | array | No | Etiquetas para la incidencia \(array de nombres de etiquetas\) |
| `duedate` | string | No | Fecha de vencimiento para la incidencia \(formato: AAAA-MM-DD\) |
| `reporter` | string | No | ID de cuenta del informador para la incidencia |
| `environment` | string | No | Información del entorno para la incidencia |
| `customFieldId` | string | No | ID del campo personalizado \(p. ej., customfield_10001\) |
| `customFieldValue` | string | No | Valor para el campo personalizado |
#### Salida
| Parámetro | Tipo | Descripción |
| --------- | ---- | ----------- |
| `ts` | string | Marca de tiempo de la operación |
| `issueKey` | string | Clave de la incidencia creada \(p. ej., PROJ-123\) |
| `issueKey` | string | Clave de la incidencia creada (p. ej., PROJ-123) |
| `summary` | string | Resumen de la incidencia |
| `url` | string | URL de la incidencia creada |
| `assigneeId` | string | ID de cuenta del usuario asignado \(si está asignado\) |
### `jira_bulk_read`
@@ -527,30 +520,6 @@ Eliminar un observador de una incidencia de Jira
| `issueKey` | string | Clave de incidencia |
| `watcherAccountId` | string | ID de cuenta del observador eliminado |
### `jira_get_users`
Obtener usuarios de Jira. Si se proporciona un ID de cuenta, devuelve un solo usuario. De lo contrario, devuelve una lista de todos los usuarios.
#### Entrada
| Parámetro | Tipo | Obligatorio | Descripción |
| --------- | ---- | ----------- | ----------- |
| `domain` | string | Sí | Tu dominio de Jira \(p. ej., tuempresa.atlassian.net\) |
| `accountId` | string | No | ID de cuenta opcional para obtener un usuario específico. Si no se proporciona, devuelve todos los usuarios. |
| `startAt` | number | No | El índice del primer usuario a devolver \(para paginación, predeterminado: 0\) |
| `maxResults` | number | No | Número máximo de usuarios a devolver \(predeterminado: 50\) |
| `cloudId` | string | No | ID de Jira Cloud para la instancia. Si no se proporciona, se obtendrá usando el dominio. |
#### Salida
| Parámetro | Tipo | Descripción |
| --------- | ---- | ----------- |
| `ts` | string | Marca de tiempo de la operación |
| `users` | json | Array de usuarios con accountId, displayName, emailAddress, estado activo y avatarUrls |
| `total` | number | Número total de usuarios devueltos |
| `startAt` | number | Índice de inicio de paginación |
| `maxResults` | number | Máximo de resultados por página |
## Notas
- Categoría: `tools`

View File

@@ -1,6 +1,6 @@
---
title: ServiceNow
description: Crear, leer, actualizar y eliminar registros de ServiceNow
description: Crea, lee, actualiza, elimina e importa masivamente registros de ServiceNow
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
@@ -10,37 +10,23 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
color="#032D42"
/>
{/* MANUAL-CONTENT-START:intro */}
[ServiceNow](https://www.servicenow.com/) es una potente plataforma en la nube diseñada para optimizar y automatizar la gestión de servicios de TI (ITSM), flujos de trabajo y procesos empresariales en toda tu organización. ServiceNow te permite gestionar incidencias, solicitudes, tareas, usuarios y más utilizando su amplia API.
Con ServiceNow, puedes:
- **Automatizar flujos de trabajo de TI**: crear, leer, actualizar y eliminar registros en cualquier tabla de ServiceNow, como incidencias, tareas, solicitudes de cambio y usuarios.
- **Integrar sistemas**: conectar ServiceNow con tus otras herramientas y procesos para una automatización fluida.
- **Mantener una única fuente de verdad**: mantener todos tus datos de servicio y operaciones organizados y accesibles.
- **Impulsar la eficiencia operativa**: reducir el trabajo manual y mejorar la calidad del servicio con flujos de trabajo personalizables y automatización.
En Sim, la integración de ServiceNow permite que tus agentes interactúen directamente con tu instancia de ServiceNow como parte de sus flujos de trabajo. Los agentes pueden crear, leer, actualizar o eliminar registros en cualquier tabla de ServiceNow y aprovechar datos de tickets o usuarios para automatización y toma de decisiones sofisticadas. Esta integración conecta tu automatización de flujos de trabajo y operaciones de TI, permitiendo que tus agentes gestionen solicitudes de servicio, incidencias, usuarios y activos sin intervención manual. Al conectar Sim con ServiceNow, puedes automatizar tareas de gestión de servicios, mejorar los tiempos de respuesta y garantizar un acceso consistente y seguro a los datos de servicio vitales de tu organización.
{/* MANUAL-CONTENT-END */}
## Instrucciones de uso
Integra ServiceNow en tu flujo de trabajo. Crea, lee, actualiza y elimina registros en cualquier tabla de ServiceNow, incluyendo incidencias, tareas, solicitudes de cambio, usuarios y más.
Integra ServiceNow en tu flujo de trabajo. Puede crear, leer, actualizar y eliminar registros en cualquier tabla de ServiceNow (incidentes, tareas, usuarios, etc.). Admite operaciones de importación masiva para migración de datos y ETL.
## Herramientas
### `servicenow_create_record`
Crear un nuevo registro en una tabla de ServiceNow
Crea un nuevo registro en una tabla de ServiceNow
#### Entrada
| Parámetro | Tipo | Requerido | Descripción |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Sí | URL de la instancia de ServiceNow \(p. ej., https://instance.service-now.com\) |
| `username` | string | | Nombre de usuario de ServiceNow |
| `password` | string | Sí | Contraseña de ServiceNow |
| `tableName` | string | Sí | Nombre de la tabla \(p. ej., incident, task, sys_user\) |
| `instanceUrl` | string | Sí | URL de la instancia de ServiceNow \(ej., https://instance.service-now.com\) |
| `credential` | string | No | ID de credencial OAuth de ServiceNow |
| `tableName` | string | Sí | Nombre de la tabla \(ej., incident, task, sys_user\) |
| `fields` | json | Sí | Campos a establecer en el registro \(objeto JSON\) |
#### Salida
@@ -52,19 +38,18 @@ Crear un nuevo registro en una tabla de ServiceNow
### `servicenow_read_record`
Leer registros de una tabla de ServiceNow
Lee registros de una tabla de ServiceNow
#### Entrada
| Parámetro | Tipo | Requerido | Descripción |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | | URL de la instancia de ServiceNow \(p. ej., https://instance.service-now.com\) |
| `username` | string | | Nombre de usuario de ServiceNow |
| `password` | string | Sí | Contraseña de ServiceNow |
| `instanceUrl` | string | No | URL de la instancia de ServiceNow \(detectada automáticamente desde OAuth si no se proporciona\) |
| `credential` | string | No | ID de credencial OAuth de ServiceNow |
| `tableName` | string | Sí | Nombre de la tabla |
| `sysId` | string | No | sys_id del registro específico |
| `number` | string | No | Número de registro \(p. ej., INC0010001\) |
| `query` | string | No | Cadena de consulta codificada \(p. ej., "active=true^priority=1"\) |
| `sysId` | string | No | sys_id específico del registro |
| `number` | string | No | Número de registro \(ej., INC0010001\) |
| `query` | string | No | Cadena de consulta codificada \(ej., "active=true^priority=1"\) |
| `limit` | number | No | Número máximo de registros a devolver |
| `fields` | string | No | Lista de campos separados por comas a devolver |
@@ -77,15 +62,14 @@ Leer registros de una tabla de ServiceNow
### `servicenow_update_record`
Actualiza un registro existente en una tabla de ServiceNow
Actualizar un registro existente en una tabla de ServiceNow
#### Entrada
| Parámetro | Tipo | Requerido | Descripción |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | | URL de la instancia de ServiceNow \(ej., https://instance.service-now.com\) |
| `username` | string | | Nombre de usuario de ServiceNow |
| `password` | string | Sí | Contraseña de ServiceNow |
| `instanceUrl` | string | No | URL de la instancia de ServiceNow \(detectada automáticamente desde OAuth si no se proporciona\) |
| `credential` | string | No | ID de credencial OAuth de ServiceNow |
| `tableName` | string | Sí | Nombre de la tabla |
| `sysId` | string | Sí | sys_id del registro a actualizar |
| `fields` | json | Sí | Campos a actualizar \(objeto JSON\) |
@@ -99,15 +83,14 @@ Actualiza un registro existente en una tabla de ServiceNow
### `servicenow_delete_record`
Elimina un registro de una tabla de ServiceNow
Eliminar un registro de una tabla de ServiceNow
#### Entrada
| Parámetro | Tipo | Requerido | Descripción |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | | URL de la instancia de ServiceNow \(ej., https://instance.service-now.com\) |
| `username` | string | | Nombre de usuario de ServiceNow |
| `password` | string | Sí | Contraseña de ServiceNow |
| `instanceUrl` | string | No | URL de la instancia de ServiceNow \(detectada automáticamente desde OAuth si no se proporciona\) |
| `credential` | string | No | ID de credencial OAuth de ServiceNow |
| `tableName` | string | Sí | Nombre de la tabla |
| `sysId` | string | Sí | sys_id del registro a eliminar |

View File

@@ -111,8 +111,8 @@ Lee los últimos mensajes de los canales de Slack. Recupera el historial de conv
| `authMethod` | string | No | Método de autenticación: oauth o bot_token |
| `botToken` | string | No | Token del bot para Bot personalizado |
| `channel` | string | No | Canal de Slack del que leer mensajes (p. ej., #general) |
| `userId` | string | No | ID de usuario para conversación de mensaje directo (p. ej., U1234567890) |
| `limit` | number | No | Número de mensajes a recuperar (predeterminado: 10, máx: 15) |
| `userId` | string | No | ID de usuario para conversación por MD (p. ej., U1234567890) |
| `limit` | number | No | Número de mensajes a recuperar (predeterminado: 10, máx: 100) |
| `oldest` | string | No | Inicio del rango de tiempo (marca de tiempo) |
| `latest` | string | No | Fin del rango de tiempo (marca de tiempo) |

View File

@@ -37,18 +37,16 @@ Envía una solicitud de completado de chat a cualquier proveedor de LLM compatib
#### Entrada
| Parámetro | Tipo | Requerido | Descripción |
| Parámetro | Tipo | Obligatorio | Descripción |
| --------- | ---- | -------- | ----------- |
| `model` | string | Sí | El modelo a utilizar \(ej., gpt-4o, claude-sonnet-4-5, gemini-2.0-flash\) |
| `model` | string | Sí | El modelo a utilizar \(p. ej., gpt-4o, claude-sonnet-4-5, gemini-2.0-flash\) |
| `systemPrompt` | string | No | Prompt del sistema para establecer el comportamiento del asistente |
| `context` | string | Sí | El mensaje del usuario o contexto a enviar al modelo |
| `apiKey` | string | No | Clave API del proveedor \(usa la clave de la plataforma si no se proporciona para modelos alojados\) |
| `context` | string | Sí | El mensaje del usuario o contexto para enviar al modelo |
| `apiKey` | string | No | Clave API para el proveedor \(usa la clave de la plataforma si no se proporciona para modelos alojados\) |
| `temperature` | number | No | Temperatura para la generación de respuestas \(0-2\) |
| `maxTokens` | number | No | Tokens máximos en la respuesta |
| `azureEndpoint` | string | No | URL del endpoint de Azure OpenAI |
| `azureApiVersion` | string | No | Versión de la API de Azure OpenAI |
| `vertexProject` | string | No | ID del proyecto de Google Cloud para Vertex AI |
| `vertexLocation` | string | No | Ubicación de Google Cloud para Vertex AI \(por defecto us-central1\) |
#### Salida

View File

@@ -111,24 +111,26 @@ Différents types de blocs produisent différentes structures de sortie. Voici c
```json
{
"content": "Original content passed through",
"conditionResult": true,
"selectedPath": {
"blockId": "2acd9007-27e8-4510-a487-73d3b825e7c1",
"blockType": "agent",
"blockTitle": "Follow-up Agent"
},
"selectedOption": "condition-1"
"selectedConditionId": "condition-1"
}
```
### Champs de sortie du bloc de condition
- **content** : le contenu original transmis
- **conditionResult** : résultat booléen de l'évaluation de la condition
- **selectedPath** : informations sur le chemin sélectionné
- **blockId** : ID du bloc suivant dans le chemin sélectionné
- **blockType** : type du bloc suivant
- **blockTitle** : titre du bloc suivant
- **selectedOption** : ID de la condition sélectionnée
- **selectedConditionId** : ID de la condition sélectionnée
</Tab>
<Tab>

View File

@@ -89,21 +89,15 @@ Rédiger une demande Jira
#### Entrée
| Paramètre | Type | Obligatoire | Description |
| --------- | ---- | ----------- | ----------- |
| `domain` | chaîne | Oui | Votre domaine Jira \(ex. : votreentreprise.atlassian.net\) |
| `projectId` | chaîne | Oui | ID du projet pour le ticket |
| `summary` | chaîne | Oui | Résumé du ticket |
| `description` | chaîne | Non | Description du ticket |
| `priority` | chaîne | Non | ID ou nom de la priorité du ticket \(ex. : "10000" ou "Haute"\) |
| `assignee` | chaîne | Non | ID de compte de l'assigné pour le ticket |
| `cloudId` | chaîne | Non | ID Cloud Jira pour l'instance. S'il n'est pas fourni, il sera récupéré à l'aide du domaine. |
| `issueType` | chaîne | Oui | Type de ticket à créer \(ex. : tâche, story\) |
| `labels` | tableau | Non | Étiquettes pour le ticket \(tableau de noms d'étiquettes\) |
| `duedate` | chaîne | Non | Date d'échéance du ticket \(format : AAAA-MM-JJ\) |
| `reporter` | chaîne | Non | ID de compte du rapporteur pour le ticket |
| `environment` | chaîne | Non | Informations d'environnement pour le ticket |
| `customFieldId` | chaîne | Non | ID du champ personnalisé \(ex. : customfield_10001\) |
| `customFieldValue` | chaîne | Non | Valeur pour le champ personnalisé |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Oui | Votre domaine Jira (ex. : votreentreprise.atlassian.net) |
| `projectId` | string | Oui | ID du projet pour la demande |
| `summary` | string | Oui | Résumé de la demande |
| `description` | string | Non | Description de la demande |
| `priority` | string | Non | Priorité de la demande |
| `assignee` | string | Non | Assigné de la demande |
| `cloudId` | string | Non | ID Jira Cloud pour l'instance. S'il n'est pas fourni, il sera récupéré à l'aide du domaine. |
| `issueType` | string | Oui | Type de demande à créer (ex. : Tâche, Story) |
#### Sortie
@@ -113,7 +107,6 @@ Rédiger une demande Jira
| `issueKey` | chaîne | Clé du ticket créé \(ex. : PROJ-123\) |
| `summary` | chaîne | Résumé du ticket |
| `url` | chaîne | URL vers le ticket créé |
| `assigneeId` | chaîne | ID de compte de l'utilisateur assigné \(si assigné\) |
### `jira_bulk_read`
@@ -527,31 +520,7 @@ Supprimer un observateur d'un ticket Jira
| `issueKey` | string | Clé du ticket |
| `watcherAccountId` | string | ID du compte observateur supprimé |
### `jira_get_users`
Récupère les utilisateurs Jira. Si un ID de compte est fourni, renvoie un seul utilisateur. Sinon, renvoie une liste de tous les utilisateurs.
#### Entrée
| Paramètre | Type | Obligatoire | Description |
| --------- | ---- | ----------- | ----------- |
| `domain` | chaîne | Oui | Votre domaine Jira \(ex. : votreentreprise.atlassian.net\) |
| `accountId` | chaîne | Non | ID de compte optionnel pour obtenir un utilisateur spécifique. S'il n'est pas fourni, renvoie tous les utilisateurs. |
| `startAt` | nombre | Non | L'index du premier utilisateur à renvoyer \(pour la pagination, par défaut : 0\) |
| `maxResults` | nombre | Non | Nombre maximum d'utilisateurs à renvoyer \(par défaut : 50\) |
| `cloudId` | chaîne | Non | ID Cloud Jira pour l'instance. S'il n'est pas fourni, il sera récupéré à l'aide du domaine. |
#### Sortie
| Paramètre | Type | Description |
| --------- | ---- | ----------- |
| `ts` | chaîne | Horodatage de l'opération |
| `users` | json | Tableau d'utilisateurs avec accountId, displayName, emailAddress, statut actif et avatarUrls |
| `total` | nombre | Nombre total d'utilisateurs renvoyés |
| `startAt` | nombre | Index de début de pagination |
| `maxResults` | nombre | Nombre maximum de résultats par page |
## Remarques
## Notes
- Catégorie : `tools`
- Type : `jira`

View File

@@ -1,6 +1,7 @@
---
title: ServiceNow
description: Créer, lire, mettre à jour et supprimer des enregistrements ServiceNow
description: Créer, lire, mettre à jour, supprimer et importer en masse des
enregistrements ServiceNow
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
@@ -10,22 +11,9 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
color="#032D42"
/>
{/* MANUAL-CONTENT-START:intro */}
[ServiceNow](https://www.servicenow.com/) est une plateforme cloud puissante conçue pour rationaliser et automatiser la gestion des services informatiques (ITSM), les workflows et les processus métier au sein de votre organisation. ServiceNow vous permet de gérer les incidents, les demandes, les tâches, les utilisateurs et bien plus encore grâce à son API étendue.
Avec ServiceNow, vous pouvez :
- **Automatiser les workflows informatiques** : créer, lire, mettre à jour et supprimer des enregistrements dans n'importe quelle table ServiceNow, tels que les incidents, les tâches, les demandes de changement et les utilisateurs.
- **Intégrer les systèmes** : connecter ServiceNow avec vos autres outils et processus pour une automatisation transparente.
- **Maintenir une source unique de vérité** : garder toutes vos données de service et d'exploitation organisées et accessibles.
- **Améliorer l'efficacité opérationnelle** : réduire le travail manuel et améliorer la qualité du service grâce à des workflows personnalisables et à l'automatisation.
Dans Sim, l'intégration ServiceNow permet à vos agents d'interagir directement avec votre instance ServiceNow dans le cadre de leurs workflows. Les agents peuvent créer, lire, mettre à jour ou supprimer des enregistrements dans n'importe quelle table ServiceNow et exploiter les données de tickets ou d'utilisateurs pour une automatisation et une prise de décision sophistiquées. Cette intégration relie votre automatisation de workflow et vos opérations informatiques, permettant à vos agents de gérer les demandes de service, les incidents, les utilisateurs et les actifs sans intervention manuelle. En connectant Sim avec ServiceNow, vous pouvez automatiser les tâches de gestion des services, améliorer les temps de réponse et garantir un accès cohérent et sécurisé aux données de service vitales de votre organisation.
{/* MANUAL-CONTENT-END */}
## Instructions d'utilisation
Intégrez ServiceNow dans votre workflow. Créez, lisez, mettez à jour et supprimez des enregistrements dans n'importe quelle table ServiceNow, y compris les incidents, les tâches, les demandes de changement, les utilisateurs et bien plus encore.
Intégrez ServiceNow dans votre flux de travail. Permet de créer, lire, mettre à jour et supprimer des enregistrements dans n'importe quelle table ServiceNow (incidents, tâches, utilisateurs, etc.). Prend en charge les opérations d'importation en masse pour la migration de données et l'ETL.
## Outils
@@ -37,11 +25,10 @@ Créer un nouvel enregistrement dans une table ServiceNow
| Paramètre | Type | Requis | Description |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Oui | URL de l'instance ServiceNow (par ex., https://instance.service-now.com) |
| `username` | string | Oui | Nom d'utilisateur ServiceNow |
| `password` | string | Oui | Mot de passe ServiceNow |
| `tableName` | string | Oui | Nom de la table (par ex., incident, task, sys_user) |
| `fields` | json | Oui | Champs à définir sur l'enregistrement (objet JSON) |
| `instanceUrl` | string | Oui | URL de l'instance ServiceNow \(par exemple, https://instance.service-now.com\) |
| `credential` | string | Non | ID d'identification OAuth ServiceNow |
| `tableName` | string | Oui | Nom de la table \(par exemple, incident, task, sys_user\) |
| `fields` | json | Oui | Champs à définir sur l'enregistrement \(objet JSON\) |
#### Sortie
@@ -58,21 +45,20 @@ Lire des enregistrements d'une table ServiceNow
| Paramètre | Type | Requis | Description |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Oui | URL de l'instance ServiceNow (par ex., https://instance.service-now.com) |
| `username` | string | Oui | Nom d'utilisateur ServiceNow |
| `password` | string | Oui | Mot de passe ServiceNow |
| `instanceUrl` | string | Non | URL de l'instance ServiceNow \(détectée automatiquement depuis OAuth si non fournie\) |
| `credential` | string | Non | ID d'identification OAuth ServiceNow |
| `tableName` | string | Oui | Nom de la table |
| `sysId` | string | Non | sys_id d'enregistrement spécifique |
| `number` | string | Non | Numéro d'enregistrement (par ex., INC0010001) |
| `query` | string | Non | Chaîne de requête encodée (par ex., "active=true^priority=1") |
| `sysId` | string | Non | sys_id spécifique de l'enregistrement |
| `number` | string | Non | Numéro d'enregistrement \(par exemple, INC0010001\) |
| `query` | string | Non | Chaîne de requête encodée \(par exemple, "active=true^priority=1"\) |
| `limit` | number | Non | Nombre maximum d'enregistrements à retourner |
| `fields` | string | Non | Liste de champs à retourner, séparés par des virgules |
| `fields` | string | Non | Liste de champs séparés par des virgules à retourner |
#### Sortie
| Paramètre | Type | Description |
| --------- | ---- | ----------- |
| `records` | array | Tableau d'enregistrements ServiceNow |
| `records` | array | Tableau des enregistrements ServiceNow |
| `metadata` | json | Métadonnées de l'opération |
### `servicenow_update_record`
@@ -83,12 +69,11 @@ Mettre à jour un enregistrement existant dans une table ServiceNow
| Paramètre | Type | Requis | Description |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Oui | URL de l'instance ServiceNow \(par exemple, https://instance.service-now.com\) |
| `username` | string | Oui | Nom d'utilisateur ServiceNow |
| `password` | string | Oui | Mot de passe ServiceNow |
| `instanceUrl` | string | Non | URL de l'instance ServiceNow (détectée automatiquement depuis OAuth si non fournie) |
| `credential` | string | Non | ID des identifiants OAuth ServiceNow |
| `tableName` | string | Oui | Nom de la table |
| `sysId` | string | Oui | sys_id de l'enregistrement à mettre à jour |
| `fields` | json | Oui | Champs à mettre à jour \(objet JSON\) |
| `fields` | json | Oui | Champs à mettre à jour (objet JSON) |
#### Sortie
@@ -105,9 +90,8 @@ Supprimer un enregistrement d'une table ServiceNow
| Paramètre | Type | Requis | Description |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | Oui | URL de l'instance ServiceNow \(par exemple, https://instance.service-now.com\) |
| `username` | string | Oui | Nom d'utilisateur ServiceNow |
| `password` | string | Oui | Mot de passe ServiceNow |
| `instanceUrl` | string | Non | URL de l'instance ServiceNow (détectée automatiquement depuis OAuth si non fournie) |
| `credential` | string | Non | ID des identifiants OAuth ServiceNow |
| `tableName` | string | Oui | Nom de la table |
| `sysId` | string | Oui | sys_id de l'enregistrement à supprimer |
@@ -118,7 +102,7 @@ Supprimer un enregistrement d'une table ServiceNow
| `success` | boolean | Indique si la suppression a réussi |
| `metadata` | json | Métadonnées de l'opération |
## Remarques
## Notes
- Catégorie : `tools`
- Type : `servicenow`

View File

@@ -107,14 +107,14 @@ Lisez les derniers messages des canaux Slack. Récupérez l'historique des conve
#### Entrée
| Paramètre | Type | Obligatoire | Description |
| --------- | ---- | ----------- | ----------- |
| --------- | ---- | ---------- | ----------- |
| `authMethod` | chaîne | Non | Méthode d'authentification : oauth ou bot_token |
| `botToken` | chaîne | Non | Jeton du bot pour Bot personnalisé |
| `channel` | chaîne | Non | Canal Slack depuis lequel lire les messages \(ex. : #general\) |
| `userId` | chaîne | Non | ID utilisateur pour la conversation en message direct \(ex. : U1234567890\) |
| `limit` | nombre | Non | Nombre de messages à récupérer \(par défaut : 10, max : 15\) |
| `oldest` | chaîne | Non | Début de la plage horaire \(horodatage\) |
| `latest` | chaîne | Non | Fin de la plage horaire \(horodatage\) |
| `channel` | chaîne | Non | Canal Slack pour lire les messages \(ex. : #general\) |
| `userId` | chaîne | Non | ID utilisateur pour la conversation en MP \(ex. : U1234567890\) |
| `limit` | nombre | Non | Nombre de messages à récupérer \(par défaut : 10, max : 100\) |
| `oldest` | chaîne | Non | Début de la plage temporelle \(horodatage\) |
| `latest` | chaîne | Non | Fin de la plage temporelle \(horodatage\) |
#### Sortie

View File

@@ -37,18 +37,16 @@ Envoyez une requête de complétion de chat à n'importe quel fournisseur de LLM
#### Entrée
| Paramètre | Type | Requis | Description |
| --------- | ---- | -------- | ----------- |
| `model` | string | Oui | Le modèle à utiliser \(par exemple, gpt-4o, claude-sonnet-4-5, gemini-2.0-flash\) |
| `systemPrompt` | string | Non | Prompt système pour définir le comportement de l'assistant |
| `context` | string | Oui | Le message utilisateur ou le contexte à envoyer au modèle |
| `apiKey` | string | Non | Clé API pour le fournisseur \(utilise la clé de la plateforme si non fournie pour les modèles hébergés\) |
| `temperature` | number | Non | Température pour la génération de réponse \(0-2\) |
| `maxTokens` | number | Non | Nombre maximum de tokens dans la réponse |
| `azureEndpoint` | string | Non | URL du point de terminaison Azure OpenAI |
| `azureApiVersion` | string | Non | Version de l'API Azure OpenAI |
| `vertexProject` | string | Non | ID du projet Google Cloud pour Vertex AI |
| `vertexLocation` | string | Non | Emplacement Google Cloud pour Vertex AI \(par défaut us-central1\) |
| Paramètre | Type | Obligatoire | Description |
| --------- | ---- | ---------- | ----------- |
| `model` | chaîne | Oui | Le modèle à utiliser (ex. : gpt-4o, claude-sonnet-4-5, gemini-2.0-flash) |
| `systemPrompt` | chaîne | Non | Instruction système pour définir le comportement de l'assistant |
| `context` | chaîne | Oui | Le message utilisateur ou le contexte à envoyer au modèle |
| `apiKey` | chaîne | Non | Clé API pour le fournisseur (utilise la clé de plateforme si non fournie pour les modèles hébergés) |
| `temperature` | nombre | Non | Température pour la génération de réponse (0-2) |
| `maxTokens` | nombre | Non | Nombre maximum de tokens dans la réponse |
| `azureEndpoint` | chaîne | Non | URL du point de terminaison Azure OpenAI |
| `azureApiVersion` | chaîne | Non | Version de l'API Azure OpenAI |
#### Sortie

View File

@@ -110,24 +110,26 @@ import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
```json
{
"content": "Original content passed through",
"conditionResult": true,
"selectedPath": {
"blockId": "2acd9007-27e8-4510-a487-73d3b825e7c1",
"blockType": "agent",
"blockTitle": "Follow-up Agent"
},
"selectedOption": "condition-1"
"selectedConditionId": "condition-1"
}
```
### 条件ブロックの出力フィールド
- **content**: そのまま渡される元のコンテンツ
- **conditionResult**: 条件評価の真偽値結果
- **selectedPath**: 選択されたパスに関する情報
- **blockId**: 選択されたパスの次のブロックのID
- **blockType**: 次のブロックのタイプ
- **blockTitle**: 次のブロックのタイトル
- **selectedOption**: 選択された条件のID
- **selectedConditionId**: 選択された条件のID
</Tab>
<Tab>

View File

@@ -94,16 +94,10 @@ Jira課題を作成する
| `projectId` | string | はい | 課題のプロジェクトID |
| `summary` | string | はい | 課題の要約 |
| `description` | string | いいえ | 課題の説明 |
| `priority` | string | いいえ | 課題の優先度IDまたは名前「10000」または「高」 |
| `assignee` | string | いいえ | 課題の担当者アカウントID |
| `priority` | string | いいえ | 課題の優先度 |
| `assignee` | string | いいえ | 課題の担当者 |
| `cloudId` | string | いいえ | インスタンスのJira Cloud ID。提供されない場合、ドメインを使用して取得されます。 |
| `issueType` | string | はい | 作成する課題のタイプ(例:タスク、ストーリー) |
| `labels` | array | いいえ | 課題のラベル(ラベル名の配列) |
| `duedate` | string | いいえ | 課題の期限形式YYYY-MM-DD |
| `reporter` | string | いいえ | 課題の報告者アカウントID |
| `environment` | string | いいえ | 課題の環境情報 |
| `customFieldId` | string | いいえ | カスタムフィールドID例customfield_10001 |
| `customFieldValue` | string | いいえ | カスタムフィールドの値 |
#### 出力
@@ -112,8 +106,7 @@ Jira課題を作成する
| `ts` | string | 操作のタイムスタンプ |
| `issueKey` | string | 作成された課題キー例PROJ-123 |
| `summary` | string | 課題の要約 |
| `url` | string | 作成された課題のURL |
| `assigneeId` | string | 割り当てられたユーザーのアカウントID割り当てられている場合 |
| `url` | string | 作成された課題のURL |
### `jira_bulk_read`
@@ -527,31 +520,7 @@ Jira課題からウォッチャーを削除する
| `issueKey` | string | 課題キー |
| `watcherAccountId` | string | 削除されたウォッチャーのアカウントID |
### `jira_get_users`
## 注意事項
Jiraユーザーを取得します。アカウントIDが提供された場合、単一のユーザーを返します。それ以外の場合、すべてのユーザーのリストを返します。
#### 入力
| パラメータ | 型 | 必須 | 説明 |
| --------- | ---- | -------- | ----------- |
| `domain` | string | はい | あなたのJiraドメイン例yourcompany.atlassian.net |
| `accountId` | string | いいえ | 特定のユーザーを取得するためのオプションのアカウントID。提供されない場合、すべてのユーザーを返します。 |
| `startAt` | number | いいえ | 返す最初のユーザーのインデックスページネーション用、デフォルト0 |
| `maxResults` | number | いいえ | 返すユーザーの最大数デフォルト50 |
| `cloudId` | string | いいえ | インスタンスのJira Cloud ID。提供されない場合、ドメインを使用して取得されます。 |
#### 出力
| パラメータ | 型 | 説明 |
| --------- | ---- | ----------- |
| `ts` | string | 操作のタイムスタンプ |
| `users` | json | accountId、displayName、emailAddress、activeステータス、avatarUrlsを含むユーザーの配列 |
| `total` | number | 返されたユーザーの総数 |
| `startAt` | number | ページネーション開始インデックス |
| `maxResults` | number | ページあたりの最大結果数 |
## 注記
- カテゴリ:`tools`
- タイプ:`jira`
- カテゴリー: `tools`
- タイプ: `jira`

View File

@@ -1,6 +1,6 @@
---
title: ServiceNow
description: ServiceNowレコードの作成、読み取り、更新、削除
description: ServiceNowレコードの作成、読み取り、更新、削除、一括インポート
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
@@ -10,22 +10,9 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
color="#032D42"
/>
{/* MANUAL-CONTENT-START:intro */}
[ServiceNow](https://www.servicenow.com/)は、組織全体のITサービス管理ITSM、ワークフロー、ビジネスプロセスを効率化し自動化するために設計された強力なクラウドプラットフォームです。ServiceNowを使用すると、広範なAPIを使用してインシデント、リクエスト、タスク、ユーザーなどを管理できます。
ServiceNowでは、次のことができます。
- **ITワークフローの自動化**: インシデント、タスク、変更リクエスト、ユーザーなど、任意のServiceNowテーブルのレコードを作成、読み取り、更新、削除します。
- **システムの統合**: ServiceNowを他のツールやプロセスと接続して、シームレスな自動化を実現します。
- **単一の信頼できる情報源の維持**: すべてのサービスおよび運用データを整理してアクセス可能な状態に保ちます。
- **運用効率の向上**: カスタマイズ可能なワークフローと自動化により、手作業を削減し、サービス品質を向上させます。
Simでは、ServiceNow統合により、エージェントがワークフローの一部としてServiceNowインスタンスと直接やり取りできるようになります。エージェントは、任意のServiceNowテーブルのレコードを作成、読み取り、更新、削除でき、チケットやユーザーデータを活用して高度な自動化と意思決定を行うことができます。この統合により、ワークフロー自動化とIT運用が橋渡しされ、エージェントは手動介入なしでサービスリクエスト、インシデント、ユーザー、資産を管理できるようになります。SimとServiceNowを接続することで、サービス管理タスクを自動化し、応答時間を改善し、組織の重要なサービスデータへの一貫性のある安全なアクセスを確保できます。
{/* MANUAL-CONTENT-END */}
## 使用方法
ServiceNowをワークフローに統合します。インシデント、タスク、変更リクエスト、ユーザーなど、任意のServiceNowテーブルのレコードを作成、読み取り、更新、削除します。
ServiceNowをワークフローに統合します。任意のServiceNowテーブルインシデント、タスク、ユーザーなどのレコードを作成、読み取り、更新、削除できます。データ移行とETLのための一括インポート操作をサポートします。
## ツール
@@ -37,11 +24,10 @@ ServiceNowテーブルに新しいレコードを作成
| パラメータ | 型 | 必須 | 説明 |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | はい | ServiceNowインスタンスURL(例: https://instance.service-now.com) |
| `username` | string | はい | ServiceNowユーザー名 |
| `password` | string | はい | ServiceNowパスワード |
| `tableName` | string | はい | テーブル名(例: incident、task、sys_user) |
| `fields` | json | はい | レコードに設定するフィールド(JSONオブジェクト) |
| `instanceUrl` | string | はい | ServiceNowインスタンスURL(例:https://instance.service-now.com |
| `credential` | string | いいえ | ServiceNow OAuth認証情報ID |
| `tableName` | string | はい | テーブル名例incident、task、sys_user |
| `fields` | json | はい | レコードに設定するフィールドJSONオブジェクト |
#### 出力
@@ -52,20 +38,19 @@ ServiceNowテーブルに新しいレコードを作成
### `servicenow_read_record`
ServiceNowテーブルからレコードを読み取ります
ServiceNowテーブルからレコードを読み取り
#### 入力
| パラメータ | 型 | 必須 | 説明 |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | はい | ServiceNowインスタンスURL(例: https://instance.service-now.com) |
| `username` | string | はい | ServiceNowユーザー名 |
| `password` | string | はい | ServiceNowパスワード |
| `instanceUrl` | string | いいえ | ServiceNowインスタンスURL(指定されていない場合はOAuthから自動検出) |
| `credential` | string | いいえ | ServiceNow OAuth認証情報ID |
| `tableName` | string | はい | テーブル名 |
| `sysId` | string | いいえ | 特定のレコードsys_id |
| `number` | string | いいえ | レコード番号(例: INC0010001) |
| `query` | string | いいえ | エンコードされたクエリ文字列(例: "active=true^priority=1") |
| `limit` | number | いいえ | 返すレコードの最大数 |
| `sysId` | string | いいえ | 特定のレコードsys_id |
| `number` | string | いいえ | レコード番号(例:INC0010001 |
| `query` | string | いいえ | エンコードされたクエリ文字列(例:"active=true^priority=1" |
| `limit` | number | いいえ | 返す最大レコード数 |
| `fields` | string | いいえ | 返すフィールドのカンマ区切りリスト |
#### 出力
@@ -77,18 +62,17 @@ ServiceNowテーブルからレコードを読み取ります
### `servicenow_update_record`
ServiceNowテーブル内の既存のレコードを更新
ServiceNowテーブル内の既存のレコードを更新します
#### 入力
| パラメータ | 型 | 必須 | 説明 |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | はい | ServiceNowインスタンスURL(例: https://instance.service-now.com) |
| `username` | string | はい | ServiceNowユーザー名 |
| `password` | string | はい | ServiceNowパスワード |
| `instanceUrl` | string | いいえ | ServiceNowインスタンスURL(指定されていない場合はOAuthから自動検出) |
| `credential` | string | いいえ | ServiceNow OAuth認証情報ID |
| `tableName` | string | はい | テーブル名 |
| `sysId` | string | はい | 更新するレコードのsys_id |
| `fields` | json | はい | 更新するフィールドJSONオブジェクト |
| `fields` | json | はい | 更新するフィールド(JSONオブジェクト) |
#### 出力
@@ -99,15 +83,14 @@ ServiceNowテーブル内の既存のレコードを更新
### `servicenow_delete_record`
ServiceNowテーブルからレコードを削除
ServiceNowテーブルからレコードを削除します
#### 入力
| パラメータ | 型 | 必須 | 説明 |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | はい | ServiceNowインスタンスURL(例: https://instance.service-now.com) |
| `username` | string | はい | ServiceNowユーザー名 |
| `password` | string | はい | ServiceNowパスワード |
| `instanceUrl` | string | いいえ | ServiceNowインスタンスURL(指定されていない場合はOAuthから自動検出) |
| `credential` | string | いいえ | ServiceNow OAuth認証情報ID |
| `tableName` | string | はい | テーブル名 |
| `sysId` | string | はい | 削除するレコードのsys_id |
@@ -118,7 +101,7 @@ ServiceNowテーブルからレコードを削除
| `success` | boolean | 削除が成功したかどうか |
| `metadata` | json | 操作メタデータ |
## 注意事項
## 注
- カテゴリ: `tools`
- カテゴリ: `tools`
- タイプ: `servicenow`

View File

@@ -110,8 +110,8 @@ Slackチャンネルから最新のメッセージを読み取ります。フィ
| `authMethod` | string | いいえ | 認証方法oauthまたはbot_token |
| `botToken` | string | いいえ | カスタムボット用のボットトークン |
| `channel` | string | いいえ | メッセージを読み取るSlackチャンネル例#general |
| `userId` | string | いいえ | DM会話用のユーザーID例U1234567890 |
| `limit` | number | いいえ | 取得するメッセージ数デフォルト10、最大15 |
| `userId` | string | いいえ | DM会話用のユーザーID例U1234567890 |
| `limit` | number | いいえ | 取得するメッセージ数デフォルト10、最大100 |
| `oldest` | string | いいえ | 時間範囲の開始(タイムスタンプ) |
| `latest` | string | いいえ | 時間範囲の終了(タイムスタンプ) |

View File

@@ -42,13 +42,11 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
| `model` | string | はい | 使用するモデル例gpt-4o、claude-sonnet-4-5、gemini-2.0-flash |
| `systemPrompt` | string | いいえ | アシスタントの動作を設定するシステムプロンプト |
| `context` | string | はい | モデルに送信するユーザーメッセージまたはコンテキスト |
| `apiKey` | string | いいえ | プロバイダーのAPIキー(ホストされたモデルの場合、提供されない場合はプラットフォームキーを使用) |
| `apiKey` | string | いいえ | プロバイダーのAPIキー(ホストされたモデルの場合、提供されなければプラットフォームキーを使用) |
| `temperature` | number | いいえ | レスポンス生成の温度(0-2) |
| `maxTokens` | number | いいえ | レスポンスの最大トークン数 |
| `azureEndpoint` | string | いいえ | Azure OpenAIエンドポイントURL |
| `azureApiVersion` | string | いいえ | Azure OpenAI APIバージョン |
| `vertexProject` | string | いいえ | Vertex AI用のGoogle CloudプロジェクトID |
| `vertexLocation` | string | いいえ | Vertex AI用のGoogle Cloudロケーション(デフォルトはus-central1) |
#### 出力

View File

@@ -110,24 +110,26 @@ import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
```json
{
"content": "Original content passed through",
"conditionResult": true,
"selectedPath": {
"blockId": "2acd9007-27e8-4510-a487-73d3b825e7c1",
"blockType": "agent",
"blockTitle": "Follow-up Agent"
},
"selectedOption": "condition-1"
"selectedConditionId": "condition-1"
}
```
### 条件模块输出字段
- **conditionResult**:条件判断的布尔值结果
- **selectedPath**:所选路径的信息
- **blockId**:所选路径下一个区块的 ID
- **blockType**下一个块的类型
- **blockTitle**:下一个块的标题
- **selectedOption**:所选条件的 ID
- **content**:传递的原始内容
- **conditionResult**:条件评估的布尔结果
- **selectedPath**:关于选定路径的信息
- **blockId**:选定路径中下一个块的 ID
- **blockType**:下一个块的类型
- **blockTitle**:下一个模块的标题
- **selectedConditionId**:选定条件的 ID
</Tab>
<Tab>

View File

@@ -91,19 +91,13 @@ Jira 的主要功能包括:
| 参数 | 类型 | 必需 | 描述 |
| --------- | ---- | -------- | ----------- |
| `domain` | 字符串 | 是 | 您的 Jira 域名 \(例如yourcompany.atlassian.net\) |
| `projectId` | 字符串 | 是 | 问题所属项目 ID |
| `summary` | 字符串 | 是 | 问题摘要 |
| `description` | 字符串 | 否 | 问题描述 |
| `priority` | 字符串 | 否 | 问题优先级 ID 或名称 \(例如“10000”或“High”\) |
| `assignee` | 字符串 | 否 | 问题负责人账户 ID |
| `cloudId` | 字符串 | 否 | 实例的 Jira Cloud ID。如果未提供将使用域名获取。 |
| `issueType` | 字符串 | 是 | 要创建的问题类型 \(例如:Task、Story\) |
| `labels` | 数组 | 否 | 问题标签 \(标签名称数组\) |
| `duedate` | 字符串 | 否 | 问题截止日期 \(格式YYYY-MM-DD\) |
| `reporter` | 字符串 | 否 | 问题报告人账户 ID |
| `environment` | 字符串 | 否 | 问题环境信息 |
| `customFieldId` | 字符串 | 否 | 自定义字段 ID \(例如customfield_10001\) |
| `customFieldValue` | 字符串 | 否 | 自定义字段的值 |
| `projectId` | 字符串 | 是 | 问题项目 ID |
| `summary` | 字符串 | 是 | 问题摘要 |
| `description` | 字符串 | 否 | 问题描述 |
| `priority` | 字符串 | 否 | 问题优先级 |
| `assignee` | 字符串 | 否 | 问题负责人 |
| `cloudId` | 字符串 | 否 | 实例的 Jira ID。如果未提供将使用域名获取。 |
| `issueType` | 字符串 | 是 | 要创建的问题类型 \(例如:任务、故事\) |
#### 输出
@@ -113,7 +107,6 @@ Jira 的主要功能包括:
| `issueKey` | 字符串 | 创建的问题键 \(例如PROJ-123\) |
| `summary` | 字符串 | 问题摘要 |
| `url` | 字符串 | 创建的问题的 URL |
| `assigneeId` | 字符串 | 已分配用户的账户 ID如已分配 |
### `jira_bulk_read`
@@ -527,31 +520,7 @@ Jira 的主要功能包括:
| `issueKey` | string | 问题键 |
| `watcherAccountId` | string | 移除的观察者账户 ID |
### `jira_get_users`
## 注意事项
获取 Jira 用户。如果提供了账户 ID则返回单个用户否则返回所有用户的列表。
#### 输入
| 参数 | 类型 | 必需 | 描述 |
| --------- | ---- | -------- | ----------- |
| `domain` | 字符串 | 是 | 您的 Jira 域名 \(例如yourcompany.atlassian.net\) |
| `accountId` | 字符串 | 否 | 可选账户 ID用于获取特定用户。如果未提供则返回所有用户。 |
| `startAt` | 数字 | 否 | 要返回的第一个用户的索引 \(用于分页默认值0\) |
| `maxResults` | 数字 | 否 | 要返回的最大用户数 \(默认值50\) |
| `cloudId` | 字符串 | 否 | 实例的 Jira Cloud ID。如果未提供将使用域名获取。 |
#### 输出
| 参数 | 类型 | 描述 |
| --------- | ---- | ----------- |
| `ts` | 字符串 | 操作的时间戳 |
| `users` | json | 用户数组,包含 accountId、displayName、emailAddress、active 状态和 avatarUrls |
| `total` | 数字 | 返回的用户总数 |
| `startAt` | 数字 | 分页起始索引 |
| `maxResults` | 数字 | 每页最大结果数 |
## 备注
- 分类:`tools`
- 类型:`jira`
- 类别: `tools`
- 类型: `jira`

View File

@@ -1,6 +1,6 @@
---
title: ServiceNow
description: 创建、读取、更新和删除 ServiceNow 记录
description: 创建、读取、更新、删除及批量导入 ServiceNow 记录
---
import { BlockInfoCard } from "@/components/ui/block-info-card"
@@ -10,22 +10,9 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
color="#032D42"
/>
{/* MANUAL-CONTENT-START:intro */}
[ServiceNow](https://www.servicenow.com/) 是一款强大的云平台,旨在简化和自动化 IT 服务管理ITSM、工作流以及企业各类业务流程。ServiceNow 让您能够通过其强大的 API 管理事件、请求、任务、用户等多种内容。
使用 ServiceNow您可以
- **自动化 IT 工作流**:在任意 ServiceNow 表中创建、读取、更新和删除记录,如事件、任务、变更请求和用户等。
- **集成系统**:将 ServiceNow 与您的其他工具和流程连接,实现无缝自动化。
- **维护单一数据源**:让所有服务和运营数据井然有序,便于访问。
- **提升运营效率**:通过可定制的工作流和自动化,减少手动操作,提高服务质量。
在 Sim 中ServiceNow 集成让您的代理能够在工作流中直接与 ServiceNow 实例交互。代理可以在任意 ServiceNow 表中创建、读取、更新或删除记录,并利用工单或用户数据实现复杂的自动化和决策。这一集成将您的工作流自动化与 IT 运维无缝衔接,使代理能够自动化管理服务请求、事件、用户和资产,无需人工干预。通过将 Sim 与 ServiceNow 连接,您可以自动化服务管理任务、提升响应速度,并确保对组织关键服务数据的持续、安全访问。
{/* MANUAL-CONTENT-END */}
## 使用说明
将 ServiceNow 集成到您的工作流中。在任意 ServiceNow 表(包括事件、任务、变更请求、用户等)中创建、读取、更新和删除记录。
将 ServiceNow 集成到您的工作流中。在任意 ServiceNow 表(事件、任务、用户等)中创建、读取、更新和删除记录。支持批量导入操作,便于数据迁移和 ETL。
## 工具
@@ -35,17 +22,16 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
#### 输入
| 参数 | 类型 | 是否必填 | 描述 |
| 参数 | 类型 | 必填 | 说明 |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | 是 | ServiceNow 实例 URL例如https://instance.service-now.com |
| `username` | string | 是 | ServiceNow 用户名 |
| `password` | string | 是 | ServiceNow 密码 |
| `credential` | string | 否 | ServiceNow OAuth 凭证 ID |
| `tableName` | string | 是 | 表名例如incident、task、sys_user |
| `fields` | json | 是 | 记录中要设置的字段JSON 对象) |
| `fields` | json | 是 | 要设置在记录上的字段JSON 对象) |
#### 输出
| 参数 | 类型 | 描述 |
| 参数 | 类型 | 说明 |
| --------- | ---- | ----------- |
| `record` | json | 创建的 ServiceNow 记录,包含 sys_id 及其他字段 |
| `metadata` | json | 操作元数据 |
@@ -56,11 +42,10 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
#### 输入
| 参数 | 类型 | 是否必填 | 描述 |
| 参数 | 类型 | 必填 | 说明 |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | 是 | ServiceNow 实例 URL例如https://instance.service-now.com |
| `username` | string | 是 | ServiceNow 用户名 |
| `password` | string | 是 | ServiceNow 密码 |
| `instanceUrl` | string | 否 | ServiceNow 实例 URL如未提供将通过 OAuth 自动检测) |
| `credential` | string | 否 | ServiceNow OAuth 凭证 ID |
| `tableName` | string | 是 | 表名 |
| `sysId` | string | 否 | 指定记录 sys_id |
| `number` | string | 否 | 记录编号例如INC0010001 |
@@ -70,7 +55,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
#### 输出
| 参数 | 类型 | 说明 |
| 参数 | 类型 | 描述 |
| --------- | ---- | ----------- |
| `records` | array | ServiceNow 记录数组 |
| `metadata` | json | 操作元数据 |
@@ -81,18 +66,17 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
#### 输入
| 参数 | 类型 | 必填 | 说明 |
| 参数 | 类型 | 是否必填 | 描述 |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | 是 | ServiceNow 实例 URL例如https://instance.service-now.com |
| `username` | string | 是 | ServiceNow 用户名 |
| `password` | string | 是 | ServiceNow 密码 |
| `instanceUrl` | string | 否 | ServiceNow 实例 URL如果未提供将通过 OAuth 自动检测) |
| `credential` | string | 否 | ServiceNow OAuth 凭证 ID |
| `tableName` | string | 是 | 表名 |
| `sysId` | string | 是 | 要更新的记录 sys_id |
| `fields` | json | 是 | 要更新的字段JSON 对象) |
#### 输出
| 参数 | 类型 | 说明 |
| 参数 | 类型 | 描述 |
| --------- | ---- | ----------- |
| `record` | json | 已更新的 ServiceNow 记录 |
| `metadata` | json | 操作元数据 |
@@ -103,11 +87,10 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
#### 输入
| 参数 | 类型 | 必填 | 说明 |
| 参数 | 类型 | 是否必填 | 描述 |
| --------- | ---- | -------- | ----------- |
| `instanceUrl` | string | 是 | ServiceNow 实例 URL例如https://instance.service-now.com |
| `username` | string | 是 | ServiceNow 用户名 |
| `password` | string | 是 | ServiceNow 密码 |
| `instanceUrl` | string | 否 | ServiceNow 实例 URL如果未提供将通过 OAuth 自动检测) |
| `credential` | string | 否 | ServiceNow OAuth 凭证 ID |
| `tableName` | string | 是 | 表名 |
| `sysId` | string | 是 | 要删除的记录 sys_id |
@@ -118,7 +101,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
| `success` | boolean | 删除是否成功 |
| `metadata` | json | 操作元数据 |
##
## 注意事项
- 分类:`tools`
- 类型:`servicenow`

View File

@@ -109,10 +109,10 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
| `authMethod` | string | 否 | 认证方法oauth 或 bot_token |
| `botToken` | string | 否 | 自定义 Bot 的令牌 |
| `channel` | string | 否 | 要读取消息的 Slack 频道(例如,#general |
| `userId` | string | 否 | DM 对话的用户 ID例如U1234567890 |
| `limit` | number | 否 | 要检索的消息数量默认10最大15 |
| `oldest` | string | 否 | 时间范围开始(时间戳) |
| `latest` | string | 否 | 时间范围结束(时间戳) |
| `userId` | string | 否 | DM 对话的用户 ID例如U1234567890 |
| `limit` | number | 否 | 要检索的消息数量默认10最大100 |
| `oldest` | string | 否 | 时间范围的开始(时间戳) |
| `latest` | string | 否 | 时间范围结束(时间戳) |
#### 输出

View File

@@ -37,18 +37,16 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
#### 输入
| 参数 | 类型 | 必填 | 说明 |
| 参数 | 类型 | 必需 | 描述 |
| --------- | ---- | -------- | ----------- |
| `model` | string | 是 | 要使用的模型例如 gpt-4o、claude-sonnet-4-5、gemini-2.0-flash |
| `systemPrompt` | string | 否 | 设置助手行为的 system prompt |
| `context` | string | 是 | 发送给模型的用户消息或上下文 |
| `apiKey` | string | 否 | 提供的 API key如未提供托管模型将使用平台密钥 |
| `temperature` | number | 否 | 响应生成的 temperature0-2 |
| `maxTokens` | number | 否 | 响应的最大 tokens 数 |
| `azureEndpoint` | string | 否 | Azure OpenAI endpoint URL |
| `model` | string | 是 | 要使用的模型 \(例如gpt-4o、claude-sonnet-4-5、gemini-2.0-flash\) |
| `systemPrompt` | string | 否 | 设置助手行为的系统提示 |
| `context` | string | 是 | 发送给模型的用户消息或上下文 |
| `apiKey` | string | 否 | 提供的 API 密钥 \(如果未为托管模型提供,则使用平台密钥\) |
| `temperature` | number | 否 | 响应生成的温度 \(0-2\) |
| `maxTokens` | number | 否 | 响应的最大令牌数 |
| `azureEndpoint` | string | 否 | Azure OpenAI 端点 URL |
| `azureApiVersion` | string | 否 | Azure OpenAI API 版本 |
| `vertexProject` | string | 否 | Vertex AI 的 Google Cloud 项目 ID |
| `vertexLocation` | string | 否 | Vertex AI 的 Google Cloud 区域(默认为 us-central1 |
#### 输出

View File

@@ -557,7 +557,7 @@ checksums:
content/8: 6325adefb6e1520835225285b18b6a45
content/9: b7fa85fce9c7476fe132df189e27dac1
content/10: 371d0e46b4bd2c23f559b8bc112f6955
content/11: 7ad14ccfe548588081626cfe769ad492
content/11: 985f435f721b00df4d13fa0a5552684c
content/12: bcadfc362b69078beee0088e5936c98b
content/13: 6af66efd0da20944a87fdb8d9defa358
content/14: b3f310d5ef115bea5a8b75bf25d7ea9a
@@ -903,7 +903,7 @@ checksums:
content/24: 228a8ece96627883153b826a1cbaa06c
content/25: 53abe061a259c296c82676b4770ddd1b
content/26: 371d0e46b4bd2c23f559b8bc112f6955
content/27: 5b9546f77fbafc0741f3fc2548f81c7e
content/27: 03e8b10ec08b354de98e360b66b779e3
content/28: bcadfc362b69078beee0088e5936c98b
content/29: b82def7d82657f941fbe60df3924eeeb
content/30: 1ca7ee3856805fa1718031c5f75b6ffb
@@ -2521,9 +2521,9 @@ checksums:
content/22: ef92d95455e378abe4d27a1cdc5e1aed
content/23: febd6019055f3754953fd93395d0dbf2
content/24: 371d0e46b4bd2c23f559b8bc112f6955
content/25: caf6acbe2a4495ca055cb9006ce47250
content/25: 7ef3f388e5ee9346bac54c771d825f40
content/26: bcadfc362b69078beee0088e5936c98b
content/27: 57662dd91f8d1d807377fd48fa0e9142
content/27: e0fa91c45aa780fc03e91df77417f893
content/28: b463f54cd5fe2458b5842549fbb5e1ce
content/29: 55f8c724e1a2463bc29a32518a512c73
content/30: 371d0e46b4bd2c23f559b8bc112f6955
@@ -2638,14 +2638,8 @@ checksums:
content/139: 33fde4c3da4584b51f06183b7b192a78
content/140: bcadfc362b69078beee0088e5936c98b
content/141: b7451190f100388d999c183958d787a7
content/142: d0f9e799e2e5cc62de60668d35fd846f
content/143: b19069ff19899fe202217e06e002c447
content/144: 371d0e46b4bd2c23f559b8bc112f6955
content/145: 480fd62f8d9cc18467e82f4c3f70beea
content/146: bcadfc362b69078beee0088e5936c98b
content/147: 4e73a65d3b873f3979587e10a0f39e72
content/148: b3f310d5ef115bea5a8b75bf25d7ea9a
content/149: 4930918f803340baa861bed9cdf789de
content/142: b3f310d5ef115bea5a8b75bf25d7ea9a
content/143: 4930918f803340baa861bed9cdf789de
8f76e389f6226f608571622b015ca6a1:
meta/title: ddfe2191ea61b34d8b7cc1d7c19b94ac
meta/description: 049ff551f2ebabb15cdea0c71bd8e4eb
@@ -4817,9 +4811,9 @@ checksums:
content/19: 85547efea8ae0e8170ac4e2030f6be25
content/20: 25c56dcdc4af1516c3fbf9d82d96b48d
content/21: 56dbe63da14a319cd520ab1615c94be7
content/22: e039f6c905c8aa148cc3e7af19f05239
content/22: e092cde0c92ef09c642a62636e7e3ae3
content/23: c7004f5db8f7134d7e3a36a1916691a2
content/24: 26555018b90fc8fb3ac65cece15f3966
content/24: bbc26961050b132b9bc4f14ba11f407a
content/25: 56dbe63da14a319cd520ab1615c94be7
content/26: 3e835ecc38acf2c76179034360d41670
content/27: a13bbc3dac7388e1ef4e9cbafdcc8241
@@ -49830,39 +49824,35 @@ checksums:
content/474: 27c398e669b297cea076e4ce4cc0c5eb
9a28da736b42bf8de55126d4c06b6150:
meta/title: 418d5c8a18ad73520b38765741601f32
meta/description: 41cb31abf94297849fb8a4023cf0211d
meta/description: 2b5a9723c7a45d2be5001d5d056b7c7b
content/0: 1b031fb0c62c46b177aeed5c3d3f8f80
content/1: e72670f88454b5b1c955b029de5fa8b5
content/2: d586e5af506d99add847369c0accfb4d
content/3: a2ce9ed4954ab55bcebed927cec8e890
content/4: 5fc7b723a6adcf201e8deb3f5ed9a9e3
content/5: a78981875c359a3343f26ed4d115f899
content/6: 821e6394b0a953e2b0842b04ae8f3105
content/7: 56a538eaccb1158fb1f7a01cc32f7331
content/8: 9c8aa3f09c9b2bd50ea4cdff3598ea4e
content/9: 263633aee6db9332de806ae50d87de05
content/10: 5a7e2171e5f73fec5eae21a50e5de661
content/11: 371d0e46b4bd2c23f559b8bc112f6955
content/12: 5905ef5d0db0354c08394acb0b5cda4b
content/13: bcadfc362b69078beee0088e5936c98b
content/14: d81ef802f80143282cf4e534561a9570
content/15: 02233e6212003c1d121424cfd8b86b62
content/16: efe2c6dd368708de68a1addbfdb11b0c
content/17: 371d0e46b4bd2c23f559b8bc112f6955
content/18: 2722e8bee100e7bc4590fa02710e9508
content/19: bcadfc362b69078beee0088e5936c98b
content/20: 953f353184dc27db1f20156db2a9ad90
content/21: 2011e87d0555cd0ab133ef2d35e7a37b
content/22: dbf08acb413d845ec419e45b1f986bdb
content/23: 371d0e46b4bd2c23f559b8bc112f6955
content/24: afc35de2990ed0e9bb8f98dc1b9609ce
content/25: bcadfc362b69078beee0088e5936c98b
content/26: c06a5bb458242baa23d34957034c2fe7
content/27: ff043e912417bc29ac7c64520160c07d
content/28: 9c2175ab469cb6ff9e62bc8bdcf7621d
content/29: 371d0e46b4bd2c23f559b8bc112f6955
content/30: 20e6bddad8e7f34a3d09e5b0c5678c13
content/31: bcadfc362b69078beee0088e5936c98b
content/32: fd0f38eb3fe5cf95be366a4ff6b4fb90
content/33: b3f310d5ef115bea5a8b75bf25d7ea9a
content/34: 4a7b2c644e487f3d12b6a6b54f8c6773
content/2: 821e6394b0a953e2b0842b04ae8f3105
content/3: 7fa671d05a60d4f25b4980405c2c7278
content/4: 9c8aa3f09c9b2bd50ea4cdff3598ea4e
content/5: 263633aee6db9332de806ae50d87de05
content/6: 5a7e2171e5f73fec5eae21a50e5de661
content/7: 371d0e46b4bd2c23f559b8bc112f6955
content/8: 10d2d4eccb4b8923f048980dc16e43e1
content/9: bcadfc362b69078beee0088e5936c98b
content/10: d81ef802f80143282cf4e534561a9570
content/11: 02233e6212003c1d121424cfd8b86b62
content/12: efe2c6dd368708de68a1addbfdb11b0c
content/13: 371d0e46b4bd2c23f559b8bc112f6955
content/14: 0f3295854b7de5dbfab1ebd2a130b498
content/15: bcadfc362b69078beee0088e5936c98b
content/16: 953f353184dc27db1f20156db2a9ad90
content/17: 2011e87d0555cd0ab133ef2d35e7a37b
content/18: dbf08acb413d845ec419e45b1f986bdb
content/19: 371d0e46b4bd2c23f559b8bc112f6955
content/20: 3a8417b390ec7d3d55b1920c721e9006
content/21: bcadfc362b69078beee0088e5936c98b
content/22: c06a5bb458242baa23d34957034c2fe7
content/23: ff043e912417bc29ac7c64520160c07d
content/24: 9c2175ab469cb6ff9e62bc8bdcf7621d
content/25: 371d0e46b4bd2c23f559b8bc112f6955
content/26: 67e6ba04cf67f92e714ed94e7483dec5
content/27: bcadfc362b69078beee0088e5936c98b
content/28: fd0f38eb3fe5cf95be366a4ff6b4fb90
content/29: b3f310d5ef115bea5a8b75bf25d7ea9a
content/30: 4a7b2c644e487f3d12b6a6b54f8c6773

View File

@@ -4,7 +4,7 @@
"private": true,
"license": "Apache-2.0",
"scripts": {
"dev": "next dev --port 3001",
"dev": "next dev --port 7322",
"build": "fumadocs-mdx && NODE_OPTIONS='--max-old-space-size=8192' next build",
"start": "next start",
"postinstall": "fumadocs-mdx",

View File

@@ -573,10 +573,10 @@ export default function LoginPage({
<Dialog open={forgotPasswordOpen} onOpenChange={setForgotPasswordOpen}>
<DialogContent className='auth-card auth-card-shadow max-w-[540px] rounded-[10px] border backdrop-blur-sm'>
<DialogHeader>
<DialogTitle className='font-semibold text-black text-xl tracking-tight'>
<DialogTitle className='auth-text-primary font-semibold text-xl tracking-tight'>
Reset Password
</DialogTitle>
<DialogDescription className='text-muted-foreground text-sm'>
<DialogDescription className='auth-text-secondary text-sm'>
Enter your email address and we'll send you a link to reset your password if your
account exists.
</DialogDescription>

View File

@@ -70,7 +70,6 @@ export const FOOTER_TOOLS = [
'Salesforce',
'SendGrid',
'Serper',
'ServiceNow',
'SharePoint',
'Slack',
'Smtp',

View File

@@ -109,7 +109,7 @@ export default function Footer({ fullWidth = false }: FooterProps) {
{FOOTER_BLOCKS.map((block) => (
<Link
key={block}
href={`https://docs.sim.ai/blocks/${block.toLowerCase().replaceAll(' ', '-')}`}
href={`https://docs.sim.ai/blocks/${block.toLowerCase().replace(' ', '-')}`}
target='_blank'
rel='noopener noreferrer'
className='text-[14px] text-muted-foreground transition-colors hover:text-foreground'

View File

@@ -2,6 +2,7 @@ import { Suspense } from 'react'
import dynamic from 'next/dynamic'
import { Background, Footer, Nav, StructuredData } from '@/app/(landing)/components'
// Lazy load heavy components for better initial load performance
const Hero = dynamic(() => import('@/app/(landing)/components/hero/hero'), {
loading: () => <div className='h-[600px] animate-pulse bg-gray-50' />,
})

View File

@@ -1,7 +1,8 @@
import Image from 'next/image'
import Link from 'next/link'
import { Avatar, AvatarFallback, AvatarImage } from '@/components/ui/avatar'
import { getAllPostMeta } from '@/lib/blog/registry'
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
import { PostGrid } from '@/app/(landing)/studio/post-grid'
export const revalidate = 3600
@@ -17,6 +18,7 @@ export default async function StudioIndex({
const all = await getAllPostMeta()
const filtered = tag ? all.filter((p) => p.tags.includes(tag)) : all
// Sort to ensure featured post is first on page 1
const sorted =
pageNum === 1
? filtered.sort((a, b) => {
@@ -61,7 +63,69 @@ export default async function StudioIndex({
</div> */}
{/* Grid layout for consistent rows */}
<PostGrid posts={posts} />
<div className='grid grid-cols-1 gap-4 md:grid-cols-2 md:gap-6 lg:grid-cols-3'>
{posts.map((p, i) => {
return (
<Link key={p.slug} href={`/studio/${p.slug}`} className='group flex flex-col'>
<div className='flex h-full flex-col overflow-hidden rounded-xl border border-gray-200 transition-colors duration-300 hover:border-gray-300'>
<Image
src={p.ogImage}
alt={p.title}
width={800}
height={450}
className='h-48 w-full object-cover'
sizes='(max-width: 768px) 100vw, (max-width: 1024px) 50vw, 33vw'
loading='lazy'
unoptimized
/>
<div className='flex flex-1 flex-col p-4'>
<div className='mb-2 text-gray-600 text-xs'>
{new Date(p.date).toLocaleDateString('en-US', {
month: 'short',
day: 'numeric',
year: 'numeric',
})}
</div>
<h3 className='shine-text mb-1 font-medium text-lg leading-tight'>{p.title}</h3>
<p className='mb-3 line-clamp-3 flex-1 text-gray-700 text-sm'>{p.description}</p>
<div className='flex items-center gap-2'>
<div className='-space-x-1.5 flex'>
{(p.authors && p.authors.length > 0 ? p.authors : [p.author])
.slice(0, 3)
.map((author, idx) => (
<Avatar key={idx} className='size-4 border border-white'>
<AvatarImage src={author?.avatarUrl} alt={author?.name} />
<AvatarFallback className='border border-white bg-gray-100 text-[10px] text-gray-600'>
{author?.name.slice(0, 2)}
</AvatarFallback>
</Avatar>
))}
</div>
<span className='text-gray-600 text-xs'>
{(p.authors && p.authors.length > 0 ? p.authors : [p.author])
.slice(0, 2)
.map((a) => a?.name)
.join(', ')}
{(p.authors && p.authors.length > 0 ? p.authors : [p.author]).length > 2 && (
<>
{' '}
and{' '}
{(p.authors && p.authors.length > 0 ? p.authors : [p.author]).length - 2}{' '}
other
{(p.authors && p.authors.length > 0 ? p.authors : [p.author]).length - 2 >
1
? 's'
: ''}
</>
)}
</span>
</div>
</div>
</div>
</Link>
)
})}
</div>
{totalPages > 1 && (
<div className='mt-10 flex items-center justify-center gap-3'>

View File

@@ -1,90 +0,0 @@
'use client'
import Image from 'next/image'
import Link from 'next/link'
import { Avatar, AvatarFallback, AvatarImage } from '@/components/ui/avatar'
interface Author {
id: string
name: string
avatarUrl?: string
url?: string
}
interface Post {
slug: string
title: string
description: string
date: string
ogImage: string
author: Author
authors?: Author[]
featured?: boolean
}
export function PostGrid({ posts }: { posts: Post[] }) {
return (
<div className='grid grid-cols-1 gap-4 md:grid-cols-2 md:gap-6 lg:grid-cols-3'>
{posts.map((p, index) => (
<Link key={p.slug} href={`/studio/${p.slug}`} className='group flex flex-col'>
<div className='flex h-full flex-col overflow-hidden rounded-xl border border-gray-200 transition-colors duration-300 hover:border-gray-300'>
{/* Image container with fixed aspect ratio to prevent layout shift */}
<div className='relative aspect-video w-full overflow-hidden'>
<Image
src={p.ogImage}
alt={p.title}
sizes='(max-width: 768px) 100vw, (max-width: 1024px) 50vw, 33vw'
unoptimized
priority={index < 6}
loading={index < 6 ? undefined : 'lazy'}
fill
style={{ objectFit: 'cover' }}
/>
</div>
<div className='flex flex-1 flex-col p-4'>
<div className='mb-2 text-gray-600 text-xs'>
{new Date(p.date).toLocaleDateString('en-US', {
month: 'short',
day: 'numeric',
year: 'numeric',
})}
</div>
<h3 className='shine-text mb-1 font-medium text-lg leading-tight'>{p.title}</h3>
<p className='mb-3 line-clamp-3 flex-1 text-gray-700 text-sm'>{p.description}</p>
<div className='flex items-center gap-2'>
<div className='-space-x-1.5 flex'>
{(p.authors && p.authors.length > 0 ? p.authors : [p.author])
.slice(0, 3)
.map((author, idx) => (
<Avatar key={idx} className='size-4 border border-white'>
<AvatarImage src={author?.avatarUrl} alt={author?.name} />
<AvatarFallback className='border border-white bg-gray-100 text-[10px] text-gray-600'>
{author?.name.slice(0, 2)}
</AvatarFallback>
</Avatar>
))}
</div>
<span className='text-gray-600 text-xs'>
{(p.authors && p.authors.length > 0 ? p.authors : [p.author])
.slice(0, 2)
.map((a) => a?.name)
.join(', ')}
{(p.authors && p.authors.length > 0 ? p.authors : [p.author]).length > 2 && (
<>
{' '}
and {(p.authors && p.authors.length > 0 ? p.authors : [p.author]).length - 2}{' '}
other
{(p.authors && p.authors.length > 0 ? p.authors : [p.author]).length - 2 > 1
? 's'
: ''}
</>
)}
</span>
</div>
</div>
</div>
</Link>
))}
</div>
)
}

View File

@@ -12,7 +12,6 @@ export function ThemeProvider({ children, ...props }: ThemeProviderProps) {
pathname === '/' ||
pathname.startsWith('/login') ||
pathname.startsWith('/signup') ||
pathname.startsWith('/reset-password') ||
pathname.startsWith('/sso') ||
pathname.startsWith('/terms') ||
pathname.startsWith('/privacy') ||

View File

@@ -759,24 +759,3 @@ input[type="search"]::-ms-clear {
--surface-elevated: #202020;
}
}
/**
* Remove backticks from inline code in prose (Tailwind Typography default)
*/
.prose code::before,
.prose code::after {
content: none !important;
}
/**
* Remove underlines from heading anchor links in prose
*/
.prose h1 a,
.prose h2 a,
.prose h3 a,
.prose h4 a,
.prose h5 a,
.prose h6 a {
text-decoration: none !important;
color: inherit !important;
}

View File

@@ -32,17 +32,7 @@ export async function GET(request: NextRequest) {
.from(account)
.where(and(...whereConditions))
// Use the user's email as the display name (consistent with credential selector)
const userEmail = session.user.email
const accountsWithDisplayName = accounts.map((acc) => ({
id: acc.id,
accountId: acc.accountId,
providerId: acc.providerId,
displayName: userEmail || acc.providerId,
}))
return NextResponse.json({ accounts: accountsWithDisplayName })
return NextResponse.json({ accounts })
} catch (error) {
logger.error('Failed to fetch accounts', { error })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
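For context, a rough sketch of the two response shapes implied by this hunk — the mapped variant that labels each account with the session email, and the plain variant that returns the selected columns as-is. Field values are illustrative only:

```ts
// Illustrative only: shapes implied by the route above, not actual API output.
interface MappedAccount {
  id: string
  accountId: string
  providerId: string
  displayName: string // session email, falling back to providerId
}

const mappedExample: { accounts: MappedAccount[] } = {
  accounts: [
    {
      id: 'acct_123',
      accountId: 'u_456',
      providerId: 'servicenow',
      displayName: 'user@company.com',
    },
  ],
}
```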

View File

@@ -6,10 +6,6 @@
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { createMockRequest, setupAuthApiMocks } from '@/app/api/__test-utils__/utils'
vi.mock('@/lib/core/utils/urls', () => ({
getBaseUrl: vi.fn(() => 'https://app.example.com'),
}))
describe('Forget Password API Route', () => {
beforeEach(() => {
vi.resetModules()
@@ -19,7 +15,7 @@ describe('Forget Password API Route', () => {
vi.clearAllMocks()
})
it('should send password reset email successfully with same-origin redirectTo', async () => {
it('should send password reset email successfully', async () => {
setupAuthApiMocks({
operations: {
forgetPassword: { success: true },
@@ -28,7 +24,7 @@ describe('Forget Password API Route', () => {
const req = createMockRequest('POST', {
email: 'test@example.com',
redirectTo: 'https://app.example.com/reset',
redirectTo: 'https://example.com/reset',
})
const { POST } = await import('@/app/api/auth/forget-password/route')
@@ -43,36 +39,12 @@ describe('Forget Password API Route', () => {
expect(auth.auth.api.forgetPassword).toHaveBeenCalledWith({
body: {
email: 'test@example.com',
redirectTo: 'https://app.example.com/reset',
redirectTo: 'https://example.com/reset',
},
method: 'POST',
})
})
it('should reject external redirectTo URL', async () => {
setupAuthApiMocks({
operations: {
forgetPassword: { success: true },
},
})
const req = createMockRequest('POST', {
email: 'test@example.com',
redirectTo: 'https://evil.com/phishing',
})
const { POST } = await import('@/app/api/auth/forget-password/route')
const response = await POST(req)
const data = await response.json()
expect(response.status).toBe(400)
expect(data.message).toBe('Redirect URL must be a valid same-origin URL')
const auth = await import('@/lib/auth')
expect(auth.auth.api.forgetPassword).not.toHaveBeenCalled()
})
it('should send password reset email without redirectTo', async () => {
setupAuthApiMocks({
operations: {

View File

@@ -1,7 +1,6 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { auth } from '@/lib/auth'
import { isSameOrigin } from '@/lib/core/utils/validation'
import { createLogger } from '@/lib/logs/console/logger'
export const dynamic = 'force-dynamic'
@@ -14,15 +13,10 @@ const forgetPasswordSchema = z.object({
.email('Please provide a valid email address'),
redirectTo: z
.string()
.url('Redirect URL must be a valid URL')
.optional()
.or(z.literal(''))
.transform((val) => (val === '' || val === undefined ? undefined : val))
.refine(
(val) => val === undefined || (z.string().url().safeParse(val).success && isSameOrigin(val)),
{
message: 'Redirect URL must be a valid same-origin URL',
}
),
.transform((val) => (val === '' ? undefined : val)),
})
export async function POST(request: NextRequest) {

View File

@@ -38,6 +38,7 @@ vi.mock('@/lib/logs/console/logger', () => ({
}))
import { db } from '@sim/db'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshOAuthToken } from '@/lib/oauth/oauth'
import {
getCredential,
@@ -48,6 +49,7 @@ import {
const mockDb = db as any
const mockRefreshOAuthToken = refreshOAuthToken as any
const mockLogger = (createLogger as any)()
describe('OAuth Utils', () => {
beforeEach(() => {
@@ -85,6 +87,7 @@ describe('OAuth Utils', () => {
const userId = await getUserId('request-id')
expect(userId).toBeUndefined()
expect(mockLogger.warn).toHaveBeenCalled()
})
it('should return undefined if workflow is not found', async () => {
@@ -93,6 +96,7 @@ describe('OAuth Utils', () => {
const userId = await getUserId('request-id', 'nonexistent-workflow-id')
expect(userId).toBeUndefined()
expect(mockLogger.warn).toHaveBeenCalled()
})
})
@@ -117,6 +121,7 @@ describe('OAuth Utils', () => {
const credential = await getCredential('request-id', 'nonexistent-id', 'test-user-id')
expect(credential).toBeUndefined()
expect(mockLogger.warn).toHaveBeenCalled()
})
})
@@ -134,6 +139,7 @@ describe('OAuth Utils', () => {
expect(mockRefreshOAuthToken).not.toHaveBeenCalled()
expect(result).toEqual({ accessToken: 'valid-token', refreshed: false })
expect(mockLogger.info).toHaveBeenCalledWith(expect.stringContaining('Access token is valid'))
})
it('should refresh token when expired', async () => {
@@ -153,10 +159,13 @@ describe('OAuth Utils', () => {
const result = await refreshTokenIfNeeded('request-id', mockCredential, 'credential-id')
expect(mockRefreshOAuthToken).toHaveBeenCalledWith('google', 'refresh-token')
expect(mockRefreshOAuthToken).toHaveBeenCalledWith('google', 'refresh-token', undefined)
expect(mockDb.update).toHaveBeenCalled()
expect(mockDb.set).toHaveBeenCalled()
expect(result).toEqual({ accessToken: 'new-token', refreshed: true })
expect(mockLogger.info).toHaveBeenCalledWith(
expect.stringContaining('Successfully refreshed')
)
})
it('should handle refresh token error', async () => {
@@ -173,6 +182,8 @@ describe('OAuth Utils', () => {
await expect(
refreshTokenIfNeeded('request-id', mockCredential, 'credential-id')
).rejects.toThrow('Failed to refresh token')
expect(mockLogger.error).toHaveBeenCalled()
})
it('should not attempt refresh if no refresh token', async () => {
@@ -228,7 +239,7 @@ describe('OAuth Utils', () => {
const token = await refreshAccessTokenIfNeeded('credential-id', 'test-user-id', 'request-id')
expect(mockRefreshOAuthToken).toHaveBeenCalledWith('google', 'refresh-token')
expect(mockRefreshOAuthToken).toHaveBeenCalledWith('google', 'refresh-token', undefined)
expect(mockDb.update).toHaveBeenCalled()
expect(mockDb.set).toHaveBeenCalled()
expect(token).toBe('new-token')
@@ -240,6 +251,7 @@ describe('OAuth Utils', () => {
const token = await refreshAccessTokenIfNeeded('nonexistent-id', 'test-user-id', 'request-id')
expect(token).toBeNull()
expect(mockLogger.warn).toHaveBeenCalled()
})
it('should return null if refresh fails', async () => {
@@ -258,6 +270,7 @@ describe('OAuth Utils', () => {
const token = await refreshAccessTokenIfNeeded('credential-id', 'test-user-id', 'request-id')
expect(token).toBeNull()
expect(mockLogger.error).toHaveBeenCalled()
})
})
})

View File

@@ -132,7 +132,14 @@ export async function getOAuthToken(userId: string, providerId: string): Promise
try {
// Use the existing refreshOAuthToken function
const refreshResult = await refreshOAuthToken(providerId, credential.refreshToken!)
// For ServiceNow, pass the instance URL (stored in idToken) for the token endpoint
const instanceUrl =
providerId === 'servicenow' ? (credential.idToken ?? undefined) : undefined
const refreshResult = await refreshOAuthToken(
providerId,
credential.refreshToken!,
instanceUrl
)
if (!refreshResult) {
logger.error(`Failed to refresh token for user ${userId}, provider ${providerId}`, {
@@ -215,9 +222,13 @@ export async function refreshAccessTokenIfNeeded(
if (shouldRefresh) {
logger.info(`[${requestId}] Token expired, attempting to refresh for credential`)
try {
// For ServiceNow, pass the instance URL (stored in idToken) for the token endpoint
const instanceUrl =
credential.providerId === 'servicenow' ? (credential.idToken ?? undefined) : undefined
const refreshedToken = await refreshOAuthToken(
credential.providerId,
credential.refreshToken!
credential.refreshToken!,
instanceUrl
)
if (!refreshedToken) {
@@ -289,7 +300,14 @@ export async function refreshTokenIfNeeded(
}
try {
const refreshResult = await refreshOAuthToken(credential.providerId, credential.refreshToken!)
// For ServiceNow, pass the instance URL (stored in idToken) for the token endpoint
const instanceUrl =
credential.providerId === 'servicenow' ? (credential.idToken ?? undefined) : undefined
const refreshResult = await refreshOAuthToken(
credential.providerId,
credential.refreshToken!,
instanceUrl
)
if (!refreshResult) {
logger.error(`[${requestId}] Failed to refresh token for credential`)
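All three call sites in this file follow the same pattern: when the provider is ServiceNow, the instance URL stashed in the credential's `idToken` is passed to `refreshOAuthToken` as a third argument so the refresh hits the customer's own instance. Below is a minimal sketch of a refresh helper shaped that way; the `/oauth_token.do` endpoint mirrors the callback route elsewhere in this change, while the parameter names and the `lookupTokenEndpoint` helper are assumptions, not the repo's actual implementation.

```ts
// Sketch only: refresh an OAuth token, targeting the per-instance ServiceNow
// endpoint when an instanceUrl is supplied. Not the repo's real refreshOAuthToken.
declare function lookupTokenEndpoint(providerId: string): string // hypothetical

async function refreshOAuthTokenSketch(
  providerId: string,
  refreshToken: string,
  instanceUrl?: string
): Promise<{ accessToken: string; expiresIn?: number } | null> {
  // ServiceNow refresh tokens are only valid against the issuing instance.
  const tokenUrl =
    providerId === 'servicenow' && instanceUrl
      ? `${instanceUrl}/oauth_token.do`
      : lookupTokenEndpoint(providerId)

  const res = await fetch(tokenUrl, {
    method: 'POST',
    headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
    body: new URLSearchParams({
      grant_type: 'refresh_token',
      refresh_token: refreshToken,
      // client_id / client_secret omitted; they come from provider config
    }).toString(),
  })
  if (!res.ok) return null

  const data = await res.json()
  return { accessToken: data.access_token, expiresIn: data.expires_in }
}
```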

View File

@@ -0,0 +1,166 @@
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { env } from '@/lib/core/config/env'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('ServiceNowCallback')
export const dynamic = 'force-dynamic'
export async function GET(request: NextRequest) {
const baseUrl = getBaseUrl()
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.redirect(`${baseUrl}/workspace?error=unauthorized`)
}
const { searchParams } = request.nextUrl
const code = searchParams.get('code')
const state = searchParams.get('state')
const error = searchParams.get('error')
const errorDescription = searchParams.get('error_description')
// Handle OAuth errors from ServiceNow
if (error) {
logger.error('ServiceNow OAuth error:', { error, errorDescription })
return NextResponse.redirect(
`${baseUrl}/workspace?error=servicenow_auth_error&message=${encodeURIComponent(errorDescription || error)}`
)
}
const storedState = request.cookies.get('servicenow_oauth_state')?.value
const storedInstanceUrl = request.cookies.get('servicenow_instance_url')?.value
const clientId = env.SERVICENOW_CLIENT_ID
const clientSecret = env.SERVICENOW_CLIENT_SECRET
if (!clientId || !clientSecret) {
logger.error('ServiceNow credentials not configured')
return NextResponse.redirect(`${baseUrl}/workspace?error=servicenow_config_error`)
}
// Validate state parameter
if (!state || state !== storedState) {
logger.error('State mismatch in ServiceNow OAuth callback')
return NextResponse.redirect(`${baseUrl}/workspace?error=servicenow_state_mismatch`)
}
// Validate authorization code
if (!code) {
logger.error('No code received from ServiceNow')
return NextResponse.redirect(`${baseUrl}/workspace?error=servicenow_no_code`)
}
// Validate instance URL
if (!storedInstanceUrl) {
logger.error('No instance URL stored')
return NextResponse.redirect(`${baseUrl}/workspace?error=servicenow_no_instance`)
}
const redirectUri = `${baseUrl}/api/auth/oauth2/callback/servicenow`
// Exchange authorization code for access token
const tokenResponse = await fetch(`${storedInstanceUrl}/oauth_token.do`, {
method: 'POST',
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
},
body: new URLSearchParams({
grant_type: 'authorization_code',
code: code,
redirect_uri: redirectUri,
client_id: clientId,
client_secret: clientSecret,
}).toString(),
})
if (!tokenResponse.ok) {
const errorText = await tokenResponse.text()
logger.error('Failed to exchange code for token:', {
status: tokenResponse.status,
body: errorText,
})
return NextResponse.redirect(`${baseUrl}/workspace?error=servicenow_token_error`)
}
const tokenData = await tokenResponse.json()
const accessToken = tokenData.access_token
const refreshToken = tokenData.refresh_token
const expiresIn = tokenData.expires_in
// ServiceNow always grants 'useraccount' scope but returns empty string
const scope = tokenData.scope || 'useraccount'
logger.info('ServiceNow token exchange successful:', {
hasAccessToken: !!accessToken,
hasRefreshToken: !!refreshToken,
expiresIn,
})
if (!accessToken) {
logger.error('No access token in response')
return NextResponse.redirect(`${baseUrl}/workspace?error=servicenow_no_token`)
}
// Redirect to store endpoint with token data in cookies
const storeUrl = new URL(`${baseUrl}/api/auth/oauth2/servicenow/store`)
const response = NextResponse.redirect(storeUrl)
// Store token data in secure cookies for the store endpoint
response.cookies.set('servicenow_pending_token', accessToken, {
httpOnly: true,
secure: process.env.NODE_ENV === 'production',
sameSite: 'lax',
maxAge: 60, // 1 minute
path: '/',
})
if (refreshToken) {
response.cookies.set('servicenow_pending_refresh_token', refreshToken, {
httpOnly: true,
secure: process.env.NODE_ENV === 'production',
sameSite: 'lax',
maxAge: 60,
path: '/',
})
}
response.cookies.set('servicenow_pending_instance', storedInstanceUrl, {
httpOnly: true,
secure: process.env.NODE_ENV === 'production',
sameSite: 'lax',
maxAge: 60,
path: '/',
})
response.cookies.set('servicenow_pending_scope', scope || '', {
httpOnly: true,
secure: process.env.NODE_ENV === 'production',
sameSite: 'lax',
maxAge: 60,
path: '/',
})
if (expiresIn) {
response.cookies.set('servicenow_pending_expires_in', expiresIn.toString(), {
httpOnly: true,
secure: process.env.NODE_ENV === 'production',
sameSite: 'lax',
maxAge: 60,
path: '/',
})
}
// Clean up OAuth state cookies
response.cookies.delete('servicenow_oauth_state')
response.cookies.delete('servicenow_instance_url')
return response
} catch (error) {
logger.error('Error in ServiceNow OAuth callback:', error)
return NextResponse.redirect(`${baseUrl}/workspace?error=servicenow_callback_error`)
}
}
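For reference, the exchange above expects a standard OAuth 2.0 token response from `/oauth_token.do`; the handler only reads `access_token`, `refresh_token`, `expires_in`, and `scope`, normalizing the empty scope that ServiceNow typically returns. A representative payload, with illustrative values only:

```ts
// Illustrative ServiceNow token response; all values are made up.
const exampleTokenResponse = {
  access_token: 'kJx1...redacted',
  refresh_token: '9fQ2...redacted',
  token_type: 'Bearer', // typical OAuth field, not read by the handler
  expires_in: 1800,
  scope: '', // treated as 'useraccount' by the code above
}
```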

View File

@@ -0,0 +1,142 @@
import { db } from '@sim/db'
import { account } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { createLogger } from '@/lib/logs/console/logger'
import { safeAccountInsert } from '@/app/api/auth/oauth/utils'
const logger = createLogger('ServiceNowStore')
export const dynamic = 'force-dynamic'
export async function GET(request: NextRequest) {
const baseUrl = getBaseUrl()
try {
const session = await getSession()
if (!session?.user?.id) {
logger.warn('Unauthorized attempt to store ServiceNow token')
return NextResponse.redirect(`${baseUrl}/workspace?error=unauthorized`)
}
// Retrieve token data from cookies
const accessToken = request.cookies.get('servicenow_pending_token')?.value
const refreshToken = request.cookies.get('servicenow_pending_refresh_token')?.value
const instanceUrl = request.cookies.get('servicenow_pending_instance')?.value
const scope = request.cookies.get('servicenow_pending_scope')?.value
const expiresInStr = request.cookies.get('servicenow_pending_expires_in')?.value
if (!accessToken || !instanceUrl) {
logger.error('Missing token or instance URL in cookies')
return NextResponse.redirect(`${baseUrl}/workspace?error=servicenow_missing_data`)
}
// Validate the token by fetching user info from ServiceNow
const userResponse = await fetch(
`${instanceUrl}/api/now/table/sys_user?sysparm_query=user_name=${encodeURIComponent('javascript:gs.getUserName()')}&sysparm_limit=1`,
{
headers: {
Authorization: `Bearer ${accessToken}`,
Accept: 'application/json',
},
}
)
// Alternative: Use the instance info endpoint instead
let accountIdentifier = instanceUrl
let userInfo: Record<string, unknown> | null = null
// Try to get current user info
try {
const whoamiResponse = await fetch(`${instanceUrl}/api/now/ui/user/current_user`, {
headers: {
Authorization: `Bearer ${accessToken}`,
Accept: 'application/json',
},
})
if (whoamiResponse.ok) {
const whoamiData = await whoamiResponse.json()
userInfo = whoamiData.result
if (userInfo?.user_sys_id) {
accountIdentifier = userInfo.user_sys_id as string
} else if (userInfo?.user_name) {
accountIdentifier = userInfo.user_name as string
}
logger.info('Retrieved ServiceNow user info', { accountIdentifier })
}
} catch (e) {
logger.warn('Could not retrieve ServiceNow user info, using instance URL as identifier')
}
// Calculate expiration time
const now = new Date()
const expiresIn = expiresInStr ? Number.parseInt(expiresInStr, 10) : 3600 // Default to 1 hour
const accessTokenExpiresAt = new Date(now.getTime() + expiresIn * 1000)
// Check for existing ServiceNow account for this user
const existing = await db.query.account.findFirst({
where: and(eq(account.userId, session.user.id), eq(account.providerId, 'servicenow')),
})
// ServiceNow always grants 'useraccount' scope but returns empty string
const effectiveScope = scope?.trim() ? scope : 'useraccount'
const accountData = {
accessToken: accessToken,
refreshToken: refreshToken || null,
accountId: accountIdentifier,
scope: effectiveScope,
updatedAt: now,
accessTokenExpiresAt: accessTokenExpiresAt,
idToken: instanceUrl, // Store instance URL in idToken for API calls
}
if (existing) {
await db.update(account).set(accountData).where(eq(account.id, existing.id))
logger.info('Updated existing ServiceNow account', { accountId: existing.id })
} else {
await safeAccountInsert(
{
id: `servicenow_${session.user.id}_${Date.now()}`,
userId: session.user.id,
providerId: 'servicenow',
accountId: accountData.accountId,
accessToken: accountData.accessToken,
refreshToken: accountData.refreshToken || undefined,
accessTokenExpiresAt: accountData.accessTokenExpiresAt,
scope: accountData.scope,
idToken: accountData.idToken,
createdAt: now,
updatedAt: now,
},
{ provider: 'ServiceNow', identifier: instanceUrl }
)
logger.info('Created new ServiceNow account')
}
// Get return URL from cookie
const returnUrl = request.cookies.get('servicenow_return_url')?.value
const redirectUrl = returnUrl || `${baseUrl}/workspace`
const finalUrl = new URL(redirectUrl)
finalUrl.searchParams.set('servicenow_connected', 'true')
const response = NextResponse.redirect(finalUrl.toString())
// Clean up all ServiceNow cookies
response.cookies.delete('servicenow_pending_token')
response.cookies.delete('servicenow_pending_refresh_token')
response.cookies.delete('servicenow_pending_instance')
response.cookies.delete('servicenow_pending_scope')
response.cookies.delete('servicenow_pending_expires_in')
response.cookies.delete('servicenow_return_url')
return response
} catch (error) {
logger.error('Error storing ServiceNow token:', error)
return NextResponse.redirect(`${baseUrl}/workspace?error=servicenow_store_error`)
}
}
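Because the store endpoint keeps the instance URL in the account row's `idToken` column, later code can recover both the base URL and the access token without prompting the user again. A rough sketch of that read path, reusing the Drizzle query shape from above; the helper name is hypothetical:

```ts
// Sketch only: resolve the ServiceNow connection details for a user from the
// stored account row. Column usage mirrors the store route above.
import { db } from '@sim/db'
import { account } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'

export async function getServiceNowConnection(userId: string) {
  const row = await db.query.account.findFirst({
    where: and(eq(account.userId, userId), eq(account.providerId, 'servicenow')),
  })
  if (!row?.accessToken || !row.idToken) return null

  return {
    instanceUrl: row.idToken, // instance URL stashed here at store time
    accessToken: row.accessToken,
    expiresAt: row.accessTokenExpiresAt ?? null,
  }
}
```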

View File

@@ -0,0 +1,264 @@
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { env } from '@/lib/core/config/env'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('ServiceNowAuthorize')
export const dynamic = 'force-dynamic'
/**
* ServiceNow OAuth scopes
* useraccount - Default scope for user account access
* Note: ServiceNow always returns 'useraccount' in OAuth responses regardless of requested scopes.
* Table API permissions are configured at the OAuth application level in ServiceNow.
*/
const SERVICENOW_SCOPES = 'useraccount'
/**
* Validates a ServiceNow instance URL format
*/
function isValidInstanceUrl(url: string): boolean {
try {
const parsed = new URL(url)
return (
parsed.protocol === 'https:' &&
(parsed.hostname.endsWith('.service-now.com') || parsed.hostname.endsWith('.servicenow.com'))
)
} catch {
return false
}
}
export async function GET(request: NextRequest) {
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const clientId = env.SERVICENOW_CLIENT_ID
if (!clientId) {
logger.error('SERVICENOW_CLIENT_ID not configured')
return NextResponse.json({ error: 'ServiceNow client ID not configured' }, { status: 500 })
}
const instanceUrl = request.nextUrl.searchParams.get('instanceUrl')
const returnUrl = request.nextUrl.searchParams.get('returnUrl')
if (!instanceUrl) {
const returnUrlParam = returnUrl ? encodeURIComponent(returnUrl) : ''
return new NextResponse(
`<!DOCTYPE html>
<html>
<head>
<title>Connect ServiceNow Instance</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<style>
body {
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;
display: flex;
align-items: center;
justify-content: center;
height: 100vh;
margin: 0;
background: linear-gradient(135deg, #81B5A1 0%, #5A8A75 100%);
}
.container {
background: white;
padding: 2rem;
border-radius: 12px;
box-shadow: 0 10px 40px rgba(0,0,0,0.1);
text-align: center;
max-width: 450px;
width: 90%;
}
h2 {
color: #111827;
margin: 0 0 0.5rem 0;
}
p {
color: #6b7280;
margin: 0 0 1.5rem 0;
}
input {
width: 100%;
padding: 0.75rem;
border: 1px solid #d1d5db;
border-radius: 8px;
font-size: 1rem;
margin-bottom: 1rem;
box-sizing: border-box;
}
input:focus {
outline: none;
border-color: #81B5A1;
box-shadow: 0 0 0 3px rgba(129, 181, 161, 0.2);
}
button {
width: 100%;
padding: 0.75rem;
background: #81B5A1;
color: white;
border: none;
border-radius: 8px;
font-size: 1rem;
cursor: pointer;
font-weight: 500;
}
button:hover {
background: #6A9A87;
}
.help {
font-size: 0.875rem;
color: #9ca3af;
margin-top: 1rem;
}
.error {
color: #dc2626;
font-size: 0.875rem;
margin-bottom: 1rem;
display: none;
}
</style>
</head>
<body>
<div class="container">
<h2>Connect Your ServiceNow Instance</h2>
<p>Enter your ServiceNow instance URL to continue</p>
<div id="error" class="error"></div>
<form onsubmit="handleSubmit(event)">
<input
type="text"
id="instanceUrl"
placeholder="https://mycompany.service-now.com"
required
/>
<button type="submit">Connect Instance</button>
</form>
<p class="help">Your instance URL looks like: https://yourcompany.service-now.com</p>
</div>
<script>
const returnUrl = '${returnUrlParam}';
function handleSubmit(e) {
e.preventDefault();
const errorEl = document.getElementById('error');
let instanceUrl = document.getElementById('instanceUrl').value.trim();
// Ensure https:// prefix
if (!instanceUrl.startsWith('https://') && !instanceUrl.startsWith('http://')) {
instanceUrl = 'https://' + instanceUrl;
}
// Validate the URL format
try {
const parsed = new URL(instanceUrl);
if (!parsed.hostname.endsWith('.service-now.com') && !parsed.hostname.endsWith('.servicenow.com')) {
errorEl.textContent = 'Please enter a valid ServiceNow instance URL (e.g., https://yourcompany.service-now.com)';
errorEl.style.display = 'block';
return;
}
// Clean the URL (remove trailing slashes, paths)
instanceUrl = parsed.origin;
} catch {
errorEl.textContent = 'Please enter a valid URL';
errorEl.style.display = 'block';
return;
}
let url = window.location.pathname + '?instanceUrl=' + encodeURIComponent(instanceUrl);
if (returnUrl) {
url += '&returnUrl=' + returnUrl;
}
window.location.href = url;
}
</script>
</body>
</html>`,
{
headers: {
'Content-Type': 'text/html; charset=utf-8',
'Cache-Control': 'no-store, no-cache, must-revalidate',
},
}
)
}
// Validate instance URL
if (!isValidInstanceUrl(instanceUrl)) {
logger.error('Invalid ServiceNow instance URL:', { instanceUrl })
return NextResponse.json(
{
error:
'Invalid ServiceNow instance URL. Must be a valid .service-now.com or .servicenow.com domain.',
},
{ status: 400 }
)
}
// Clean the instance URL
const parsedUrl = new URL(instanceUrl)
const cleanInstanceUrl = parsedUrl.origin
const baseUrl = getBaseUrl()
const redirectUri = `${baseUrl}/api/auth/oauth2/callback/servicenow`
const state = crypto.randomUUID()
// ServiceNow OAuth authorization URL
const oauthUrl =
`${cleanInstanceUrl}/oauth_auth.do?` +
new URLSearchParams({
response_type: 'code',
client_id: clientId,
redirect_uri: redirectUri,
state: state,
scope: SERVICENOW_SCOPES,
}).toString()
logger.info('Initiating ServiceNow OAuth:', {
instanceUrl: cleanInstanceUrl,
requestedScopes: SERVICENOW_SCOPES,
redirectUri,
returnUrl: returnUrl || 'not specified',
})
const response = NextResponse.redirect(oauthUrl)
// Store state and instance URL in cookies for validation in callback
response.cookies.set('servicenow_oauth_state', state, {
httpOnly: true,
secure: process.env.NODE_ENV === 'production',
sameSite: 'lax',
maxAge: 60 * 10, // 10 minutes
path: '/',
})
response.cookies.set('servicenow_instance_url', cleanInstanceUrl, {
httpOnly: true,
secure: process.env.NODE_ENV === 'production',
sameSite: 'lax',
maxAge: 60 * 10,
path: '/',
})
if (returnUrl) {
response.cookies.set('servicenow_return_url', returnUrl, {
httpOnly: true,
secure: process.env.NODE_ENV === 'production',
sameSite: 'lax',
maxAge: 60 * 10,
path: '/',
})
}
return response
} catch (error) {
logger.error('Error initiating ServiceNow authorization:', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
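For reference, a few illustrative checks against the validation rules above (hypothetical hostnames, not real tenants):
// Sketch only – exercising isValidInstanceUrl from this route:
isValidInstanceUrl('https://acme.service-now.com')   // true
isValidInstanceUrl('https://acme.servicenow.com')    // true
isValidInstanceUrl('http://acme.service-now.com')    // false – must be https
isValidInstanceUrl('https://acme.example.com')       // false – not a ServiceNow domain
// A valid instance is then redirected to:
// https://acme.service-now.com/oauth_auth.do?response_type=code&client_id=...&redirect_uri=...&state=...&scope=useraccount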

View File

@@ -2,7 +2,6 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { auth } from '@/lib/auth'
import { env } from '@/lib/core/config/env'
import { REDACTED_MARKER } from '@/lib/core/security/redaction'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('SSO-Register')
@@ -237,13 +236,13 @@ export async function POST(request: NextRequest) {
oidcConfig: providerConfig.oidcConfig
? {
...providerConfig.oidcConfig,
clientSecret: REDACTED_MARKER,
clientSecret: '[REDACTED]',
}
: undefined,
samlConfig: providerConfig.samlConfig
? {
...providerConfig.samlConfig,
cert: REDACTED_MARKER,
cert: '[REDACTED]',
}
: undefined,
},

View File

@@ -11,7 +11,7 @@ import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('CopilotChatsListAPI')
export async function GET(_request: NextRequest) {
export async function GET(_req: NextRequest) {
try {
const { userId, isAuthenticated } = await authenticateCopilotRequestSessionOnly()
if (!isAuthenticated || !userId) {

View File

@@ -38,13 +38,14 @@ export async function GET(
const cloudKey = isCloudPath ? path.slice(1).join('/') : fullPath
const contextParam = request.nextUrl.searchParams.get('context')
const legacyBucketType = request.nextUrl.searchParams.get('bucket')
const context = contextParam || (isCloudPath ? inferContextFromKey(cloudKey) : undefined)
if (context === 'profile-pictures' || context === 'og-images') {
logger.info(`Serving public ${context}:`, { cloudKey })
if (context === 'profile-pictures') {
logger.info('Serving public profile picture:', { cloudKey })
if (isUsingCloudStorage() || isCloudPath) {
return await handleCloudProxyPublic(cloudKey, context)
return await handleCloudProxyPublic(cloudKey, context, legacyBucketType)
}
return await handleLocalFilePublic(fullPath)
}
@@ -181,7 +182,8 @@ async function handleCloudProxy(
async function handleCloudProxyPublic(
cloudKey: string,
context: StorageContext
context: StorageContext,
legacyBucketType?: string | null
): Promise<NextResponse> {
try {
let fileBuffer: Buffer

View File

@@ -141,23 +141,6 @@ export async function DELETE(
)
}
// Check if deleting this folder would delete the last workflow(s) in the workspace
const workflowsInFolder = await countWorkflowsInFolderRecursively(
id,
existingFolder.workspaceId
)
const totalWorkflowsInWorkspace = await db
.select({ id: workflow.id })
.from(workflow)
.where(eq(workflow.workspaceId, existingFolder.workspaceId))
if (workflowsInFolder > 0 && workflowsInFolder >= totalWorkflowsInWorkspace.length) {
return NextResponse.json(
{ error: 'Cannot delete folder containing the only workflow(s) in the workspace' },
{ status: 400 }
)
}
// Recursively delete folder and all its contents
const deletionStats = await deleteFolderRecursively(id, existingFolder.workspaceId)
@@ -219,34 +202,6 @@ async function deleteFolderRecursively(
return stats
}
/**
* Counts the number of workflows in a folder and all its subfolders recursively.
*/
async function countWorkflowsInFolderRecursively(
folderId: string,
workspaceId: string
): Promise<number> {
let count = 0
const workflowsInFolder = await db
.select({ id: workflow.id })
.from(workflow)
.where(and(eq(workflow.folderId, folderId), eq(workflow.workspaceId, workspaceId)))
count += workflowsInFolder.length
const childFolders = await db
.select({ id: workflowFolder.id })
.from(workflowFolder)
.where(and(eq(workflowFolder.parentId, folderId), eq(workflowFolder.workspaceId, workspaceId)))
for (const childFolder of childFolders) {
count += await countWorkflowsInFolderRecursively(childFolder.id, workspaceId)
}
return count
}
// Helper function to check for circular references
async function checkForCircularReference(folderId: string, parentId: string): Promise<boolean> {
let currentParentId: string | null = parentId

View File

@@ -1,6 +1,7 @@
import { runs } from '@trigger.dev/sdk'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { authenticateApiKeyFromHeader, updateApiKeyLastUsed } from '@/lib/api-key/service'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { createErrorResponse } from '@/app/api/workflows/utils'
@@ -17,44 +18,38 @@ export async function GET(
try {
logger.debug(`[${requestId}] Getting status for task: ${taskId}`)
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
if (!authResult.success || !authResult.userId) {
logger.warn(`[${requestId}] Unauthorized task status request`)
return createErrorResponse(authResult.error || 'Authentication required', 401)
// Try session auth first (for web UI)
const session = await getSession()
let authenticatedUserId: string | null = session?.user?.id || null
if (!authenticatedUserId) {
const apiKeyHeader = request.headers.get('x-api-key')
if (apiKeyHeader) {
const authResult = await authenticateApiKeyFromHeader(apiKeyHeader)
if (authResult.success && authResult.userId) {
authenticatedUserId = authResult.userId
if (authResult.keyId) {
await updateApiKeyLastUsed(authResult.keyId).catch((error) => {
logger.warn(`[${requestId}] Failed to update API key last used timestamp:`, {
keyId: authResult.keyId,
error,
})
})
}
}
}
}
const authenticatedUserId = authResult.userId
if (!authenticatedUserId) {
return createErrorResponse('Authentication required', 401)
}
// Fetch task status from Trigger.dev
const run = await runs.retrieve(taskId)
logger.debug(`[${requestId}] Task ${taskId} status: ${run.status}`)
const payload = run.payload as any
if (payload?.workflowId) {
const { verifyWorkflowAccess } = await import('@/socket-server/middleware/permissions')
const accessCheck = await verifyWorkflowAccess(authenticatedUserId, payload.workflowId)
if (!accessCheck.hasAccess) {
logger.warn(`[${requestId}] User ${authenticatedUserId} denied access to task ${taskId}`, {
workflowId: payload.workflowId,
})
return createErrorResponse('Access denied', 403)
}
logger.debug(`[${requestId}] User ${authenticatedUserId} has access to task ${taskId}`)
} else {
if (payload?.userId && payload.userId !== authenticatedUserId) {
logger.warn(
`[${requestId}] User ${authenticatedUserId} attempted to access task ${taskId} owned by ${payload.userId}`
)
return createErrorResponse('Access denied', 403)
}
if (!payload?.userId) {
logger.warn(
`[${requestId}] Task ${taskId} has no ownership information in payload. Denying access for security.`
)
return createErrorResponse('Access denied', 403)
}
}
// Map Trigger.dev status to our format
const statusMap = {
QUEUED: 'queued',
WAITING_FOR_DEPLOY: 'queued',
@@ -72,6 +67,7 @@ export async function GET(
const mappedStatus = statusMap[run.status as keyof typeof statusMap] || 'unknown'
// Build response based on status
const response: any = {
success: true,
taskId,
@@ -81,18 +77,21 @@ export async function GET(
},
}
// Add completion details if finished
if (mappedStatus === 'completed') {
response.output = run.output // This contains the workflow execution results
response.metadata.completedAt = run.finishedAt
response.metadata.duration = run.durationMs
}
// Add error details if failed
if (mappedStatus === 'failed') {
response.error = run.error
response.metadata.completedAt = run.finishedAt
response.metadata.duration = run.durationMs
}
// Add progress info if still processing
if (mappedStatus === 'processing' || mappedStatus === 'queued') {
response.estimatedDuration = 180000 // 3 minutes max from our config
}
@@ -108,3 +107,6 @@ export async function GET(
return createErrorResponse('Failed to fetch task status', 500)
}
}
// TODO: Implement task cancellation via Trigger.dev API if needed
// export async function DELETE() { ... }
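A hedged sketch of how a client could poll this endpoint; the route's mount path is not shown in this diff, so the path constant below is a placeholder:
// Sketch only – TASK_STATUS_PATH is hypothetical; substitute the real route path.
const TASK_STATUS_PATH = '/api/<task-status-route>'
async function fetchTaskStatus(taskId: string, apiKey: string) {
  const res = await fetch(`${TASK_STATUS_PATH}/${taskId}`, {
    headers: { 'x-api-key': apiKey }, // session auth also works, per the handler above
  })
  if (!res.ok) throw new Error(`Task status request failed: ${res.status}`)
  const body = await res.json()
  // Per the handler: body.output is set on completion, body.error on failure,
  // and body.estimatedDuration while the run is still queued or processing.
  return body
}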

View File

@@ -156,7 +156,6 @@ export async function POST(
const validatedData = CreateChunkSchema.parse(searchParams)
const docTags = {
// Text tags (7 slots)
tag1: doc.tag1 ?? null,
tag2: doc.tag2 ?? null,
tag3: doc.tag3 ?? null,
@@ -164,19 +163,6 @@ export async function POST(
tag5: doc.tag5 ?? null,
tag6: doc.tag6 ?? null,
tag7: doc.tag7 ?? null,
// Number tags (5 slots)
number1: doc.number1 ?? null,
number2: doc.number2 ?? null,
number3: doc.number3 ?? null,
number4: doc.number4 ?? null,
number5: doc.number5 ?? null,
// Date tags (2 slots)
date1: doc.date1 ?? null,
date2: doc.date2 ?? null,
// Boolean tags (3 slots)
boolean1: doc.boolean1 ?? null,
boolean2: doc.boolean2 ?? null,
boolean3: doc.boolean3 ?? null,
}
const newChunk = await createChunk(

View File

@@ -72,16 +72,6 @@ describe('Document By ID API Route', () => {
tag5: null,
tag6: null,
tag7: null,
number1: null,
number2: null,
number3: null,
number4: null,
number5: null,
date1: null,
date2: null,
boolean1: null,
boolean2: null,
boolean3: null,
deletedAt: null,
}

View File

@@ -23,7 +23,7 @@ const UpdateDocumentSchema = z.object({
processingError: z.string().optional(),
markFailedDueToTimeout: z.boolean().optional(),
retryProcessing: z.boolean().optional(),
// Text tag fields
// Tag fields
tag1: z.string().optional(),
tag2: z.string().optional(),
tag3: z.string().optional(),
@@ -31,19 +31,6 @@ const UpdateDocumentSchema = z.object({
tag5: z.string().optional(),
tag6: z.string().optional(),
tag7: z.string().optional(),
// Number tag fields
number1: z.string().optional(),
number2: z.string().optional(),
number3: z.string().optional(),
number4: z.string().optional(),
number5: z.string().optional(),
// Date tag fields
date1: z.string().optional(),
date2: z.string().optional(),
// Boolean tag fields
boolean1: z.string().optional(),
boolean2: z.string().optional(),
boolean3: z.string().optional(),
})
export async function GET(

View File

@@ -80,16 +80,6 @@ describe('Knowledge Base Documents API Route', () => {
tag5: null,
tag6: null,
tag7: null,
number1: null,
number2: null,
number3: null,
number4: null,
number5: null,
date1: null,
date2: null,
boolean1: null,
boolean2: null,
boolean3: null,
deletedAt: null,
}

View File

@@ -27,7 +27,7 @@ const UpdateKnowledgeBaseSchema = z.object({
.optional(),
})
export async function GET(_request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
export async function GET(_req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
const { id } = await params
@@ -133,10 +133,7 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ id:
}
}
export async function DELETE(
_request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
export async function DELETE(_req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
const { id } = await params

View File

@@ -64,11 +64,6 @@ vi.mock('@/app/api/knowledge/utils', () => ({
checkKnowledgeBaseAccess: mockCheckKnowledgeBaseAccess,
}))
const mockGetDocumentTagDefinitions = vi.fn()
vi.mock('@/lib/knowledge/tags/service', () => ({
getDocumentTagDefinitions: mockGetDocumentTagDefinitions,
}))
const mockHandleTagOnlySearch = vi.fn()
const mockHandleVectorOnlySearch = vi.fn()
const mockHandleTagAndVectorSearch = vi.fn()
@@ -161,7 +156,6 @@ describe('Knowledge Search API Route', () => {
doc1: 'Document 1',
doc2: 'Document 2',
})
mockGetDocumentTagDefinitions.mockClear()
vi.stubGlobal('crypto', {
randomUUID: vi.fn().mockReturnValue('mock-uuid-1234-5678'),
@@ -665,8 +659,8 @@ describe('Knowledge Search API Route', () => {
describe('Optional Query Search', () => {
const mockTagDefinitions = [
{ tagSlot: 'tag1', displayName: 'category', fieldType: 'text' },
{ tagSlot: 'tag2', displayName: 'priority', fieldType: 'text' },
{ tagSlot: 'tag1', displayName: 'category' },
{ tagSlot: 'tag2', displayName: 'priority' },
]
const mockTaggedResults = [
@@ -695,7 +689,9 @@ describe('Knowledge Search API Route', () => {
it('should perform tag-only search without query', async () => {
const tagOnlyData = {
knowledgeBaseIds: 'kb-123',
tagFilters: [{ tagName: 'category', value: 'api', fieldType: 'text', operator: 'eq' }],
filters: {
category: 'api',
},
topK: 10,
}
@@ -710,11 +706,10 @@ describe('Knowledge Search API Route', () => {
},
})
// Mock tag definitions for validation
mockGetDocumentTagDefinitions.mockResolvedValue(mockTagDefinitions)
// Mock tag definitions queries for display mapping
mockDbChain.limit.mockResolvedValueOnce(mockTagDefinitions)
// Mock tag definitions queries for filter mapping and display mapping
mockDbChain.limit
.mockResolvedValueOnce(mockTagDefinitions) // Tag definitions for filter mapping
.mockResolvedValueOnce(mockTagDefinitions) // Tag definitions for display mapping
// Mock the tag-only search handler
mockHandleTagOnlySearch.mockResolvedValue(mockTaggedResults)
@@ -734,9 +729,7 @@ describe('Knowledge Search API Route', () => {
expect(mockHandleTagOnlySearch).toHaveBeenCalledWith({
knowledgeBaseIds: ['kb-123'],
topK: 10,
structuredFilters: [
{ tagSlot: 'tag1', fieldType: 'text', operator: 'eq', value: 'api', valueTo: undefined },
],
filters: { category: 'api' }, // Note: When no tag definitions are found, it uses the original filter key
})
})
@@ -744,7 +737,9 @@ describe('Knowledge Search API Route', () => {
const combinedData = {
knowledgeBaseIds: 'kb-123',
query: 'test search',
tagFilters: [{ tagName: 'category', value: 'api', fieldType: 'text', operator: 'eq' }],
filters: {
category: 'api',
},
topK: 10,
}
@@ -759,11 +754,10 @@ describe('Knowledge Search API Route', () => {
},
})
// Mock tag definitions for validation
mockGetDocumentTagDefinitions.mockResolvedValue(mockTagDefinitions)
// Mock tag definitions queries for display mapping
mockDbChain.limit.mockResolvedValueOnce(mockTagDefinitions)
// Mock tag definitions queries for filter mapping and display mapping
mockDbChain.limit
.mockResolvedValueOnce(mockTagDefinitions) // Tag definitions for filter mapping
.mockResolvedValueOnce(mockTagDefinitions) // Tag definitions for display mapping
// Mock the tag + vector search handler
mockHandleTagAndVectorSearch.mockResolvedValue(mockSearchResults)
@@ -790,9 +784,7 @@ describe('Knowledge Search API Route', () => {
expect(mockHandleTagAndVectorSearch).toHaveBeenCalledWith({
knowledgeBaseIds: ['kb-123'],
topK: 10,
structuredFilters: [
{ tagSlot: 'tag1', fieldType: 'text', operator: 'eq', value: 'api', valueTo: undefined },
],
filters: { category: 'api' }, // Note: When no tag definitions are found, it uses the original filter key
queryVector: JSON.stringify(mockEmbedding),
distanceThreshold: 1, // Single KB uses threshold of 1.0
})
@@ -936,10 +928,10 @@ describe('Knowledge Search API Route', () => {
it('should handle tag-only search with multiple knowledge bases', async () => {
const multiKbTagData = {
knowledgeBaseIds: ['kb-123', 'kb-456'],
tagFilters: [
{ tagName: 'category', value: 'docs', fieldType: 'text', operator: 'eq' },
{ tagName: 'priority', value: 'high', fieldType: 'text', operator: 'eq' },
],
filters: {
category: 'docs',
priority: 'high',
},
topK: 10,
}
@@ -959,14 +951,37 @@ describe('Knowledge Search API Route', () => {
knowledgeBase: { id: 'kb-456', userId: 'user-123', name: 'Test KB 2' },
})
// Mock tag definitions for validation
mockGetDocumentTagDefinitions.mockResolvedValue(mockTagDefinitions)
// Reset all mocks before setting up specific behavior
Object.values(mockDbChain).forEach((fn) => {
if (typeof fn === 'function') {
fn.mockClear().mockReturnThis()
}
})
// Mock the tag-only search handler
mockHandleTagOnlySearch.mockResolvedValue(mockTaggedResults)
// Create fresh mocks for multiple database calls needed for multi-KB tag search
const mockTagDefsQuery1 = {
...mockDbChain,
limit: vi.fn().mockResolvedValue(mockTagDefinitions),
}
const mockTagSearchQuery = {
...mockDbChain,
limit: vi.fn().mockResolvedValue(mockTaggedResults),
}
const mockTagDefsQuery2 = {
...mockDbChain,
limit: vi.fn().mockResolvedValue(mockTagDefinitions),
}
const mockTagDefsQuery3 = {
...mockDbChain,
limit: vi.fn().mockResolvedValue(mockTagDefinitions),
}
// Mock tag definitions queries for display mapping
mockDbChain.limit.mockResolvedValueOnce(mockTagDefinitions)
// Chain the mocks for: tag defs, search, display mapping KB1, display mapping KB2
mockDbChain.select
.mockReturnValueOnce(mockTagDefsQuery1)
.mockReturnValueOnce(mockTagSearchQuery)
.mockReturnValueOnce(mockTagDefsQuery2)
.mockReturnValueOnce(mockTagDefsQuery3)
const req = createMockRequest('POST', multiKbTagData)
const { POST } = await import('@/app/api/knowledge/search/route')
@@ -1061,11 +1076,6 @@ describe('Knowledge Search API Route', () => {
},
})
// Mock tag definitions for validation
mockGetDocumentTagDefinitions.mockResolvedValue([
{ tagSlot: 'tag1', displayName: 'tag1', fieldType: 'text' },
])
mockHandleTagOnlySearch.mockResolvedValue([
{
id: 'chunk-2',
@@ -1098,15 +1108,13 @@ describe('Knowledge Search API Route', () => {
const mockTagDefs = {
select: vi.fn().mockReturnThis(),
from: vi.fn().mockReturnThis(),
where: vi
.fn()
.mockResolvedValue([{ tagSlot: 'tag1', displayName: 'tag1', fieldType: 'text' }]),
where: vi.fn().mockResolvedValue([]),
}
mockDbChain.select.mockReturnValueOnce(mockTagDefs)
const req = createMockRequest('POST', {
knowledgeBaseIds: ['kb-123'],
tagFilters: [{ tagName: 'tag1', value: 'api', fieldType: 'text', operator: 'eq' }],
filters: { tag1: 'api' },
topK: 10,
})
@@ -1135,11 +1143,6 @@ describe('Knowledge Search API Route', () => {
},
})
// Mock tag definitions for validation
mockGetDocumentTagDefinitions.mockResolvedValue([
{ tagSlot: 'tag1', displayName: 'tag1', fieldType: 'text' },
])
mockHandleTagAndVectorSearch.mockResolvedValue([
{
id: 'chunk-3',
@@ -1173,16 +1176,14 @@ describe('Knowledge Search API Route', () => {
const mockTagDefs = {
select: vi.fn().mockReturnThis(),
from: vi.fn().mockReturnThis(),
where: vi
.fn()
.mockResolvedValue([{ tagSlot: 'tag1', displayName: 'tag1', fieldType: 'text' }]),
where: vi.fn().mockResolvedValue([]),
}
mockDbChain.select.mockReturnValueOnce(mockTagDefs)
const req = createMockRequest('POST', {
knowledgeBaseIds: ['kb-123'],
query: 'relevant content',
tagFilters: [{ tagName: 'tag1', value: 'guide', fieldType: 'text', operator: 'eq' }],
filters: { tag1: 'guide' },
topK: 10,
})

View File

@@ -1,10 +1,8 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { generateRequestId } from '@/lib/core/utils/request'
import { ALL_TAG_SLOTS } from '@/lib/knowledge/constants'
import { TAG_SLOTS } from '@/lib/knowledge/constants'
import { getDocumentTagDefinitions } from '@/lib/knowledge/tags/service'
import { buildUndefinedTagsError, validateTagValue } from '@/lib/knowledge/tags/utils'
import type { StructuredFilter } from '@/lib/knowledge/types'
import { createLogger } from '@/lib/logs/console/logger'
import { estimateTokenCount } from '@/lib/tokenization/estimators'
import { getUserId } from '@/app/api/auth/oauth/utils'
@@ -22,16 +20,6 @@ import { calculateCost } from '@/providers/utils'
const logger = createLogger('VectorSearchAPI')
/** Structured tag filter with operator support */
const StructuredTagFilterSchema = z.object({
tagName: z.string(),
tagSlot: z.string().optional(),
fieldType: z.enum(['text', 'number', 'date', 'boolean']).default('text'),
operator: z.string().default('eq'),
value: z.union([z.string(), z.number(), z.boolean()]),
valueTo: z.union([z.string(), z.number()]).optional(),
})
const VectorSearchSchema = z
.object({
knowledgeBaseIds: z.union([
@@ -51,17 +39,18 @@ const VectorSearchSchema = z
.nullable()
.default(10)
.transform((val) => val ?? 10),
tagFilters: z
.array(StructuredTagFilterSchema)
filters: z
.record(z.string())
.optional()
.nullable()
.transform((val) => val || undefined),
.transform((val) => val || undefined), // Allow dynamic filter keys (display names)
})
.refine(
(data) => {
// Ensure at least query or filters are provided
const hasQuery = data.query && data.query.trim().length > 0
const hasTagFilters = data.tagFilters && data.tagFilters.length > 0
return hasQuery || hasTagFilters
const hasFilters = data.filters && Object.keys(data.filters).length > 0
return hasQuery || hasFilters
},
{
message: 'Please provide either a search query or tag filters to search your knowledge base',
@@ -99,81 +88,45 @@ export async function POST(request: NextRequest) {
)
// Map display names to tag slots for filtering
let structuredFilters: StructuredFilter[] = []
let mappedFilters: Record<string, string> = {}
if (validatedData.filters && accessibleKbIds.length > 0) {
try {
// Fetch tag definitions for the first accessible KB (since we're using single KB now)
const kbId = accessibleKbIds[0]
const tagDefs = await getDocumentTagDefinitions(kbId)
// Handle tag filters
if (validatedData.tagFilters && accessibleKbIds.length > 0) {
const kbId = accessibleKbIds[0]
const tagDefs = await getDocumentTagDefinitions(kbId)
logger.debug(`[${requestId}] Found tag definitions:`, tagDefs)
logger.debug(`[${requestId}] Original filters:`, validatedData.filters)
// Create mapping from display name to tag slot and fieldType
const displayNameToTagDef: Record<string, { tagSlot: string; fieldType: string }> = {}
tagDefs.forEach((def) => {
displayNameToTagDef[def.displayName] = {
tagSlot: def.tagSlot,
fieldType: def.fieldType,
}
})
// Create mapping from display name to tag slot
const displayNameToSlot: Record<string, string> = {}
tagDefs.forEach((def) => {
displayNameToSlot[def.displayName] = def.tagSlot
})
// Validate all tag filters first
const undefinedTags: string[] = []
const typeErrors: string[] = []
// Map the filters and handle OR logic
Object.entries(validatedData.filters).forEach(([key, value]) => {
if (value) {
const tagSlot = displayNameToSlot[key] || key // Fallback to key if no mapping found
for (const filter of validatedData.tagFilters) {
const tagDef = displayNameToTagDef[filter.tagName]
// Check if this is an OR filter (contains |OR| separator)
if (value.includes('|OR|')) {
logger.debug(
`[${requestId}] OR filter detected: "${key}" -> "${tagSlot}" = "${value}"`
)
}
// Check if tag exists
if (!tagDef) {
undefinedTags.push(filter.tagName)
continue
}
mappedFilters[tagSlot] = value
logger.debug(`[${requestId}] Mapped filter: "${key}" -> "${tagSlot}" = "${value}"`)
}
})
// Validate value type using shared validation
const validationError = validateTagValue(
filter.tagName,
String(filter.value),
tagDef.fieldType
)
if (validationError) {
typeErrors.push(validationError)
}
logger.debug(`[${requestId}] Final mapped filters:`, mappedFilters)
} catch (error) {
logger.error(`[${requestId}] Filter mapping error:`, error)
// If mapping fails, use original filters
mappedFilters = validatedData.filters
}
// Throw combined error if there are any validation issues
if (undefinedTags.length > 0 || typeErrors.length > 0) {
const errorParts: string[] = []
if (undefinedTags.length > 0) {
errorParts.push(buildUndefinedTagsError(undefinedTags))
}
if (typeErrors.length > 0) {
errorParts.push(...typeErrors)
}
return NextResponse.json({ error: errorParts.join('\n') }, { status: 400 })
}
// Build structured filters with validated data
structuredFilters = validatedData.tagFilters.map((filter) => {
const tagDef = displayNameToTagDef[filter.tagName]!
const tagSlot = filter.tagSlot || tagDef.tagSlot
const fieldType = filter.fieldType || tagDef.fieldType
logger.debug(
`[${requestId}] Structured filter: ${filter.tagName} -> ${tagSlot} (${fieldType}) ${filter.operator} ${filter.value}`
)
return {
tagSlot,
fieldType,
operator: filter.operator,
value: filter.value,
valueTo: filter.valueTo,
}
})
logger.debug(`[${requestId}] Processed ${structuredFilters.length} structured filters`)
}
if (accessibleKbIds.length === 0) {
@@ -202,29 +155,26 @@ export async function POST(request: NextRequest) {
let results: SearchResult[]
const hasFilters = structuredFilters && structuredFilters.length > 0
const hasFilters = mappedFilters && Object.keys(mappedFilters).length > 0
if (!hasQuery && hasFilters) {
// Tag-only search without vector similarity
logger.debug(`[${requestId}] Executing tag-only search with filters:`, structuredFilters)
logger.debug(`[${requestId}] Executing tag-only search with filters:`, mappedFilters)
results = await handleTagOnlySearch({
knowledgeBaseIds: accessibleKbIds,
topK: validatedData.topK,
structuredFilters,
filters: mappedFilters,
})
} else if (hasQuery && hasFilters) {
// Tag + Vector search
logger.debug(
`[${requestId}] Executing tag + vector search with filters:`,
structuredFilters
)
logger.debug(`[${requestId}] Executing tag + vector search with filters:`, mappedFilters)
const strategy = getQueryStrategy(accessibleKbIds.length, validatedData.topK)
const queryVector = JSON.stringify(await queryEmbeddingPromise)
results = await handleTagAndVectorSearch({
knowledgeBaseIds: accessibleKbIds,
topK: validatedData.topK,
structuredFilters,
filters: mappedFilters,
queryVector,
distanceThreshold: strategy.distanceThreshold,
})
@@ -307,9 +257,9 @@ export async function POST(request: NextRequest) {
// Create tags object with display names
const tags: Record<string, any> = {}
ALL_TAG_SLOTS.forEach((slot) => {
TAG_SLOTS.forEach((slot) => {
const tagValue = (result as any)[slot]
if (tagValue !== null && tagValue !== undefined) {
if (tagValue) {
const displayName = kbTagMap[slot] || slot
logger.debug(
`[${requestId}] Mapping ${slot}="${tagValue}" -> "${displayName}"="${tagValue}"`
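For clarity, a hypothetical request body for the search route above, using the filters shape on this branch (keys are tag display names; '|OR|' joins alternative values for the same tag):
// Illustrative only – field names follow VectorSearchSchema above; values are made up.
const exampleSearchRequest = {
  knowledgeBaseIds: 'kb-123',                              // a string or string[] both validate
  query: 'rate limiting',                                  // optional; omit for a tag-only search
  filters: { category: 'api|OR|guide', priority: 'high' }, // mapped to tag1–tag7 slots via tag definitions
  topK: 10,
}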

View File

@@ -54,7 +54,7 @@ describe('Knowledge Search Utils', () => {
const params = {
knowledgeBaseIds: ['kb-123'],
topK: 10,
structuredFilters: [],
filters: {},
}
await expect(handleTagOnlySearch(params)).rejects.toThrow(
@@ -66,14 +66,14 @@ describe('Knowledge Search Utils', () => {
const params = {
knowledgeBaseIds: ['kb-123'],
topK: 10,
structuredFilters: [{ tagSlot: 'tag1', fieldType: 'text', operator: 'eq', value: 'api' }],
filters: { tag1: 'api' },
}
// This test validates the function accepts the right parameters
// The actual database interaction is tested via route tests
expect(params.knowledgeBaseIds).toEqual(['kb-123'])
expect(params.topK).toBe(10)
expect(params.structuredFilters).toHaveLength(1)
expect(params.filters).toEqual({ tag1: 'api' })
})
})
@@ -123,7 +123,7 @@ describe('Knowledge Search Utils', () => {
const params = {
knowledgeBaseIds: ['kb-123'],
topK: 10,
structuredFilters: [],
filters: {},
queryVector: JSON.stringify([0.1, 0.2, 0.3]),
distanceThreshold: 0.8,
}
@@ -137,7 +137,7 @@ describe('Knowledge Search Utils', () => {
const params = {
knowledgeBaseIds: ['kb-123'],
topK: 10,
structuredFilters: [{ tagSlot: 'tag1', fieldType: 'text', operator: 'eq', value: 'api' }],
filters: { tag1: 'api' },
distanceThreshold: 0.8,
}
@@ -150,7 +150,7 @@ describe('Knowledge Search Utils', () => {
const params = {
knowledgeBaseIds: ['kb-123'],
topK: 10,
structuredFilters: [{ tagSlot: 'tag1', fieldType: 'text', operator: 'eq', value: 'api' }],
filters: { tag1: 'api' },
queryVector: JSON.stringify([0.1, 0.2, 0.3]),
}
@@ -163,7 +163,7 @@ describe('Knowledge Search Utils', () => {
const params = {
knowledgeBaseIds: ['kb-123'],
topK: 10,
structuredFilters: [{ tagSlot: 'tag1', fieldType: 'text', operator: 'eq', value: 'api' }],
filters: { tag1: 'api' },
queryVector: JSON.stringify([0.1, 0.2, 0.3]),
distanceThreshold: 0.8,
}
@@ -171,7 +171,7 @@ describe('Knowledge Search Utils', () => {
// This test validates the function accepts the right parameters
expect(params.knowledgeBaseIds).toEqual(['kb-123'])
expect(params.topK).toBe(10)
expect(params.structuredFilters).toHaveLength(1)
expect(params.filters).toEqual({ tag1: 'api' })
expect(params.queryVector).toBe(JSON.stringify([0.1, 0.2, 0.3]))
expect(params.distanceThreshold).toBe(0.8)
})

View File

@@ -1,7 +1,6 @@
import { db } from '@sim/db'
import { document, embedding } from '@sim/db/schema'
import { and, eq, inArray, isNull, sql } from 'drizzle-orm'
import type { StructuredFilter } from '@/lib/knowledge/types'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('KnowledgeSearchUtils')
@@ -35,7 +34,6 @@ export interface SearchResult {
content: string
documentId: string
chunkIndex: number
// Text tags
tag1: string | null
tag2: string | null
tag3: string | null
@@ -43,19 +41,6 @@ export interface SearchResult {
tag5: string | null
tag6: string | null
tag7: string | null
// Number tags (5 slots)
number1: number | null
number2: number | null
number3: number | null
number4: number | null
number5: number | null
// Date tags (2 slots)
date1: Date | null
date2: Date | null
// Boolean tags (3 slots)
boolean1: boolean | null
boolean2: boolean | null
boolean3: boolean | null
distance: number
knowledgeBaseId: string
}
@@ -63,7 +48,7 @@ export interface SearchResult {
export interface SearchParams {
knowledgeBaseIds: string[]
topK: number
structuredFilters?: StructuredFilter[]
filters?: Record<string, string>
queryVector?: string
distanceThreshold?: number
}
@@ -71,230 +56,46 @@ export interface SearchParams {
// Use shared embedding utility
export { generateSearchEmbedding } from '@/lib/knowledge/embeddings'
/** All valid tag slot keys */
const TAG_SLOT_KEYS = [
// Text tags (7 slots)
'tag1',
'tag2',
'tag3',
'tag4',
'tag5',
'tag6',
'tag7',
// Number tags (5 slots)
'number1',
'number2',
'number3',
'number4',
'number5',
// Date tags (2 slots)
'date1',
'date2',
// Boolean tags (3 slots)
'boolean1',
'boolean2',
'boolean3',
] as const
function getTagFilters(filters: Record<string, string>, embedding: any) {
return Object.entries(filters).map(([key, value]) => {
// Handle OR logic within same tag
const values = value.includes('|OR|') ? value.split('|OR|') : [value]
logger.debug(`[getTagFilters] Processing ${key}="${value}" -> values:`, values)
type TagSlotKey = (typeof TAG_SLOT_KEYS)[number]
function isTagSlotKey(key: string): key is TagSlotKey {
return TAG_SLOT_KEYS.includes(key as TagSlotKey)
}
/** Common fields selected for search results */
const getSearchResultFields = (distanceExpr: any) => ({
id: embedding.id,
content: embedding.content,
documentId: embedding.documentId,
chunkIndex: embedding.chunkIndex,
// Text tags
tag1: embedding.tag1,
tag2: embedding.tag2,
tag3: embedding.tag3,
tag4: embedding.tag4,
tag5: embedding.tag5,
tag6: embedding.tag6,
tag7: embedding.tag7,
// Number tags (5 slots)
number1: embedding.number1,
number2: embedding.number2,
number3: embedding.number3,
number4: embedding.number4,
number5: embedding.number5,
// Date tags (2 slots)
date1: embedding.date1,
date2: embedding.date2,
// Boolean tags (3 slots)
boolean1: embedding.boolean1,
boolean2: embedding.boolean2,
boolean3: embedding.boolean3,
distance: distanceExpr,
knowledgeBaseId: embedding.knowledgeBaseId,
})
/**
* Build a single SQL condition for a filter
*/
function buildFilterCondition(filter: StructuredFilter, embeddingTable: any) {
const { tagSlot, fieldType, operator, value, valueTo } = filter
if (!isTagSlotKey(tagSlot)) {
logger.debug(`[getStructuredTagFilters] Unknown tag slot: ${tagSlot}`)
return null
}
const column = embeddingTable[tagSlot]
if (!column) return null
logger.debug(
`[getStructuredTagFilters] Processing ${tagSlot} (${fieldType}) ${operator} ${value}`
)
// Handle text operators
if (fieldType === 'text') {
const stringValue = String(value)
switch (operator) {
case 'eq':
return sql`LOWER(${column}) = LOWER(${stringValue})`
case 'neq':
return sql`LOWER(${column}) != LOWER(${stringValue})`
case 'contains':
return sql`LOWER(${column}) LIKE LOWER(${`%${stringValue}%`})`
case 'not_contains':
return sql`LOWER(${column}) NOT LIKE LOWER(${`%${stringValue}%`})`
case 'starts_with':
return sql`LOWER(${column}) LIKE LOWER(${`${stringValue}%`})`
case 'ends_with':
return sql`LOWER(${column}) LIKE LOWER(${`%${stringValue}`})`
default:
return sql`LOWER(${column}) = LOWER(${stringValue})`
}
}
// Handle number operators
if (fieldType === 'number') {
const numValue = typeof value === 'number' ? value : Number.parseFloat(String(value))
if (Number.isNaN(numValue)) return null
switch (operator) {
case 'eq':
return sql`${column} = ${numValue}`
case 'neq':
return sql`${column} != ${numValue}`
case 'gt':
return sql`${column} > ${numValue}`
case 'gte':
return sql`${column} >= ${numValue}`
case 'lt':
return sql`${column} < ${numValue}`
case 'lte':
return sql`${column} <= ${numValue}`
case 'between':
if (valueTo !== undefined) {
const numValueTo =
typeof valueTo === 'number' ? valueTo : Number.parseFloat(String(valueTo))
if (Number.isNaN(numValueTo)) return sql`${column} = ${numValue}`
return sql`${column} >= ${numValue} AND ${column} <= ${numValueTo}`
}
return sql`${column} = ${numValue}`
default:
return sql`${column} = ${numValue}`
}
}
// Handle date operators - expects YYYY-MM-DD format from frontend
if (fieldType === 'date') {
const dateStr = String(value)
// Validate YYYY-MM-DD format
if (!/^\d{4}-\d{2}-\d{2}$/.test(dateStr)) {
logger.debug(`[getStructuredTagFilters] Invalid date format: ${dateStr}, expected YYYY-MM-DD`)
return null
const getColumnForKey = (key: string) => {
switch (key) {
case 'tag1':
return embedding.tag1
case 'tag2':
return embedding.tag2
case 'tag3':
return embedding.tag3
case 'tag4':
return embedding.tag4
case 'tag5':
return embedding.tag5
case 'tag6':
return embedding.tag6
case 'tag7':
return embedding.tag7
default:
return null
}
}
switch (operator) {
case 'eq':
return sql`${column}::date = ${dateStr}::date`
case 'neq':
return sql`${column}::date != ${dateStr}::date`
case 'gt':
return sql`${column}::date > ${dateStr}::date`
case 'gte':
return sql`${column}::date >= ${dateStr}::date`
case 'lt':
return sql`${column}::date < ${dateStr}::date`
case 'lte':
return sql`${column}::date <= ${dateStr}::date`
case 'between':
if (valueTo !== undefined) {
const dateStrTo = String(valueTo)
if (!/^\d{4}-\d{2}-\d{2}$/.test(dateStrTo)) {
return sql`${column}::date = ${dateStr}::date`
}
return sql`${column}::date >= ${dateStr}::date AND ${column}::date <= ${dateStrTo}::date`
}
return sql`${column}::date = ${dateStr}::date`
default:
return sql`${column}::date = ${dateStr}::date`
const column = getColumnForKey(key)
if (!column) return sql`1=1` // No-op for unknown keys
if (values.length === 1) {
// Single value - simple equality
logger.debug(`[getTagFilters] Single value filter: ${key} = ${values[0]}`)
return sql`LOWER(${column}) = LOWER(${values[0]})`
}
}
// Handle boolean operators
if (fieldType === 'boolean') {
const boolValue = value === true || value === 'true'
switch (operator) {
case 'eq':
return sql`${column} = ${boolValue}`
case 'neq':
return sql`${column} != ${boolValue}`
default:
return sql`${column} = ${boolValue}`
}
}
// Fallback to equality
return sql`${column} = ${value}`
}
/**
* Build SQL conditions from structured filters with operator support
* - Same tag multiple times: OR logic
* - Different tags: AND logic
*/
function getStructuredTagFilters(filters: StructuredFilter[], embeddingTable: any) {
// Group filters by tagSlot
const filtersBySlot = new Map<string, StructuredFilter[]>()
for (const filter of filters) {
const slot = filter.tagSlot
if (!filtersBySlot.has(slot)) {
filtersBySlot.set(slot, [])
}
filtersBySlot.get(slot)!.push(filter)
}
// Build conditions: OR within same slot, AND across different slots
const conditions: ReturnType<typeof sql>[] = []
for (const [slot, slotFilters] of filtersBySlot) {
const slotConditions = slotFilters
.map((f) => buildFilterCondition(f, embeddingTable))
.filter((c): c is ReturnType<typeof sql> => c !== null)
if (slotConditions.length === 0) continue
if (slotConditions.length === 1) {
// Single condition for this slot
conditions.push(slotConditions[0])
} else {
// Multiple conditions for same slot - OR them together
logger.debug(
`[getStructuredTagFilters] OR'ing ${slotConditions.length} conditions for ${slot}`
)
conditions.push(sql`(${sql.join(slotConditions, sql` OR `)})`)
}
}
return conditions
// Multiple values - OR logic
logger.debug(`[getTagFilters] OR filter: ${key} IN (${values.join(', ')})`)
const orConditions = values.map((v) => sql`LOWER(${column}) = LOWER(${v})`)
return sql`(${sql.join(orConditions, sql` OR `)})`
})
}
export function getQueryStrategy(kbCount: number, topK: number) {
@@ -312,10 +113,8 @@ export function getQueryStrategy(kbCount: number, topK: number) {
async function executeTagFilterQuery(
knowledgeBaseIds: string[],
structuredFilters: StructuredFilter[]
filters: Record<string, string>
): Promise<{ id: string }[]> {
const tagFilterConditions = getStructuredTagFilters(structuredFilters, embedding)
if (knowledgeBaseIds.length === 1) {
return await db
.select({ id: embedding.id })
@@ -326,7 +125,7 @@ async function executeTagFilterQuery(
eq(embedding.knowledgeBaseId, knowledgeBaseIds[0]),
eq(embedding.enabled, true),
isNull(document.deletedAt),
...tagFilterConditions
...getTagFilters(filters, embedding)
)
)
}
@@ -339,7 +138,7 @@ async function executeTagFilterQuery(
inArray(embedding.knowledgeBaseId, knowledgeBaseIds),
eq(embedding.enabled, true),
isNull(document.deletedAt),
...tagFilterConditions
...getTagFilters(filters, embedding)
)
)
}
@@ -355,11 +154,21 @@ async function executeVectorSearchOnIds(
}
return await db
.select(
getSearchResultFields(
sql<number>`${embedding.embedding} <=> ${queryVector}::vector`.as('distance')
)
)
.select({
id: embedding.id,
content: embedding.content,
documentId: embedding.documentId,
chunkIndex: embedding.chunkIndex,
tag1: embedding.tag1,
tag2: embedding.tag2,
tag3: embedding.tag3,
tag4: embedding.tag4,
tag5: embedding.tag5,
tag6: embedding.tag6,
tag7: embedding.tag7,
distance: sql<number>`${embedding.embedding} <=> ${queryVector}::vector`.as('distance'),
knowledgeBaseId: embedding.knowledgeBaseId,
})
.from(embedding)
.innerJoin(document, eq(embedding.documentId, document.id))
.where(
@@ -374,16 +183,15 @@ async function executeVectorSearchOnIds(
}
export async function handleTagOnlySearch(params: SearchParams): Promise<SearchResult[]> {
const { knowledgeBaseIds, topK, structuredFilters } = params
const { knowledgeBaseIds, topK, filters } = params
if (!structuredFilters || structuredFilters.length === 0) {
if (!filters || Object.keys(filters).length === 0) {
throw new Error('Tag filters are required for tag-only search')
}
logger.debug(`[handleTagOnlySearch] Executing tag-only search with filters:`, structuredFilters)
logger.debug(`[handleTagOnlySearch] Executing tag-only search with filters:`, filters)
const strategy = getQueryStrategy(knowledgeBaseIds.length, topK)
const tagFilterConditions = getStructuredTagFilters(structuredFilters, embedding)
if (strategy.useParallel) {
// Parallel approach for many KBs
@@ -391,7 +199,21 @@ export async function handleTagOnlySearch(params: SearchParams): Promise<SearchR
const queryPromises = knowledgeBaseIds.map(async (kbId) => {
return await db
.select(getSearchResultFields(sql<number>`0`.as('distance')))
.select({
id: embedding.id,
content: embedding.content,
documentId: embedding.documentId,
chunkIndex: embedding.chunkIndex,
tag1: embedding.tag1,
tag2: embedding.tag2,
tag3: embedding.tag3,
tag4: embedding.tag4,
tag5: embedding.tag5,
tag6: embedding.tag6,
tag7: embedding.tag7,
distance: sql<number>`0`.as('distance'), // No distance for tag-only searches
knowledgeBaseId: embedding.knowledgeBaseId,
})
.from(embedding)
.innerJoin(document, eq(embedding.documentId, document.id))
.where(
@@ -399,7 +221,7 @@ export async function handleTagOnlySearch(params: SearchParams): Promise<SearchR
eq(embedding.knowledgeBaseId, kbId),
eq(embedding.enabled, true),
isNull(document.deletedAt),
...tagFilterConditions
...getTagFilters(filters, embedding)
)
)
.limit(parallelLimit)
@@ -410,7 +232,21 @@ export async function handleTagOnlySearch(params: SearchParams): Promise<SearchR
}
// Single query for fewer KBs
return await db
.select(getSearchResultFields(sql<number>`0`.as('distance')))
.select({
id: embedding.id,
content: embedding.content,
documentId: embedding.documentId,
chunkIndex: embedding.chunkIndex,
tag1: embedding.tag1,
tag2: embedding.tag2,
tag3: embedding.tag3,
tag4: embedding.tag4,
tag5: embedding.tag5,
tag6: embedding.tag6,
tag7: embedding.tag7,
distance: sql<number>`0`.as('distance'), // No distance for tag-only searches
knowledgeBaseId: embedding.knowledgeBaseId,
})
.from(embedding)
.innerJoin(document, eq(embedding.documentId, document.id))
.where(
@@ -418,7 +254,7 @@ export async function handleTagOnlySearch(params: SearchParams): Promise<SearchR
inArray(embedding.knowledgeBaseId, knowledgeBaseIds),
eq(embedding.enabled, true),
isNull(document.deletedAt),
...tagFilterConditions
...getTagFilters(filters, embedding)
)
)
.limit(topK)
@@ -435,15 +271,27 @@ export async function handleVectorOnlySearch(params: SearchParams): Promise<Sear
const strategy = getQueryStrategy(knowledgeBaseIds.length, topK)
const distanceExpr = sql<number>`${embedding.embedding} <=> ${queryVector}::vector`.as('distance')
if (strategy.useParallel) {
// Parallel approach for many KBs
const parallelLimit = Math.ceil(topK / knowledgeBaseIds.length) + 5
const queryPromises = knowledgeBaseIds.map(async (kbId) => {
return await db
.select(getSearchResultFields(distanceExpr))
.select({
id: embedding.id,
content: embedding.content,
documentId: embedding.documentId,
chunkIndex: embedding.chunkIndex,
tag1: embedding.tag1,
tag2: embedding.tag2,
tag3: embedding.tag3,
tag4: embedding.tag4,
tag5: embedding.tag5,
tag6: embedding.tag6,
tag7: embedding.tag7,
distance: sql<number>`${embedding.embedding} <=> ${queryVector}::vector`.as('distance'),
knowledgeBaseId: embedding.knowledgeBaseId,
})
.from(embedding)
.innerJoin(document, eq(embedding.documentId, document.id))
.where(
@@ -464,7 +312,21 @@ export async function handleVectorOnlySearch(params: SearchParams): Promise<Sear
}
// Single query for fewer KBs
return await db
.select(getSearchResultFields(distanceExpr))
.select({
id: embedding.id,
content: embedding.content,
documentId: embedding.documentId,
chunkIndex: embedding.chunkIndex,
tag1: embedding.tag1,
tag2: embedding.tag2,
tag3: embedding.tag3,
tag4: embedding.tag4,
tag5: embedding.tag5,
tag6: embedding.tag6,
tag7: embedding.tag7,
distance: sql<number>`${embedding.embedding} <=> ${queryVector}::vector`.as('distance'),
knowledgeBaseId: embedding.knowledgeBaseId,
})
.from(embedding)
.innerJoin(document, eq(embedding.documentId, document.id))
.where(
@@ -480,22 +342,19 @@ export async function handleVectorOnlySearch(params: SearchParams): Promise<Sear
}
export async function handleTagAndVectorSearch(params: SearchParams): Promise<SearchResult[]> {
const { knowledgeBaseIds, topK, structuredFilters, queryVector, distanceThreshold } = params
const { knowledgeBaseIds, topK, filters, queryVector, distanceThreshold } = params
if (!structuredFilters || structuredFilters.length === 0) {
if (!filters || Object.keys(filters).length === 0) {
throw new Error('Tag filters are required for tag and vector search')
}
if (!queryVector || !distanceThreshold) {
throw new Error('Query vector and distance threshold are required for tag and vector search')
}
logger.debug(
`[handleTagAndVectorSearch] Executing tag + vector search with filters:`,
structuredFilters
)
logger.debug(`[handleTagAndVectorSearch] Executing tag + vector search with filters:`, filters)
// Step 1: Filter by tags first
const tagFilteredIds = await executeTagFilterQuery(knowledgeBaseIds, structuredFilters)
const tagFilteredIds = await executeTagFilterQuery(knowledgeBaseIds, filters)
if (tagFilteredIds.length === 0) {
logger.debug(`[handleTagAndVectorSearch] No results found after tag filtering`)
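To make the OR semantics above concrete, a conceptual illustration of how getTagFilters expands a filters record (the SQL is sketched, not drizzle's literal output):
// Illustrative only:
//   getTagFilters({ tag1: 'api', tag2: 'high|OR|urgent' }, embedding)
// yields two AND-ed conditions:
//   LOWER(tag1) = LOWER('api')
//   (LOWER(tag2) = LOWER('high') OR LOWER(tag2) = LOWER('urgent'))
// Keys outside tag1–tag7 fall back to the 1=1 no-op condition.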

View File

@@ -35,7 +35,7 @@ export interface DocumentData {
enabled: boolean
deletedAt?: Date | null
uploadedAt: Date
// Text tags
// Document tags
tag1?: string | null
tag2?: string | null
tag3?: string | null
@@ -43,19 +43,6 @@ export interface DocumentData {
tag5?: string | null
tag6?: string | null
tag7?: string | null
// Number tags (5 slots)
number1?: number | null
number2?: number | null
number3?: number | null
number4?: number | null
number5?: number | null
// Date tags (2 slots)
date1?: Date | null
date2?: Date | null
// Boolean tags (3 slots)
boolean1?: boolean | null
boolean2?: boolean | null
boolean3?: boolean | null
}
export interface EmbeddingData {
@@ -71,7 +58,7 @@ export interface EmbeddingData {
embeddingModel: string
startOffset: number
endOffset: number
// Text tags
// Tag fields for filtering
tag1?: string | null
tag2?: string | null
tag3?: string | null
@@ -79,19 +66,6 @@ export interface EmbeddingData {
tag5?: string | null
tag6?: string | null
tag7?: string | null
// Number tags (5 slots)
number1?: number | null
number2?: number | null
number3?: number | null
number4?: number | null
number5?: number | null
// Date tags (2 slots)
date1?: Date | null
date2?: Date | null
// Boolean tags (3 slots)
boolean1?: boolean | null
boolean2?: boolean | null
boolean3?: boolean | null
enabled: boolean
createdAt: Date
updatedAt: Date
@@ -258,27 +232,6 @@ export async function checkDocumentWriteAccess(
processingStartedAt: document.processingStartedAt,
processingCompletedAt: document.processingCompletedAt,
knowledgeBaseId: document.knowledgeBaseId,
// Text tags
tag1: document.tag1,
tag2: document.tag2,
tag3: document.tag3,
tag4: document.tag4,
tag5: document.tag5,
tag6: document.tag6,
tag7: document.tag7,
// Number tags (5 slots)
number1: document.number1,
number2: document.number2,
number3: document.number3,
number4: document.number4,
number5: document.number5,
// Date tags (2 slots)
date1: document.date1,
date2: document.date2,
// Boolean tags (3 slots)
boolean1: document.boolean1,
boolean2: document.boolean2,
boolean3: document.boolean3,
})
.from(document)
.where(and(eq(document.id, documentId), isNull(document.deletedAt)))

View File

@@ -1,72 +1,32 @@
import { db } from '@sim/db'
import {
permissions,
workflow,
workflowExecutionLogs,
workflowExecutionSnapshots,
} from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import { workflowExecutionLogs, workflowExecutionSnapshots } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('LogsByExecutionIdAPI')
export async function GET(
request: NextRequest,
_request: NextRequest,
{ params }: { params: Promise<{ executionId: string }> }
) {
const requestId = generateRequestId()
try {
const { executionId } = await params
const authResult = await checkHybridAuth(request, { requireWorkflowId: false })
if (!authResult.success || !authResult.userId) {
logger.warn(`[${requestId}] Unauthorized execution data access attempt for: ${executionId}`)
return NextResponse.json(
{ error: authResult.error || 'Authentication required' },
{ status: 401 }
)
}
const authenticatedUserId = authResult.userId
logger.debug(
`[${requestId}] Fetching execution data for: ${executionId} (auth: ${authResult.authType})`
)
logger.debug(`Fetching execution data for: ${executionId}`)
// Get the workflow execution log to find the snapshot
const [workflowLog] = await db
.select({
id: workflowExecutionLogs.id,
workflowId: workflowExecutionLogs.workflowId,
executionId: workflowExecutionLogs.executionId,
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
totalDurationMs: workflowExecutionLogs.totalDurationMs,
cost: workflowExecutionLogs.cost,
})
.select()
.from(workflowExecutionLogs)
.innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
.innerJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workflow.workspaceId),
eq(permissions.userId, authenticatedUserId)
)
)
.where(eq(workflowExecutionLogs.executionId, executionId))
.limit(1)
if (!workflowLog) {
logger.warn(`[${requestId}] Execution not found or access denied: ${executionId}`)
return NextResponse.json({ error: 'Workflow execution not found' }, { status: 404 })
}
// Get the workflow state snapshot
const [snapshot] = await db
.select()
.from(workflowExecutionSnapshots)
@@ -74,7 +34,6 @@ export async function GET(
.limit(1)
if (!snapshot) {
logger.warn(`[${requestId}] Workflow state snapshot not found for execution: ${executionId}`)
return NextResponse.json({ error: 'Workflow state snapshot not found' }, { status: 404 })
}
@@ -91,14 +50,14 @@ export async function GET(
},
}
logger.debug(`[${requestId}] Successfully fetched execution data for: ${executionId}`)
logger.debug(`Successfully fetched execution data for: ${executionId}`)
logger.debug(
`[${requestId}] Workflow state contains ${Object.keys((snapshot.stateData as any)?.blocks || {}).length} blocks`
`Workflow state contains ${Object.keys((snapshot.stateData as any)?.blocks || {}).length} blocks`
)
return NextResponse.json(response)
} catch (error) {
logger.error(`[${requestId}] Error fetching execution data:`, error)
logger.error('Error fetching execution data:', error)
return NextResponse.json({ error: 'Failed to fetch execution data' }, { status: 500 })
}
}

View File

@@ -0,0 +1,129 @@
import { db } from '@sim/db'
import { permissions, workflowMcpServer, workspace } from '@sim/db/schema'
import { and, eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('McpDiscoverAPI')
export const dynamic = 'force-dynamic'
/**
* GET - Discover all published MCP servers available to the authenticated user
*
* This endpoint allows external MCP clients to discover available servers
* using just their API key, without needing to know workspace IDs.
*
* Authentication: API Key (X-API-Key header) or Session
*
* Returns all published MCP servers from workspaces the user has access to.
*/
export async function GET(request: NextRequest) {
try {
// Authenticate the request
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json(
{
success: false,
error: 'Authentication required. Provide X-API-Key header with your Sim API key.',
},
{ status: 401 }
)
}
const userId = auth.userId
// Get all workspaces the user has access to via permissions table
const userWorkspacePermissions = await db
.select({ entityId: permissions.entityId })
.from(permissions)
.where(and(eq(permissions.userId, userId), eq(permissions.entityType, 'workspace')))
const workspaceIds = userWorkspacePermissions.map((w) => w.entityId)
if (workspaceIds.length === 0) {
return NextResponse.json({
success: true,
servers: [],
message: 'No workspaces found for this user',
})
}
// Get all published MCP servers from user's workspaces with tool count
const servers = await db
.select({
id: workflowMcpServer.id,
name: workflowMcpServer.name,
description: workflowMcpServer.description,
workspaceId: workflowMcpServer.workspaceId,
workspaceName: workspace.name,
isPublished: workflowMcpServer.isPublished,
publishedAt: workflowMcpServer.publishedAt,
toolCount: sql<number>`(
SELECT COUNT(*)::int
FROM "workflow_mcp_tool"
WHERE "workflow_mcp_tool"."server_id" = "workflow_mcp_server"."id"
)`.as('tool_count'),
})
.from(workflowMcpServer)
.leftJoin(workspace, eq(workflowMcpServer.workspaceId, workspace.id))
.where(
and(
eq(workflowMcpServer.isPublished, true),
sql`${workflowMcpServer.workspaceId} IN ${workspaceIds}`
)
)
.orderBy(workflowMcpServer.name)
const baseUrl = getBaseUrl()
// Format response with connection URLs
const formattedServers = servers.map((server) => ({
id: server.id,
name: server.name,
description: server.description,
workspace: {
id: server.workspaceId,
name: server.workspaceName,
},
toolCount: server.toolCount || 0,
publishedAt: server.publishedAt,
urls: {
http: `${baseUrl}/api/mcp/serve/${server.id}`,
sse: `${baseUrl}/api/mcp/serve/${server.id}/sse`,
},
}))
logger.info(`User ${userId} discovered ${formattedServers.length} MCP servers`)
return NextResponse.json({
success: true,
servers: formattedServers,
authentication: {
method: 'API Key',
header: 'X-API-Key',
description: 'Include your Sim API key in the X-API-Key header for all MCP requests',
},
usage: {
listTools: {
method: 'POST',
body: '{"jsonrpc":"2.0","id":1,"method":"tools/list"}',
},
callTool: {
method: 'POST',
body: '{"jsonrpc":"2.0","id":1,"method":"tools/call","params":{"name":"TOOL_NAME","arguments":{}}}',
},
},
})
} catch (error) {
logger.error('Error discovering MCP servers:', error)
return NextResponse.json(
{ success: false, error: 'Failed to discover MCP servers' },
{ status: 500 }
)
}
}
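// Illustrative usage only (the route path, host, and API key below are placeholders/assumptions, not defined in this file):
//   curl -H "X-API-Key: <your-sim-api-key>" https://<your-sim-host>/api/mcp/discover
// The response lists each published server the caller can reach, including the
// `urls.http` and `urls.sse` endpoints to use for subsequent JSON-RPC requests.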

View File

@@ -0,0 +1,360 @@
import { db } from '@sim/db'
import { workflow, workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { createLogger } from '@/lib/logs/console/logger'
const logger = createLogger('WorkflowMcpServeAPI')
export const dynamic = 'force-dynamic'
interface RouteParams {
serverId: string
}
/**
* MCP JSON-RPC Request
*/
interface JsonRpcRequest {
jsonrpc: '2.0'
id: string | number
method: string
params?: Record<string, unknown>
}
/**
* MCP JSON-RPC Response
*/
interface JsonRpcResponse {
jsonrpc: '2.0'
id: string | number
result?: unknown
error?: {
code: number
message: string
data?: unknown
}
}
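// Example exchange over these types (shapes only; ids and values are placeholders):
//   request:  { "jsonrpc": "2.0", "id": 1, "method": "tools/list" }
//   success:  { "jsonrpc": "2.0", "id": 1, "result": { "tools": [] } }
//   error:    { "jsonrpc": "2.0", "id": 1, "error": { "code": -32601, "message": "Method not found: foo" } }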
/**
* Create JSON-RPC success response
*/
function createJsonRpcResponse(id: string | number, result: unknown): JsonRpcResponse {
return {
jsonrpc: '2.0',
id,
result,
}
}
/**
* Create JSON-RPC error response
*/
function createJsonRpcError(
id: string | number,
code: number,
message: string,
data?: unknown
): JsonRpcResponse {
return {
jsonrpc: '2.0',
id,
error: { code, message, data },
}
}
/**
* Validate that the server exists and is published
*/
async function validateServer(serverId: string) {
const [server] = await db
.select({
id: workflowMcpServer.id,
name: workflowMcpServer.name,
workspaceId: workflowMcpServer.workspaceId,
isPublished: workflowMcpServer.isPublished,
})
.from(workflowMcpServer)
.where(eq(workflowMcpServer.id, serverId))
.limit(1)
return server
}
/**
* GET - Server info and capabilities (MCP initialize)
*/
export async function GET(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
const { serverId } = await params
try {
const server = await validateServer(serverId)
if (!server) {
return NextResponse.json({ error: 'Server not found' }, { status: 404 })
}
if (!server.isPublished) {
return NextResponse.json({ error: 'Server is not published' }, { status: 403 })
}
// Return server capabilities
return NextResponse.json({
name: server.name,
version: '1.0.0',
protocolVersion: '2024-11-05',
capabilities: {
tools: {},
},
instructions: `This MCP server exposes workflow tools from Sim Studio. Each tool executes a deployed workflow.`,
})
} catch (error) {
logger.error('Error getting MCP server info:', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
/**
* POST - Handle MCP JSON-RPC requests
*/
export async function POST(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
const { serverId } = await params
try {
// Validate server
const server = await validateServer(serverId)
if (!server) {
return NextResponse.json({ error: 'Server not found' }, { status: 404 })
}
if (!server.isPublished) {
return NextResponse.json({ error: 'Server is not published' }, { status: 403 })
}
// Authenticate the request
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Parse JSON-RPC request
const body = await request.json()
const rpcRequest = body as JsonRpcRequest
if (rpcRequest.jsonrpc !== '2.0' || !rpcRequest.method) {
return NextResponse.json(createJsonRpcError(rpcRequest?.id || 0, -32600, 'Invalid Request'), {
status: 400,
})
}
// Handle different MCP methods
switch (rpcRequest.method) {
case 'initialize':
return NextResponse.json(
createJsonRpcResponse(rpcRequest.id, {
protocolVersion: '2024-11-05',
capabilities: {
tools: {},
},
serverInfo: {
name: server.name,
version: '1.0.0',
},
})
)
case 'tools/list':
return handleToolsList(rpcRequest, serverId)
case 'tools/call': {
// Get the API key from the request to forward to the workflow execute call
const apiKey =
request.headers.get('X-API-Key') ||
request.headers.get('Authorization')?.replace('Bearer ', '')
return handleToolsCall(rpcRequest, serverId, auth.userId, server.workspaceId, apiKey)
}
case 'ping':
return NextResponse.json(createJsonRpcResponse(rpcRequest.id, {}))
default:
return NextResponse.json(
createJsonRpcError(rpcRequest.id, -32601, `Method not found: ${rpcRequest.method}`),
{ status: 404 }
)
}
} catch (error) {
logger.error('Error handling MCP request:', error)
return NextResponse.json(createJsonRpcError(0, -32603, 'Internal error'), { status: 500 })
}
}
/**
* Handle tools/list method
*/
async function handleToolsList(
rpcRequest: JsonRpcRequest,
serverId: string
): Promise<NextResponse> {
try {
const tools = await db
.select({
id: workflowMcpTool.id,
toolName: workflowMcpTool.toolName,
toolDescription: workflowMcpTool.toolDescription,
parameterSchema: workflowMcpTool.parameterSchema,
isEnabled: workflowMcpTool.isEnabled,
workflowId: workflowMcpTool.workflowId,
})
.from(workflowMcpTool)
.where(eq(workflowMcpTool.serverId, serverId))
const mcpTools = tools
.filter((tool) => tool.isEnabled)
.map((tool) => ({
name: tool.toolName,
description: tool.toolDescription || `Execute workflow tool: ${tool.toolName}`,
inputSchema: tool.parameterSchema || {
type: 'object',
properties: {
input: {
type: 'object',
description: 'Input data for the workflow',
},
},
},
}))
return NextResponse.json(createJsonRpcResponse(rpcRequest.id, { tools: mcpTools }))
} catch (error) {
logger.error('Error listing tools:', error)
return NextResponse.json(createJsonRpcError(rpcRequest.id, -32603, 'Failed to list tools'), {
status: 500,
})
}
}
/**
* Handle tools/call method
*/
async function handleToolsCall(
rpcRequest: JsonRpcRequest,
serverId: string,
userId: string,
workspaceId: string,
apiKey?: string | null
): Promise<NextResponse> {
try {
const params = rpcRequest.params as
| { name: string; arguments?: Record<string, unknown> }
| undefined
if (!params?.name) {
return NextResponse.json(
createJsonRpcError(rpcRequest.id, -32602, 'Invalid params: tool name required'),
{ status: 400 }
)
}
// Find the tool
const [tool] = await db
.select({
id: workflowMcpTool.id,
toolName: workflowMcpTool.toolName,
workflowId: workflowMcpTool.workflowId,
isEnabled: workflowMcpTool.isEnabled,
})
.from(workflowMcpTool)
.where(eq(workflowMcpTool.serverId, serverId))
.then((tools) => tools.filter((t) => t.toolName === params.name))
if (!tool) {
return NextResponse.json(
createJsonRpcError(rpcRequest.id, -32602, `Tool not found: ${params.name}`),
{ status: 404 }
)
}
if (!tool.isEnabled) {
return NextResponse.json(
createJsonRpcError(rpcRequest.id, -32602, `Tool is disabled: ${params.name}`),
{ status: 400 }
)
}
// Verify workflow is still deployed
const [workflowRecord] = await db
.select({ id: workflow.id, isDeployed: workflow.isDeployed })
.from(workflow)
.where(eq(workflow.id, tool.workflowId))
.limit(1)
if (!workflowRecord || !workflowRecord.isDeployed) {
return NextResponse.json(
createJsonRpcError(rpcRequest.id, -32603, 'Workflow is not deployed'),
{ status: 400 }
)
}
// Execute the workflow
const baseUrl = getBaseUrl()
const executeUrl = `${baseUrl}/api/workflows/${tool.workflowId}/execute`
logger.info(`Executing workflow ${tool.workflowId} via MCP tool ${params.name}`)
// Build headers for the internal execute call
const executeHeaders: Record<string, string> = {
'Content-Type': 'application/json',
}
// Forward the API key for authentication
if (apiKey) {
executeHeaders['X-API-Key'] = apiKey
}
const executeResponse = await fetch(executeUrl, {
method: 'POST',
headers: executeHeaders,
body: JSON.stringify({
input: params.arguments || {},
triggerType: 'mcp',
}),
})
const executeResult = await executeResponse.json()
if (!executeResponse.ok) {
return NextResponse.json(
createJsonRpcError(
rpcRequest.id,
-32603,
executeResult.error || 'Workflow execution failed'
),
{ status: 500 }
)
}
// Format response for MCP
const content = [
{
type: 'text',
text: JSON.stringify(executeResult.output || executeResult, null, 2),
},
]
return NextResponse.json(
createJsonRpcResponse(rpcRequest.id, {
content,
isError: !executeResult.success,
})
)
} catch (error) {
logger.error('Error calling tool:', error)
return NextResponse.json(createJsonRpcError(rpcRequest.id, -32603, 'Tool execution failed'), {
status: 500,
})
}
}
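// Illustrative tools/call request handled above (tool name and arguments are placeholders):
//   { "jsonrpc": "2.0", "id": 2, "method": "tools/call",
//     "params": { "name": "my_workflow", "arguments": { "query": "hello" } } }
// `params.arguments` is forwarded as the `input` payload to the workflow's execute endpoint.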

View File

@@ -0,0 +1,197 @@
/**
* MCP SSE/HTTP Endpoint
*
* Implements MCP protocol using the official @modelcontextprotocol/sdk
* with a Next.js-compatible transport adapter.
*/
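//
// Typical client flow against this route (illustrative; details depend on the MCP client):
//   1. GET    .../api/mcp/serve/<serverId>/sse  with an X-API-Key header  -> opens the SSE stream
//   2. POST   the same URL with JSON-RPC bodies (initialize, tools/list, tools/call)
//   3. DELETE the same URL to end the session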
import { db } from '@sim/db'
import { workflowMcpServer } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { createLogger } from '@/lib/logs/console/logger'
import { createMcpSseStream, handleMcpRequest } from '@/lib/mcp/workflow-mcp-server'
const logger = createLogger('WorkflowMcpSSE')
export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'
interface RouteParams {
serverId: string
}
/**
* Validate that the server exists and is published
*/
async function validateServer(serverId: string) {
const [server] = await db
.select({
id: workflowMcpServer.id,
name: workflowMcpServer.name,
workspaceId: workflowMcpServer.workspaceId,
isPublished: workflowMcpServer.isPublished,
})
.from(workflowMcpServer)
.where(eq(workflowMcpServer.id, serverId))
.limit(1)
return server
}
/**
* GET - SSE endpoint for MCP protocol
* Establishes a Server-Sent Events connection for MCP notifications
*/
export async function GET(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
const { serverId } = await params
try {
// Validate server exists and is published
const server = await validateServer(serverId)
if (!server) {
return NextResponse.json({ error: 'Server not found' }, { status: 404 })
}
if (!server.isPublished) {
return NextResponse.json({ error: 'Server is not published' }, { status: 403 })
}
// Check authentication
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const apiKey =
request.headers.get('X-API-Key') ||
request.headers.get('Authorization')?.replace('Bearer ', '')
// Create SSE stream using the SDK-based server
const stream = createMcpSseStream({
serverId,
serverName: server.name,
userId: auth.userId,
workspaceId: server.workspaceId,
apiKey,
})
return new NextResponse(stream, {
headers: {
...SSE_HEADERS,
'X-MCP-Server-Id': serverId,
'X-MCP-Server-Name': server.name,
},
})
} catch (error) {
logger.error('Error establishing SSE connection:', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}
/**
* POST - Handle MCP JSON-RPC messages
* This is the primary endpoint for MCP protocol messages using the SDK
*/
export async function POST(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
const { serverId } = await params
try {
// Validate server
const server = await validateServer(serverId)
if (!server) {
return NextResponse.json(
{
jsonrpc: '2.0',
id: null,
error: { code: -32000, message: 'Server not found' },
},
{ status: 404 }
)
}
if (!server.isPublished) {
return NextResponse.json(
{
jsonrpc: '2.0',
id: null,
error: { code: -32000, message: 'Server is not published' },
},
{ status: 403 }
)
}
// Check authentication
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json(
{
jsonrpc: '2.0',
id: null,
error: { code: -32000, message: 'Unauthorized' },
},
{ status: 401 }
)
}
const apiKey =
request.headers.get('X-API-Key') ||
request.headers.get('Authorization')?.replace('Bearer ', '')
// Handle the request using the SDK-based server
return handleMcpRequest(
{
serverId,
serverName: server.name,
userId: auth.userId,
workspaceId: server.workspaceId,
apiKey,
},
request
)
} catch (error) {
logger.error('Error handling MCP POST request:', error)
return NextResponse.json(
{
jsonrpc: '2.0',
id: null,
error: { code: -32603, message: 'Internal error' },
},
{ status: 500 }
)
}
}
/**
* DELETE - Handle session termination
* MCP clients may send DELETE to end a session
*/
export async function DELETE(request: NextRequest, { params }: { params: Promise<RouteParams> }) {
const { serverId } = await params
try {
// Validate server exists
const server = await validateServer(serverId)
if (!server) {
return NextResponse.json({ error: 'Server not found' }, { status: 404 })
}
// Check authentication
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
logger.info(`MCP session terminated for server ${serverId}`)
return new NextResponse(null, { status: 204 })
} catch (error) {
logger.error('Error handling MCP DELETE request:', error)
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}

View File

@@ -0,0 +1,150 @@
import { db } from '@sim/db'
import { workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { createLogger } from '@/lib/logs/console/logger'
import { withMcpAuth } from '@/lib/mcp/middleware'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
const logger = createLogger('WorkflowMcpServerPublishAPI')
export const dynamic = 'force-dynamic'
interface RouteParams {
id: string
}
/**
* POST - Publish a workflow MCP server (make it accessible via OAuth)
*/
export const POST = withMcpAuth<RouteParams>('admin')(
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
try {
const { id: serverId } = await params
logger.info(`[${requestId}] Publishing workflow MCP server: ${serverId}`)
const [existingServer] = await db
.select({ id: workflowMcpServer.id, isPublished: workflowMcpServer.isPublished })
.from(workflowMcpServer)
.where(
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
)
.limit(1)
if (!existingServer) {
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
}
if (existingServer.isPublished) {
return createMcpErrorResponse(
new Error('Server is already published'),
'Server is already published',
400
)
}
// Check if server has at least one tool
const tools = await db
.select({ id: workflowMcpTool.id })
.from(workflowMcpTool)
.where(eq(workflowMcpTool.serverId, serverId))
.limit(1)
if (tools.length === 0) {
return createMcpErrorResponse(
new Error(
'Cannot publish server without any tools. Add at least one workflow as a tool first.'
),
'Server has no tools',
400
)
}
const [updatedServer] = await db
.update(workflowMcpServer)
.set({
isPublished: true,
publishedAt: new Date(),
updatedAt: new Date(),
})
.where(eq(workflowMcpServer.id, serverId))
.returning()
const baseUrl = getBaseUrl()
const mcpServerUrl = `${baseUrl}/api/mcp/serve/${serverId}/sse`
logger.info(`[${requestId}] Successfully published workflow MCP server: ${serverId}`)
return createMcpSuccessResponse({
server: updatedServer,
mcpServerUrl,
message: 'Server published successfully. External MCP clients can now connect using OAuth.',
})
} catch (error) {
logger.error(`[${requestId}] Error publishing workflow MCP server:`, error)
return createMcpErrorResponse(
error instanceof Error ? error : new Error('Failed to publish workflow MCP server'),
'Failed to publish workflow MCP server',
500
)
}
}
)
/**
* DELETE - Unpublish a workflow MCP server
*/
export const DELETE = withMcpAuth<RouteParams>('admin')(
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
try {
const { id: serverId } = await params
logger.info(`[${requestId}] Unpublishing workflow MCP server: ${serverId}`)
const [existingServer] = await db
.select({ id: workflowMcpServer.id, isPublished: workflowMcpServer.isPublished })
.from(workflowMcpServer)
.where(
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
)
.limit(1)
if (!existingServer) {
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
}
if (!existingServer.isPublished) {
return createMcpErrorResponse(
new Error('Server is not published'),
'Server is not published',
400
)
}
const [updatedServer] = await db
.update(workflowMcpServer)
.set({
isPublished: false,
updatedAt: new Date(),
})
.where(eq(workflowMcpServer.id, serverId))
.returning()
logger.info(`[${requestId}] Successfully unpublished workflow MCP server: ${serverId}`)
return createMcpSuccessResponse({
server: updatedServer,
message: 'Server unpublished successfully. External MCP clients can no longer connect.',
})
} catch (error) {
logger.error(`[${requestId}] Error unpublishing workflow MCP server:`, error)
return createMcpErrorResponse(
error instanceof Error ? error : new Error('Failed to unpublish workflow MCP server'),
'Failed to unpublish workflow MCP server',
500
)
}
}
)

View File

@@ -0,0 +1,157 @@
import { db } from '@sim/db'
import { workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
const logger = createLogger('WorkflowMcpServerAPI')
export const dynamic = 'force-dynamic'
interface RouteParams {
id: string
}
/**
* GET - Get a specific workflow MCP server with its tools
*/
export const GET = withMcpAuth<RouteParams>('read')(
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
try {
const { id: serverId } = await params
logger.info(`[${requestId}] Getting workflow MCP server: ${serverId}`)
const [server] = await db
.select({
id: workflowMcpServer.id,
workspaceId: workflowMcpServer.workspaceId,
createdBy: workflowMcpServer.createdBy,
name: workflowMcpServer.name,
description: workflowMcpServer.description,
isPublished: workflowMcpServer.isPublished,
publishedAt: workflowMcpServer.publishedAt,
createdAt: workflowMcpServer.createdAt,
updatedAt: workflowMcpServer.updatedAt,
})
.from(workflowMcpServer)
.where(
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
)
.limit(1)
if (!server) {
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
}
const tools = await db
.select()
.from(workflowMcpTool)
.where(eq(workflowMcpTool.serverId, serverId))
logger.info(
`[${requestId}] Found workflow MCP server: ${server.name} with ${tools.length} tools`
)
return createMcpSuccessResponse({ server, tools })
} catch (error) {
logger.error(`[${requestId}] Error getting workflow MCP server:`, error)
return createMcpErrorResponse(
error instanceof Error ? error : new Error('Failed to get workflow MCP server'),
'Failed to get workflow MCP server',
500
)
}
}
)
/**
* PATCH - Update a workflow MCP server
*/
export const PATCH = withMcpAuth<RouteParams>('write')(
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
try {
const { id: serverId } = await params
const body = getParsedBody(request) || (await request.json())
logger.info(`[${requestId}] Updating workflow MCP server: ${serverId}`)
const [existingServer] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)
.where(
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
)
.limit(1)
if (!existingServer) {
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
}
const updateData: Record<string, unknown> = {
updatedAt: new Date(),
}
if (body.name !== undefined) {
updateData.name = body.name.trim()
}
if (body.description !== undefined) {
updateData.description = body.description?.trim() || null
}
const [updatedServer] = await db
.update(workflowMcpServer)
.set(updateData)
.where(eq(workflowMcpServer.id, serverId))
.returning()
logger.info(`[${requestId}] Successfully updated workflow MCP server: ${serverId}`)
return createMcpSuccessResponse({ server: updatedServer })
} catch (error) {
logger.error(`[${requestId}] Error updating workflow MCP server:`, error)
return createMcpErrorResponse(
error instanceof Error ? error : new Error('Failed to update workflow MCP server'),
'Failed to update workflow MCP server',
500
)
}
}
)
/**
* DELETE - Delete a workflow MCP server and all its tools
*/
export const DELETE = withMcpAuth<RouteParams>('admin')(
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
try {
const { id: serverId } = await params
logger.info(`[${requestId}] Deleting workflow MCP server: ${serverId}`)
const [deletedServer] = await db
.delete(workflowMcpServer)
.where(
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
)
.returning()
if (!deletedServer) {
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
}
logger.info(`[${requestId}] Successfully deleted workflow MCP server: ${serverId}`)
return createMcpSuccessResponse({ message: `Server ${serverId} deleted successfully` })
} catch (error) {
logger.error(`[${requestId}] Error deleting workflow MCP server:`, error)
return createMcpErrorResponse(
error instanceof Error ? error : new Error('Failed to delete workflow MCP server'),
'Failed to delete workflow MCP server',
500
)
}
}
)

View File

@@ -0,0 +1,178 @@
import { db } from '@sim/db'
import { workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
const logger = createLogger('WorkflowMcpToolAPI')
export const dynamic = 'force-dynamic'
interface RouteParams {
id: string
toolId: string
}
/**
* GET - Get a specific tool
*/
export const GET = withMcpAuth<RouteParams>('read')(
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
try {
const { id: serverId, toolId } = await params
logger.info(`[${requestId}] Getting tool ${toolId} from server ${serverId}`)
// Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)
.where(
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
)
.limit(1)
if (!server) {
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
}
const [tool] = await db
.select()
.from(workflowMcpTool)
.where(and(eq(workflowMcpTool.id, toolId), eq(workflowMcpTool.serverId, serverId)))
.limit(1)
if (!tool) {
return createMcpErrorResponse(new Error('Tool not found'), 'Tool not found', 404)
}
return createMcpSuccessResponse({ tool })
} catch (error) {
logger.error(`[${requestId}] Error getting tool:`, error)
return createMcpErrorResponse(
error instanceof Error ? error : new Error('Failed to get tool'),
'Failed to get tool',
500
)
}
}
)
/**
* PATCH - Update a tool's configuration
*/
export const PATCH = withMcpAuth<RouteParams>('write')(
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
try {
const { id: serverId, toolId } = await params
const body = getParsedBody(request) || (await request.json())
logger.info(`[${requestId}] Updating tool ${toolId} in server ${serverId}`)
// Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)
.where(
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
)
.limit(1)
if (!server) {
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
}
const [existingTool] = await db
.select({ id: workflowMcpTool.id })
.from(workflowMcpTool)
.where(and(eq(workflowMcpTool.id, toolId), eq(workflowMcpTool.serverId, serverId)))
.limit(1)
if (!existingTool) {
return createMcpErrorResponse(new Error('Tool not found'), 'Tool not found', 404)
}
const updateData: Record<string, unknown> = {
updatedAt: new Date(),
}
if (body.toolName !== undefined) {
updateData.toolName = body.toolName.trim()
}
if (body.toolDescription !== undefined) {
updateData.toolDescription = body.toolDescription?.trim() || null
}
if (body.parameterSchema !== undefined) {
updateData.parameterSchema = body.parameterSchema
}
if (body.isEnabled !== undefined) {
updateData.isEnabled = body.isEnabled
}
const [updatedTool] = await db
.update(workflowMcpTool)
.set(updateData)
.where(eq(workflowMcpTool.id, toolId))
.returning()
logger.info(`[${requestId}] Successfully updated tool ${toolId}`)
return createMcpSuccessResponse({ tool: updatedTool })
} catch (error) {
logger.error(`[${requestId}] Error updating tool:`, error)
return createMcpErrorResponse(
error instanceof Error ? error : new Error('Failed to update tool'),
'Failed to update tool',
500
)
}
}
)
/**
* DELETE - Remove a tool from an MCP server
*/
export const DELETE = withMcpAuth<RouteParams>('write')(
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
try {
const { id: serverId, toolId } = await params
logger.info(`[${requestId}] Deleting tool ${toolId} from server ${serverId}`)
// Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)
.where(
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
)
.limit(1)
if (!server) {
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
}
const [deletedTool] = await db
.delete(workflowMcpTool)
.where(and(eq(workflowMcpTool.id, toolId), eq(workflowMcpTool.serverId, serverId)))
.returning()
if (!deletedTool) {
return createMcpErrorResponse(new Error('Tool not found'), 'Tool not found', 404)
}
logger.info(`[${requestId}] Successfully deleted tool ${toolId}`)
return createMcpSuccessResponse({ message: `Tool ${toolId} deleted successfully` })
} catch (error) {
logger.error(`[${requestId}] Error deleting tool:`, error)
return createMcpErrorResponse(
error instanceof Error ? error : new Error('Failed to delete tool'),
'Failed to delete tool',
500
)
}
}
)

View File

@@ -0,0 +1,226 @@
import { db } from '@sim/db'
import { workflow, workflowMcpServer, workflowMcpTool } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
import { sanitizeToolName } from '@/lib/mcp/workflow-tool-schema'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { hasValidStartBlockInState } from '@/lib/workflows/triggers/trigger-utils'
const logger = createLogger('WorkflowMcpToolsAPI')
/**
 * Check if a workflow has a valid start block by loading it from the database
*/
async function hasValidStartBlock(workflowId: string): Promise<boolean> {
try {
const normalizedData = await loadWorkflowFromNormalizedTables(workflowId)
return hasValidStartBlockInState(normalizedData)
} catch (error) {
logger.warn('Error checking for start block:', error)
return false
}
}
export const dynamic = 'force-dynamic'
interface RouteParams {
id: string
}
/**
* GET - List all tools for a workflow MCP server
*/
export const GET = withMcpAuth<RouteParams>('read')(
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
try {
const { id: serverId } = await params
logger.info(`[${requestId}] Listing tools for workflow MCP server: ${serverId}`)
// Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)
.where(
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
)
.limit(1)
if (!server) {
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
}
// Get tools with workflow details
const tools = await db
.select({
id: workflowMcpTool.id,
serverId: workflowMcpTool.serverId,
workflowId: workflowMcpTool.workflowId,
toolName: workflowMcpTool.toolName,
toolDescription: workflowMcpTool.toolDescription,
parameterSchema: workflowMcpTool.parameterSchema,
isEnabled: workflowMcpTool.isEnabled,
createdAt: workflowMcpTool.createdAt,
updatedAt: workflowMcpTool.updatedAt,
workflowName: workflow.name,
workflowDescription: workflow.description,
isDeployed: workflow.isDeployed,
})
.from(workflowMcpTool)
.leftJoin(workflow, eq(workflowMcpTool.workflowId, workflow.id))
.where(eq(workflowMcpTool.serverId, serverId))
logger.info(`[${requestId}] Found ${tools.length} tools for server ${serverId}`)
return createMcpSuccessResponse({ tools })
} catch (error) {
logger.error(`[${requestId}] Error listing tools:`, error)
return createMcpErrorResponse(
error instanceof Error ? error : new Error('Failed to list tools'),
'Failed to list tools',
500
)
}
}
)
/**
* POST - Add a workflow as a tool to an MCP server
*/
export const POST = withMcpAuth<RouteParams>('write')(
async (request: NextRequest, { userId, workspaceId, requestId }, { params }) => {
try {
const { id: serverId } = await params
const body = getParsedBody(request) || (await request.json())
logger.info(`[${requestId}] Adding tool to workflow MCP server: ${serverId}`, {
workflowId: body.workflowId,
})
if (!body.workflowId) {
return createMcpErrorResponse(
new Error('Missing required field: workflowId'),
'Missing required field',
400
)
}
// Verify server exists and belongs to workspace
const [server] = await db
.select({ id: workflowMcpServer.id })
.from(workflowMcpServer)
.where(
and(eq(workflowMcpServer.id, serverId), eq(workflowMcpServer.workspaceId, workspaceId))
)
.limit(1)
if (!server) {
return createMcpErrorResponse(new Error('Server not found'), 'Server not found', 404)
}
// Verify workflow exists and is deployed
const [workflowRecord] = await db
.select({
id: workflow.id,
name: workflow.name,
description: workflow.description,
isDeployed: workflow.isDeployed,
workspaceId: workflow.workspaceId,
})
.from(workflow)
.where(eq(workflow.id, body.workflowId))
.limit(1)
if (!workflowRecord) {
return createMcpErrorResponse(new Error('Workflow not found'), 'Workflow not found', 404)
}
// Verify workflow belongs to the same workspace
if (workflowRecord.workspaceId !== workspaceId) {
return createMcpErrorResponse(
new Error('Workflow does not belong to this workspace'),
'Access denied',
403
)
}
if (!workflowRecord.isDeployed) {
return createMcpErrorResponse(
new Error('Workflow must be deployed before adding as a tool'),
'Workflow not deployed',
400
)
}
// Verify workflow has a valid start block
const hasStartBlock = await hasValidStartBlock(body.workflowId)
if (!hasStartBlock) {
return createMcpErrorResponse(
new Error('Workflow must have a Start block to be used as an MCP tool'),
'No start block found',
400
)
}
// Check if tool already exists for this workflow
const [existingTool] = await db
.select({ id: workflowMcpTool.id })
.from(workflowMcpTool)
.where(
and(
eq(workflowMcpTool.serverId, serverId),
eq(workflowMcpTool.workflowId, body.workflowId)
)
)
.limit(1)
if (existingTool) {
return createMcpErrorResponse(
new Error('This workflow is already added as a tool to this server'),
'Tool already exists',
409
)
}
// Generate tool name and description
const toolName = body.toolName?.trim() || sanitizeToolName(workflowRecord.name)
const toolDescription =
body.toolDescription?.trim() ||
workflowRecord.description ||
`Execute ${workflowRecord.name} workflow`
// Create the tool
const toolId = crypto.randomUUID()
const [tool] = await db
.insert(workflowMcpTool)
.values({
id: toolId,
serverId,
workflowId: body.workflowId,
toolName,
toolDescription,
parameterSchema: body.parameterSchema || {},
isEnabled: true,
createdAt: new Date(),
updatedAt: new Date(),
})
.returning()
logger.info(
`[${requestId}] Successfully added tool ${toolName} (workflow: ${body.workflowId}) to server ${serverId}`
)
return createMcpSuccessResponse({ tool }, 201)
} catch (error) {
logger.error(`[${requestId}] Error adding tool:`, error)
return createMcpErrorResponse(
error instanceof Error ? error : new Error('Failed to add tool'),
'Failed to add tool',
500
)
}
}
)
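// Illustrative request body for the POST handler above (IDs and values are placeholders):
//   { "workflowId": "<deployed-workflow-id>", "toolName": "my_workflow", "toolDescription": "Optional text" }
// toolName and toolDescription are optional; when omitted they default to the sanitized
// workflow name and the workflow's description.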

View File

@@ -0,0 +1,107 @@
import { db } from '@sim/db'
import { workflowMcpServer } from '@sim/db/schema'
import { eq, sql } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
const logger = createLogger('WorkflowMcpServersAPI')
export const dynamic = 'force-dynamic'
/**
* GET - List all workflow MCP servers for the workspace
*/
export const GET = withMcpAuth('read')(
async (request: NextRequest, { userId, workspaceId, requestId }) => {
try {
logger.info(`[${requestId}] Listing workflow MCP servers for workspace ${workspaceId}`)
const servers = await db
.select({
id: workflowMcpServer.id,
workspaceId: workflowMcpServer.workspaceId,
createdBy: workflowMcpServer.createdBy,
name: workflowMcpServer.name,
description: workflowMcpServer.description,
isPublished: workflowMcpServer.isPublished,
publishedAt: workflowMcpServer.publishedAt,
createdAt: workflowMcpServer.createdAt,
updatedAt: workflowMcpServer.updatedAt,
toolCount: sql<number>`(
SELECT COUNT(*)::int
FROM "workflow_mcp_tool"
WHERE "workflow_mcp_tool"."server_id" = "workflow_mcp_server"."id"
)`.as('tool_count'),
})
.from(workflowMcpServer)
.where(eq(workflowMcpServer.workspaceId, workspaceId))
logger.info(
`[${requestId}] Listed ${servers.length} workflow MCP servers for workspace ${workspaceId}`
)
return createMcpSuccessResponse({ servers })
} catch (error) {
logger.error(`[${requestId}] Error listing workflow MCP servers:`, error)
return createMcpErrorResponse(
error instanceof Error ? error : new Error('Failed to list workflow MCP servers'),
'Failed to list workflow MCP servers',
500
)
}
}
)
/**
* POST - Create a new workflow MCP server
*/
export const POST = withMcpAuth('write')(
async (request: NextRequest, { userId, workspaceId, requestId }) => {
try {
const body = getParsedBody(request) || (await request.json())
logger.info(`[${requestId}] Creating workflow MCP server:`, {
name: body.name,
workspaceId,
})
if (!body.name) {
return createMcpErrorResponse(
new Error('Missing required field: name'),
'Missing required field',
400
)
}
const serverId = crypto.randomUUID()
const [server] = await db
.insert(workflowMcpServer)
.values({
id: serverId,
workspaceId,
createdBy: userId,
name: body.name.trim(),
description: body.description?.trim() || null,
isPublished: false,
createdAt: new Date(),
updatedAt: new Date(),
})
.returning()
logger.info(
`[${requestId}] Successfully created workflow MCP server: ${body.name} (ID: ${serverId})`
)
return createMcpSuccessResponse({ server }, 201)
} catch (error) {
logger.error(`[${requestId}] Error creating workflow MCP server:`, error)
return createMcpErrorResponse(
error instanceof Error ? error : new Error('Failed to create workflow MCP server'),
'Failed to create workflow MCP server',
500
)
}
}
)
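// Illustrative request body for the POST handler above (values are placeholders):
//   { "name": "My workflow tools", "description": "Optional description" }
// Only `name` is required; the server is created unpublished until the publish endpoint is called.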

View File

@@ -3,10 +3,8 @@ import { memory, workflowBlocks } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { getWorkflowAccessContext } from '@/lib/workflows/utils'
const logger = createLogger('MemoryByIdAPI')
@@ -67,65 +65,6 @@ const memoryPutBodySchema = z.object({
workflowId: z.string().uuid('Invalid workflow ID format'),
})
/**
* Validates authentication and workflow access for memory operations
* @param request - The incoming request
* @param workflowId - The workflow ID to check access for
* @param requestId - Request ID for logging
* @param action - 'read' for GET, 'write' for PUT/DELETE
* @returns Object with userId if successful, or error response if failed
*/
async function validateMemoryAccess(
request: NextRequest,
workflowId: string,
requestId: string,
action: 'read' | 'write'
): Promise<{ userId: string } | { error: NextResponse }> {
const authResult = await checkHybridAuth(request, {
requireWorkflowId: false,
})
if (!authResult.success || !authResult.userId) {
logger.warn(`[${requestId}] Unauthorized memory ${action} attempt`)
return {
error: NextResponse.json(
{ success: false, error: { message: 'Authentication required' } },
{ status: 401 }
),
}
}
const accessContext = await getWorkflowAccessContext(workflowId, authResult.userId)
if (!accessContext) {
logger.warn(`[${requestId}] Workflow ${workflowId} not found`)
return {
error: NextResponse.json(
{ success: false, error: { message: 'Workflow not found' } },
{ status: 404 }
),
}
}
const { isOwner, workspacePermission } = accessContext
const hasAccess =
action === 'read'
? isOwner || workspacePermission !== null
: isOwner || workspacePermission === 'write' || workspacePermission === 'admin'
if (!hasAccess) {
logger.warn(
`[${requestId}] User ${authResult.userId} denied ${action} access to workflow ${workflowId}`
)
return {
error: NextResponse.json(
{ success: false, error: { message: 'Access denied' } },
{ status: 403 }
),
}
}
return { userId: authResult.userId }
}
export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'
@@ -162,11 +101,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
const { workflowId: validatedWorkflowId } = validation.data
const accessCheck = await validateMemoryAccess(request, validatedWorkflowId, requestId, 'read')
if ('error' in accessCheck) {
return accessCheck.error
}
const memories = await db
.select()
.from(memory)
@@ -269,11 +203,6 @@ export async function DELETE(
const { workflowId: validatedWorkflowId } = validation.data
const accessCheck = await validateMemoryAccess(request, validatedWorkflowId, requestId, 'write')
if ('error' in accessCheck) {
return accessCheck.error
}
const existingMemory = await db
.select({ id: memory.id })
.from(memory)
@@ -367,11 +296,6 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
)
}
const accessCheck = await validateMemoryAccess(request, validatedWorkflowId, requestId, 'write')
if ('error' in accessCheck) {
return accessCheck.error
}
const existingMemories = await db
.select()
.from(memory)

View File

@@ -28,7 +28,7 @@ const updateInvitationSchema = z.object({
// Get invitation details
export async function GET(
_request: NextRequest,
_req: NextRequest,
{ params }: { params: Promise<{ id: string; invitationId: string }> }
) {
const { id: organizationId, invitationId } = await params

View File

@@ -1,19 +1,16 @@
import { db } from '@sim/db'
import { templates } from '@sim/db/schema'
import { templates, user } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { verifySuperUser } from '@/lib/templates/permissions'
const logger = createLogger('TemplateApprovalAPI')
export const revalidate = 0
/**
* POST /api/templates/[id]/approve - Approve a template (super users only)
*/
// POST /api/templates/[id]/approve - Approve a template (super users only)
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
const { id } = await params
@@ -25,18 +22,23 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { isSuperUser } = await verifySuperUser(session.user.id)
if (!isSuperUser) {
// Check if user is a super user
const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
if (!currentUser[0]?.isSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to approve template: ${id}`)
return NextResponse.json({ error: 'Only super users can approve templates' }, { status: 403 })
}
// Check if template exists
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
if (existingTemplate.length === 0) {
logger.warn(`[${requestId}] Template not found for approval: ${id}`)
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
}
// Update template status to approved
await db
.update(templates)
.set({ status: 'approved', updatedAt: new Date() })
@@ -54,11 +56,9 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
}
}
/**
* DELETE /api/templates/[id]/approve - Unapprove a template (super users only)
*/
// DELETE /api/templates/[id]/approve - Unapprove a template (super users only)
export async function DELETE(
_request: NextRequest,
request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
const requestId = generateRequestId()
@@ -71,18 +71,23 @@ export async function DELETE(
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { isSuperUser } = await verifySuperUser(session.user.id)
if (!isSuperUser) {
// Check if user is a super user
const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
if (!currentUser[0]?.isSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
}
// Check if template exists
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
if (existingTemplate.length === 0) {
logger.warn(`[${requestId}] Template not found for rejection: ${id}`)
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
}
// Update template status to rejected
await db
.update(templates)
.set({ status: 'rejected', updatedAt: new Date() })

View File

@@ -1,142 +0,0 @@
import { db } from '@sim/db'
import { templates } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { createLogger } from '@/lib/logs/console/logger'
import { verifyTemplateOwnership } from '@/lib/templates/permissions'
import { uploadFile } from '@/lib/uploads/core/storage-service'
import { isValidPng } from '@/lib/uploads/utils/validation'
const logger = createLogger('TemplateOGImageAPI')
/**
* PUT /api/templates/[id]/og-image
* Upload a pre-generated OG image for a template.
* Accepts base64-encoded image data in the request body.
*/
export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
const { id } = await params
try {
const session = await getSession()
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthorized OG image upload attempt for template: ${id}`)
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { authorized, error, status } = await verifyTemplateOwnership(
id,
session.user.id,
'admin'
)
if (!authorized) {
logger.warn(`[${requestId}] User denied permission to upload OG image for template ${id}`)
return NextResponse.json({ error }, { status: status || 403 })
}
const body = await request.json()
const { imageData } = body
if (!imageData || typeof imageData !== 'string') {
return NextResponse.json(
{ error: 'Missing or invalid imageData (expected base64 string)' },
{ status: 400 }
)
}
const base64Data = imageData.includes(',') ? imageData.split(',')[1] : imageData
const imageBuffer = Buffer.from(base64Data, 'base64')
if (!isValidPng(imageBuffer)) {
return NextResponse.json({ error: 'Invalid PNG image data' }, { status: 400 })
}
const maxSize = 5 * 1024 * 1024
if (imageBuffer.length > maxSize) {
return NextResponse.json({ error: 'Image too large. Maximum size is 5MB.' }, { status: 400 })
}
const timestamp = Date.now()
const storageKey = `og-images/templates/${id}/${timestamp}.png`
logger.info(`[${requestId}] Uploading OG image for template ${id}: ${storageKey}`)
const uploadResult = await uploadFile({
file: imageBuffer,
fileName: storageKey,
contentType: 'image/png',
context: 'og-images',
preserveKey: true,
customKey: storageKey,
})
const baseUrl = getBaseUrl()
const ogImageUrl = `${baseUrl}${uploadResult.path}?context=og-images`
await db
.update(templates)
.set({
ogImageUrl,
updatedAt: new Date(),
})
.where(eq(templates.id, id))
logger.info(`[${requestId}] Successfully uploaded OG image for template ${id}: ${ogImageUrl}`)
return NextResponse.json({
success: true,
ogImageUrl,
})
} catch (error: unknown) {
logger.error(`[${requestId}] Error uploading OG image for template ${id}:`, error)
return NextResponse.json({ error: 'Failed to upload OG image' }, { status: 500 })
}
}
/**
* DELETE /api/templates/[id]/og-image
* Remove the OG image for a template.
*/
export async function DELETE(
_request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
const requestId = generateRequestId()
const { id } = await params
try {
const session = await getSession()
if (!session?.user?.id) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { authorized, error, status } = await verifyTemplateOwnership(
id,
session.user.id,
'admin'
)
if (!authorized) {
logger.warn(`[${requestId}] User denied permission to delete OG image for template ${id}`)
return NextResponse.json({ error }, { status: status || 403 })
}
await db
.update(templates)
.set({
ogImageUrl: null,
updatedAt: new Date(),
})
.where(eq(templates.id, id))
logger.info(`[${requestId}] Removed OG image for template ${id}`)
return NextResponse.json({ success: true })
} catch (error: unknown) {
logger.error(`[${requestId}] Error removing OG image for template ${id}:`, error)
return NextResponse.json({ error: 'Failed to remove OG image' }, { status: 500 })
}
}

View File

@@ -1,19 +1,16 @@
import { db } from '@sim/db'
import { templates } from '@sim/db/schema'
import { templates, user } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { verifySuperUser } from '@/lib/templates/permissions'
const logger = createLogger('TemplateRejectionAPI')
export const revalidate = 0
/**
* POST /api/templates/[id]/reject - Reject a template (super users only)
*/
// POST /api/templates/[id]/reject - Reject a template (super users only)
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
const { id } = await params
@@ -25,18 +22,23 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
const { isSuperUser } = await verifySuperUser(session.user.id)
if (!isSuperUser) {
// Check if user is a super user
const currentUser = await db.select().from(user).where(eq(user.id, session.user.id)).limit(1)
if (!currentUser[0]?.isSuperUser) {
logger.warn(`[${requestId}] Non-super user attempted to reject template: ${id}`)
return NextResponse.json({ error: 'Only super users can reject templates' }, { status: 403 })
}
// Check if template exists
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
if (existingTemplate.length === 0) {
logger.warn(`[${requestId}] Template not found for rejection: ${id}`)
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
}
// Update template status to rejected
await db
.update(templates)
.set({ status: 'rejected', updatedAt: new Date() })

View File

@@ -1,6 +1,6 @@
import { db } from '@sim/db'
import { templateCreators, templates, workflow } from '@sim/db/schema'
import { eq, sql } from 'drizzle-orm'
import { member, templateCreators, templates, workflow } from '@sim/db/schema'
import { and, eq, or, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
@@ -15,6 +15,7 @@ const logger = createLogger('TemplateByIdAPI')
export const revalidate = 0
// GET /api/templates/[id] - Retrieve a single template by ID
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = generateRequestId()
const { id } = await params
@@ -24,6 +25,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
logger.debug(`[${requestId}] Fetching template: ${id}`)
// Fetch the template by ID with creator info
const result = await db
.select({
template: templates,
@@ -45,10 +47,12 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
creator: creator || undefined,
}
// Only show approved templates to non-authenticated users
if (!session?.user?.id && template.status !== 'approved') {
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
}
// Check if user has starred (only if authenticated)
let isStarred = false
if (session?.user?.id) {
const { templateStars } = await import('@sim/db/schema')
@@ -76,6 +80,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
logger.debug(`[${requestId}] Incremented view count for template: ${id}`)
} catch (viewError) {
// Log the error but don't fail the request
logger.warn(`[${requestId}] Failed to increment view count for template: ${id}`, viewError)
}
}
@@ -133,6 +138,7 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
const { name, details, creatorId, tags, updateState } = validationResult.data
// Check if template exists
const existingTemplate = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
if (existingTemplate.length === 0) {
@@ -140,54 +146,32 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
return NextResponse.json({ error: 'Template not found' }, { status: 404 })
}
const template = existingTemplate[0]
if (!template.creatorId) {
logger.warn(`[${requestId}] Template ${id} has no creator, denying update`)
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
const { verifyCreatorPermission } = await import('@/lib/templates/permissions')
const { hasPermission, error: permissionError } = await verifyCreatorPermission(
session.user.id,
template.creatorId,
'admin'
)
if (!hasPermission) {
logger.warn(`[${requestId}] User denied permission to update template ${id}`)
return NextResponse.json({ error: permissionError || 'Access denied' }, { status: 403 })
}
// No permission check needed - template updates only happen from within the workspace
// where the user is already editing the connected workflow
// Prepare update data - only include fields that were provided
const updateData: any = {
updatedAt: new Date(),
}
// Only update fields that were provided
if (name !== undefined) updateData.name = name
if (details !== undefined) updateData.details = details
if (tags !== undefined) updateData.tags = tags
if (creatorId !== undefined) updateData.creatorId = creatorId
if (updateState && template.workflowId) {
const { verifyWorkflowAccess } = await import('@/socket-server/middleware/permissions')
const { hasAccess: hasWorkflowAccess } = await verifyWorkflowAccess(
session.user.id,
template.workflowId
)
if (!hasWorkflowAccess) {
logger.warn(`[${requestId}] User denied workflow access for state sync on template ${id}`)
return NextResponse.json({ error: 'Access denied to workflow' }, { status: 403 })
}
// Only update the state if explicitly requested and the template has a connected workflow
if (updateState && existingTemplate[0].workflowId) {
// Load the current workflow state from normalized tables
const { loadWorkflowFromNormalizedTables } = await import('@/lib/workflows/persistence/utils')
const normalizedData = await loadWorkflowFromNormalizedTables(template.workflowId)
const normalizedData = await loadWorkflowFromNormalizedTables(existingTemplate[0].workflowId)
if (normalizedData) {
// Also fetch workflow variables
const [workflowRecord] = await db
.select({ variables: workflow.variables })
.from(workflow)
.where(eq(workflow.id, template.workflowId))
.where(eq(workflow.id, existingTemplate[0].workflowId))
.limit(1)
const currentState = {
@@ -199,15 +183,17 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
lastSaved: Date.now(),
}
// Extract credential requirements from the new state
const requiredCredentials = extractRequiredCredentials(currentState)
// Sanitize the state before storing
const sanitizedState = sanitizeCredentials(currentState)
updateData.state = sanitizedState
updateData.requiredCredentials = requiredCredentials
logger.info(
`[${requestId}] Updating template state and credentials from current workflow: ${template.workflowId}`
`[${requestId}] Updating template state and credentials from current workflow: ${existingTemplate[0].workflowId}`
)
} else {
logger.warn(`[${requestId}] Could not load workflow state for template: ${id}`)
@@ -247,6 +233,7 @@ export async function DELETE(
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
}
// Fetch template
const existing = await db.select().from(templates).where(eq(templates.id, id)).limit(1)
if (existing.length === 0) {
logger.warn(`[${requestId}] Template not found for delete: ${id}`)
@@ -255,21 +242,41 @@ export async function DELETE(
const template = existing[0]
if (!template.creatorId) {
logger.warn(`[${requestId}] Template ${id} has no creator, denying delete`)
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
// Permission: Only admin/owner of creator profile can delete
if (template.creatorId) {
const creatorProfile = await db
.select()
.from(templateCreators)
.where(eq(templateCreators.id, template.creatorId))
.limit(1)
const { verifyCreatorPermission } = await import('@/lib/templates/permissions')
const { hasPermission, error: permissionError } = await verifyCreatorPermission(
session.user.id,
template.creatorId,
'admin'
)
if (creatorProfile.length > 0) {
const creator = creatorProfile[0]
let hasPermission = false
if (!hasPermission) {
logger.warn(`[${requestId}] User denied permission to delete template ${id}`)
return NextResponse.json({ error: permissionError || 'Access denied' }, { status: 403 })
if (creator.referenceType === 'user') {
hasPermission = creator.referenceId === session.user.id
} else if (creator.referenceType === 'organization') {
// For delete, require admin/owner role
const membership = await db
.select()
.from(member)
.where(
and(
eq(member.userId, session.user.id),
eq(member.organizationId, creator.referenceId),
or(eq(member.role, 'admin'), eq(member.role, 'owner'))
)
)
.limit(1)
hasPermission = membership.length > 0
}
if (!hasPermission) {
logger.warn(`[${requestId}] User denied permission to delete template ${id}`)
return NextResponse.json({ error: 'Access denied' }, { status: 403 })
}
}
}
await db.delete(templates).where(eq(templates.id, id))

View File

@@ -1,5 +1,6 @@
import { db } from '@sim/db'
import {
member,
templateCreators,
templateStars,
templates,
@@ -203,18 +204,51 @@ export async function POST(request: NextRequest) {
return NextResponse.json({ error: 'Workflow not found' }, { status: 404 })
}
const { verifyCreatorPermission } = await import('@/lib/templates/permissions')
const { hasPermission, error: permissionError } = await verifyCreatorPermission(
session.user.id,
data.creatorId,
'member'
)
// Validate creator profile - required for all templates
const creatorProfile = await db
.select()
.from(templateCreators)
.where(eq(templateCreators.id, data.creatorId))
.limit(1)
if (!hasPermission) {
logger.warn(`[${requestId}] User cannot use creator profile: ${data.creatorId}`)
return NextResponse.json({ error: permissionError || 'Access denied' }, { status: 403 })
if (creatorProfile.length === 0) {
logger.warn(`[${requestId}] Creator profile not found: ${data.creatorId}`)
return NextResponse.json({ error: 'Creator profile not found' }, { status: 404 })
}
const creator = creatorProfile[0]
// Verify user has permission to use this creator profile
if (creator.referenceType === 'user') {
if (creator.referenceId !== session.user.id) {
logger.warn(`[${requestId}] User cannot use creator profile: ${data.creatorId}`)
return NextResponse.json(
{ error: 'You do not have permission to use this creator profile' },
{ status: 403 }
)
}
} else if (creator.referenceType === 'organization') {
// Verify user is a member of the organization
const membership = await db
.select()
.from(member)
.where(
and(eq(member.userId, session.user.id), eq(member.organizationId, creator.referenceId))
)
.limit(1)
if (membership.length === 0) {
logger.warn(
`[${requestId}] User not a member of organization for creator: ${data.creatorId}`
)
return NextResponse.json(
{ error: 'You must be a member of the organization to use its creator profile' },
{ status: 403 }
)
}
}
// Create the template
const templateId = uuidv4()
const now = new Date()

View File

@@ -1,7 +1,6 @@
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -109,14 +108,6 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Failed to obtain valid access token' }, { status: 401 })
}
if (folderId) {
const folderIdValidation = validateAlphanumericId(folderId, 'folderId', 50)
if (!folderIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid folderId`, { error: folderIdValidation.error })
return NextResponse.json({ error: folderIdValidation.error }, { status: 400 })
}
}
const qParts: string[] = ['trashed = false']
if (folderId) {
qParts.push(`'${escapeForDriveQuery(folderId)}' in parents`)

View File

@@ -1,7 +1,6 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
@@ -51,29 +50,6 @@ export async function POST(request: NextRequest) {
.map((id) => id.trim())
.filter((id) => id.length > 0)
for (const labelId of labelIds) {
const labelIdValidation = validateAlphanumericId(labelId, 'labelId', 255)
if (!labelIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid label ID: ${labelIdValidation.error}`)
return NextResponse.json(
{
success: false,
error: labelIdValidation.error,
},
{ status: 400 }
)
}
}
const messageIdValidation = validateAlphanumericId(validatedData.messageId, 'messageId', 255)
if (!messageIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid message ID: ${messageIdValidation.error}`)
return NextResponse.json(
{ success: false, error: messageIdValidation.error },
{ status: 400 }
)
}
const gmailResponse = await fetch(
`${GMAIL_API_BASE}/messages/${validatedData.messageId}/modify`,
{

View File

@@ -3,7 +3,6 @@ import { account } from '@sim/db/schema'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -39,12 +38,6 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
}
const credentialIdValidation = validateAlphanumericId(credentialId, 'credentialId', 255)
if (!credentialIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid credential ID: ${credentialIdValidation.error}`)
return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 })
}
let credentials = await db
.select()
.from(account)

View File

@@ -1,7 +1,6 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
@@ -54,29 +53,6 @@ export async function POST(request: NextRequest) {
.map((id) => id.trim())
.filter((id) => id.length > 0)
for (const labelId of labelIds) {
const labelIdValidation = validateAlphanumericId(labelId, 'labelId', 255)
if (!labelIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid label ID: ${labelIdValidation.error}`)
return NextResponse.json(
{
success: false,
error: labelIdValidation.error,
},
{ status: 400 }
)
}
}
const messageIdValidation = validateAlphanumericId(validatedData.messageId, 'messageId', 255)
if (!messageIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid message ID: ${messageIdValidation.error}`)
return NextResponse.json(
{ success: false, error: messageIdValidation.error },
{ status: 400 }
)
}
const gmailResponse = await fetch(
`${GMAIL_API_BASE}/messages/${validatedData.messageId}/modify`,
{

View File

@@ -1,6 +1,5 @@
import { type NextRequest, NextResponse } from 'next/server'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { validateUUID } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -26,6 +25,7 @@ export async function GET(request: NextRequest) {
logger.info(`[${requestId}] Google Calendar calendars request received`)
try {
// Get the credential ID from the query params
const { searchParams } = new URL(request.url)
const credentialId = searchParams.get('credentialId')
const workflowId = searchParams.get('workflowId') || undefined
@@ -34,25 +34,12 @@ export async function GET(request: NextRequest) {
logger.warn(`[${requestId}] Missing credentialId parameter`)
return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
}
const credentialValidation = validateUUID(credentialId, 'credentialId')
if (!credentialValidation.isValid) {
logger.warn(`[${requestId}] Invalid credentialId format`, { credentialId })
return NextResponse.json({ error: credentialValidation.error }, { status: 400 })
}
if (workflowId) {
const workflowValidation = validateUUID(workflowId, 'workflowId')
if (!workflowValidation.isValid) {
logger.warn(`[${requestId}] Invalid workflowId format`, { workflowId })
return NextResponse.json({ error: workflowValidation.error }, { status: 400 })
}
}
const authz = await authorizeCredentialUse(request, { credentialId, workflowId })
if (!authz.ok || !authz.credentialOwnerUserId) {
return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 })
}
// Refresh access token if needed using the utility function
const accessToken = await refreshAccessTokenIfNeeded(
credentialId,
authz.credentialOwnerUserId,
@@ -63,6 +50,7 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Failed to obtain valid access token' }, { status: 401 })
}
// Fetch calendars from Google Calendar API
logger.info(`[${requestId}] Fetching calendars from Google Calendar API`)
const calendarResponse = await fetch(
'https://www.googleapis.com/calendar/v3/users/me/calendarList',
@@ -93,6 +81,7 @@ export async function GET(request: NextRequest) {
const data = await calendarResponse.json()
const calendars: CalendarListItem[] = data.items || []
// Sort calendars with primary first, then alphabetically
calendars.sort((a, b) => {
if (a.primary && !b.primary) return -1
if (!a.primary && b.primary) return 1

View File

@@ -20,12 +20,6 @@ export async function POST(request: Request) {
cloudId: providedCloudId,
issueType,
parent,
labels,
duedate,
reporter,
environment,
customFieldId,
customFieldValue,
} = await request.json()
if (!domain) {
@@ -100,57 +94,17 @@ export async function POST(request: Request) {
}
if (priority !== undefined && priority !== null && priority !== '') {
const isNumericId = /^\d+$/.test(priority)
fields.priority = isNumericId ? { id: priority } : { name: priority }
}
if (labels !== undefined && labels !== null && Array.isArray(labels) && labels.length > 0) {
fields.labels = labels
}
if (duedate !== undefined && duedate !== null && duedate !== '') {
fields.duedate = duedate
}
if (reporter !== undefined && reporter !== null && reporter !== '') {
fields.reporter = {
id: reporter,
fields.priority = {
name: priority,
}
}
if (environment !== undefined && environment !== null && environment !== '') {
fields.environment = {
type: 'doc',
version: 1,
content: [
{
type: 'paragraph',
content: [
{
type: 'text',
text: environment,
},
],
},
],
if (assignee !== undefined && assignee !== null && assignee !== '') {
fields.assignee = {
id: assignee,
}
}
if (
customFieldId !== undefined &&
customFieldId !== null &&
customFieldId !== '' &&
customFieldValue !== undefined &&
customFieldValue !== null &&
customFieldValue !== ''
) {
const fieldId = customFieldId.startsWith('customfield_')
? customFieldId
: `customfield_${customFieldId}`
fields[fieldId] = customFieldValue
}
const body = { fields }
const response = await fetch(url, {
@@ -178,47 +132,16 @@ export async function POST(request: Request) {
}
const responseData = await response.json()
const issueKey = responseData.key || 'unknown'
logger.info('Successfully created Jira issue:', issueKey)
let assigneeId: string | undefined
if (assignee !== undefined && assignee !== null && assignee !== '') {
const assignUrl = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${issueKey}/assignee`
logger.info('Assigning issue to:', assignee)
const assignResponse = await fetch(assignUrl, {
method: 'PUT',
headers: {
Authorization: `Bearer ${accessToken}`,
Accept: 'application/json',
'Content-Type': 'application/json',
},
body: JSON.stringify({
accountId: assignee,
}),
})
if (!assignResponse.ok) {
const assignErrorText = await assignResponse.text()
logger.warn('Failed to assign issue (issue was created successfully):', {
status: assignResponse.status,
error: assignErrorText,
})
} else {
assigneeId = assignee
logger.info('Successfully assigned issue to:', assignee)
}
}
logger.info('Successfully created Jira issue:', responseData.key)
return NextResponse.json({
success: true,
output: {
ts: new Date().toISOString(),
issueKey: issueKey,
issueKey: responseData.key || 'unknown',
summary: responseData.fields?.summary || 'Issue created',
success: true,
url: `https://${domain}/browse/${issueKey}`,
...(assigneeId && { assigneeId }),
url: `https://${domain}/browse/${responseData.key}`,
},
})
} catch (error: any) {
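The custom-field handling shown in this hunk normalizes the field id before adding the value to the create payload. A quick illustration of that normalization (hypothetical standalone helper; the route does it inline):

// Bare numeric ids get the Jira prefix, already-prefixed ids pass through.
const normalizeCustomFieldId = (id: string) =>
  id.startsWith('customfield_') ? id : `customfield_${id}`

normalizeCustomFieldId('10042') // 'customfield_10042'
normalizeCustomFieldId('customfield_10042') // 'customfield_10042'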

View File

@@ -1,6 +1,5 @@
import { NextResponse } from 'next/server'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -24,12 +23,6 @@ export async function POST(request: Request) {
return NextResponse.json({ error: 'Team ID is required' }, { status: 400 })
}
const teamIdValidation = validateMicrosoftGraphId(teamId, 'Team ID')
if (!teamIdValidation.isValid) {
logger.warn('Invalid team ID provided', { teamId, error: teamIdValidation.error })
return NextResponse.json({ error: teamIdValidation.error }, { status: 400 })
}
try {
const authz = await authorizeCredentialUse(request as any, {
credentialId: credential,
@@ -77,6 +70,7 @@ export async function POST(request: Request) {
endpoint: `https://graph.microsoft.com/v1.0/teams/${teamId}/channels`,
})
// Check for auth errors specifically
if (response.status === 401) {
return NextResponse.json(
{
@@ -99,6 +93,7 @@ export async function POST(request: Request) {
} catch (innerError) {
logger.error('Error during API requests:', innerError)
// Check if it's an authentication error
const errorMessage = innerError instanceof Error ? innerError.message : String(innerError)
if (
errorMessage.includes('auth') ||

View File

@@ -1,6 +1,5 @@
import { NextResponse } from 'next/server'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -8,35 +7,21 @@ export const dynamic = 'force-dynamic'
const logger = createLogger('TeamsChatsAPI')
/**
* Helper function to get chat members and create a meaningful name
*
* @param chatId - Microsoft Teams chat ID to get display name for
* @param accessToken - Access token for Microsoft Graph API
* @param chatTopic - Optional existing chat topic
* @returns A meaningful display name for the chat
*/
// Helper function to get chat members and create a meaningful name
const getChatDisplayName = async (
chatId: string,
accessToken: string,
chatTopic?: string
): Promise<string> => {
try {
const chatIdValidation = validateMicrosoftGraphId(chatId, 'chatId')
if (!chatIdValidation.isValid) {
logger.warn('Invalid chat ID in getChatDisplayName', {
error: chatIdValidation.error,
chatId: chatId.substring(0, 50),
})
return `Chat ${chatId.substring(0, 8)}...`
}
// If the chat already has a topic, use it
if (chatTopic?.trim() && chatTopic !== 'null') {
return chatTopic
}
// Fetch chat members to create a meaningful name
const membersResponse = await fetch(
`https://graph.microsoft.com/v1.0/chats/${encodeURIComponent(chatId)}/members`,
`https://graph.microsoft.com/v1.0/chats/${chatId}/members`,
{
method: 'GET',
headers: {
@@ -50,25 +35,27 @@ const getChatDisplayName = async (
const membersData = await membersResponse.json()
const members = membersData.value || []
// Filter out the current user and get display names
const memberNames = members
.filter((member: any) => member.displayName && member.displayName !== 'Unknown')
.map((member: any) => member.displayName)
.slice(0, 3)
.slice(0, 3) // Limit to first 3 names to avoid very long names
if (memberNames.length > 0) {
if (memberNames.length === 1) {
return memberNames[0]
return memberNames[0] // 1:1 chat
}
if (memberNames.length === 2) {
return memberNames.join(' & ')
return memberNames.join(' & ') // 2-person group
}
return `${memberNames.slice(0, 2).join(', ')} & ${memberNames.length - 2} more`
return `${memberNames.slice(0, 2).join(', ')} & ${memberNames.length - 2} more` // Larger group
}
}
// Fallback: try to get a better name from recent messages
try {
const messagesResponse = await fetch(
`https://graph.microsoft.com/v1.0/chats/${encodeURIComponent(chatId)}/messages?$top=10&$orderby=createdDateTime desc`,
`https://graph.microsoft.com/v1.0/chats/${chatId}/messages?$top=10&$orderby=createdDateTime desc`,
{
method: 'GET',
headers: {
@@ -82,12 +69,14 @@ const getChatDisplayName = async (
const messagesData = await messagesResponse.json()
const messages = messagesData.value || []
// Look for chat rename events
for (const message of messages) {
if (message.eventDetail?.chatDisplayName) {
return message.eventDetail.chatDisplayName
}
}
// Get unique sender names from recent messages as last resort
const senderNames = [
...new Set(
messages
@@ -114,6 +103,7 @@ const getChatDisplayName = async (
)
}
// Final fallback
return `Chat ${chatId.split(':')[0] || chatId.substring(0, 8)}...`
} catch (error) {
logger.warn(
@@ -156,6 +146,7 @@ export async function POST(request: Request) {
return NextResponse.json({ error: 'Could not retrieve access token' }, { status: 401 })
}
// Now try to fetch the chats
const response = await fetch('https://graph.microsoft.com/v1.0/me/chats', {
method: 'GET',
headers: {
@@ -172,6 +163,7 @@ export async function POST(request: Request) {
endpoint: 'https://graph.microsoft.com/v1.0/me/chats',
})
// Check for auth errors specifically
if (response.status === 401) {
return NextResponse.json(
{
@@ -187,6 +179,7 @@ export async function POST(request: Request) {
const data = await response.json()
// Process chats with enhanced display names
const chats = await Promise.all(
data.value.map(async (chat: any) => ({
id: chat.id,
@@ -200,6 +193,7 @@ export async function POST(request: Request) {
} catch (innerError) {
logger.error('Error during API requests:', innerError)
// Check if it's an authentication error
const errorMessage = innerError instanceof Error ? innerError.message : String(innerError)
if (
errorMessage.includes('auth') ||
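The name-building logic earlier in this file caps the list at three member names before falling back to chat rename events and recent senders. A compact illustration of just the formatting step (hypothetical helper; the route works on the filtered member list inline):

// Mirrors the member-name formatting shown above.
const formatChatName = (memberNames: string[]): string | undefined => {
  const names = memberNames.slice(0, 3) // limit to three names, as in the route
  if (names.length === 0) return undefined
  if (names.length === 1) return names[0] // 1:1 chat
  if (names.length === 2) return names.join(' & ') // two-person group
  return `${names.slice(0, 2).join(', ')} & ${names.length - 2} more` // larger group
}

formatChatName(['Ada']) // 'Ada'
formatChatName(['Ada', 'Grace']) // 'Ada & Grace'
formatChatName(['Ada', 'Grace', 'Alan', 'Tim']) // 'Ada, Grace & 1 more'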

View File

@@ -30,41 +30,23 @@ export async function createMongoDBConnection(config: MongoDBConnectionConfig) {
return client
}
/**
* Recursively checks an object for dangerous MongoDB operators
* @param obj - The object to check
* @param dangerousOperators - Array of operator names to block
* @returns true if a dangerous operator is found
*/
function containsDangerousOperator(obj: unknown, dangerousOperators: string[]): boolean {
if (typeof obj !== 'object' || obj === null) return false
for (const key of Object.keys(obj as Record<string, unknown>)) {
if (dangerousOperators.includes(key)) return true
if (
typeof (obj as Record<string, unknown>)[key] === 'object' &&
containsDangerousOperator((obj as Record<string, unknown>)[key], dangerousOperators)
) {
return true
}
}
return false
}
export function validateFilter(filter: string): { isValid: boolean; error?: string } {
try {
const parsed = JSON.parse(filter)
const dangerousOperators = [
'$where', // Executes arbitrary JavaScript
'$regex', // Can cause ReDoS attacks
'$expr', // Expression evaluation
'$function', // Custom JavaScript functions
'$accumulator', // Custom JavaScript accumulators
'$let', // Variable definitions that could be exploited
]
const dangerousOperators = ['$where', '$regex', '$expr', '$function', '$accumulator', '$let']
if (containsDangerousOperator(parsed, dangerousOperators)) {
const checkForDangerousOps = (obj: any): boolean => {
if (typeof obj !== 'object' || obj === null) return false
for (const key of Object.keys(obj)) {
if (dangerousOperators.includes(key)) return true
if (typeof obj[key] === 'object' && checkForDangerousOps(obj[key])) return true
}
return false
}
if (checkForDangerousOps(parsed)) {
return {
isValid: false,
error: 'Filter contains potentially dangerous operators',
@@ -92,19 +74,29 @@ export function validatePipeline(pipeline: string): { isValid: boolean; error?:
}
const dangerousOperators = [
'$where', // Executes arbitrary JavaScript
'$function', // Custom JavaScript functions
'$accumulator', // Custom JavaScript accumulators
'$let', // Variable definitions that could be exploited
'$merge', // Writes to external collections
'$out', // Writes to external collections
'$currentOp', // Exposes system operation info
'$listSessions', // Exposes session info
'$listLocalSessions', // Exposes local session info
'$where',
'$function',
'$accumulator',
'$let',
'$merge',
'$out',
'$currentOp',
'$listSessions',
'$listLocalSessions',
]
const checkPipelineStage = (stage: any): boolean => {
if (typeof stage !== 'object' || stage === null) return false
for (const key of Object.keys(stage)) {
if (dangerousOperators.includes(key)) return true
if (typeof stage[key] === 'object' && checkPipelineStage(stage[key])) return true
}
return false
}
for (const stage of parsed) {
if (containsDangerousOperator(stage, dangerousOperators)) {
if (checkPipelineStage(stage)) {
return {
isValid: false,
error: 'Pipeline contains potentially dangerous operators',
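Assuming the validateFilter export above, a few calls show how the recursive operator check behaves, including on operators nested inside sub-documents (expected results inferred from the dangerous-operator list; the success-path return shape is assumed):

validateFilter('{ "status": "active" }')
// -> { isValid: true }

validateFilter('{ "$where": "this.credits > this.debits" }')
// -> { isValid: false, error: 'Filter contains potentially dangerous operators' }

validateFilter('{ "name": { "$regex": "^admin" } }')
// -> { isValid: false, error: 'Filter contains potentially dangerous operators' }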

View File

@@ -98,45 +98,15 @@ export function buildDeleteQuery(table: string, where: string) {
return { query, values: [] }
}
/**
* Validates a WHERE clause to prevent SQL injection attacks
* @param where - The WHERE clause string to validate
* @throws {Error} If the WHERE clause contains potentially dangerous patterns
*/
function validateWhereClause(where: string): void {
const dangerousPatterns = [
// DDL and DML injection via stacked queries
/;\s*(drop|delete|insert|update|create|alter|grant|revoke)/i,
// Union-based injection
/union\s+(all\s+)?select/i,
// File operations
/union\s+select/i,
/into\s+outfile/i,
/into\s+dumpfile/i,
/load_file\s*\(/i,
// Comment-based injection (can truncate query)
/load_file/i,
/--/,
/\/\*/,
/\*\//,
// Tautologies - always true/false conditions using backreferences
// Matches OR 'x'='x' or OR x=x (same value both sides) but NOT OR col='value'
/\bor\s+(['"]?)(\w+)\1\s*=\s*\1\2\1/i,
/\bor\s+true\b/i,
/\bor\s+false\b/i,
// AND tautologies (less common but still used in attacks)
/\band\s+(['"]?)(\w+)\1\s*=\s*\1\2\1/i,
/\band\s+true\b/i,
/\band\s+false\b/i,
// Time-based blind injection
/\bsleep\s*\(/i,
/\bbenchmark\s*\(/i,
/\bwaitfor\s+delay/i,
// Stacked queries (any statement after semicolon)
/;\s*\w+/,
// Information schema queries
/information_schema/i,
/mysql\./i,
// System functions and procedures
/\bxp_cmdshell/i,
]
for (const pattern of dangerousPatterns) {
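The backreference-based tautology patterns in this list are the least obvious entries; a short check against sample WHERE fragments shows what they do and do not flag:

// The OR variant from the list above; the AND variant works the same way.
const tautology = /\bor\s+(['"]?)(\w+)\1\s*=\s*\1\2\1/i

tautology.test("id = 1 OR 'x'='x'") // true  - quoted value compared to itself
tautology.test('id = 1 OR 1=1') // true  - bare value compared to itself
tautology.test("id = 1 OR role='admin'") // false - ordinary column comparison
tautology.test("status = 'active'") // false - no OR clause at all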

View File

@@ -4,7 +4,6 @@ import { account } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -37,12 +36,6 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
}
const credentialIdValidation = validateMicrosoftGraphId(credentialId, 'credentialId')
if (!credentialIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid credential ID`, { error: credentialIdValidation.error })
return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 })
}
logger.info(`[${requestId}] Fetching credential`, { credentialId })
const credentials = await db.select().from(account).where(eq(account.id, credentialId)).limit(1)

View File

@@ -4,7 +4,6 @@ import { account } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -34,12 +33,6 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
}
const credentialIdValidation = validateMicrosoftGraphId(credentialId, 'credentialId')
if (!credentialIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid credential ID`, { error: credentialIdValidation.error })
return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 })
}
const credentials = await db.select().from(account).where(eq(account.id, credentialId)).limit(1)
if (!credentials.length) {
return NextResponse.json({ error: 'Credential not found' }, { status: 404 })
@@ -55,6 +48,7 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Failed to obtain valid access token' }, { status: 401 })
}
// Build URL for OneDrive folders
let url = `https://graph.microsoft.com/v1.0/me/drive/root/children?$filter=folder ne null&$select=id,name,folder,webUrl,createdDateTime,lastModifiedDateTime&$top=50`
if (query) {
@@ -77,7 +71,7 @@ export async function GET(request: NextRequest) {
const data = await response.json()
const folders = (data.value || [])
.filter((item: MicrosoftGraphDriveItem) => item.folder)
.filter((item: MicrosoftGraphDriveItem) => item.folder) // Only folders
.map((folder: MicrosoftGraphDriveItem) => ({
id: folder.id,
name: folder.name,

View File

@@ -2,7 +2,6 @@ import { type NextRequest, NextResponse } from 'next/server'
import * as XLSX from 'xlsx'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { validateMicrosoftGraphId } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import {
@@ -29,9 +28,9 @@ const ExcelValuesSchema = z.union([
const OneDriveUploadSchema = z.object({
accessToken: z.string().min(1, 'Access token is required'),
fileName: z.string().min(1, 'File name is required'),
file: z.any().optional(),
file: z.any().optional(), // UserFile object (optional for blank Excel creation)
folderId: z.string().optional().nullable(),
mimeType: z.string().nullish(),
mimeType: z.string().nullish(), // Accept string, null, or undefined
values: ExcelValuesSchema.optional().nullable(),
})
@@ -63,19 +62,24 @@ export async function POST(request: NextRequest) {
let fileBuffer: Buffer
let mimeType: string
// Check if we're creating a blank Excel file
const isExcelCreation =
validatedData.mimeType ===
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' && !validatedData.file
if (isExcelCreation) {
// Create a blank Excel workbook
const workbook = XLSX.utils.book_new()
const worksheet = XLSX.utils.aoa_to_sheet([[]])
XLSX.utils.book_append_sheet(workbook, worksheet, 'Sheet1')
// Generate XLSX file as buffer
const xlsxBuffer = XLSX.write(workbook, { type: 'buffer', bookType: 'xlsx' })
fileBuffer = Buffer.from(xlsxBuffer)
mimeType = 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
} else {
// Handle regular file upload
const rawFile = validatedData.file
if (!rawFile) {
@@ -104,6 +108,7 @@ export async function POST(request: NextRequest) {
fileToProcess = rawFile
}
// Convert to UserFile format
let userFile
try {
userFile = processSingleFileToUserFile(fileToProcess, requestId, logger)
@@ -133,7 +138,7 @@ export async function POST(request: NextRequest) {
mimeType = userFile.type || 'application/octet-stream'
}
const maxSize = 250 * 1024 * 1024
const maxSize = 250 * 1024 * 1024 // 250MB
if (fileBuffer.length > maxSize) {
const sizeMB = (fileBuffer.length / (1024 * 1024)).toFixed(2)
logger.warn(`[${requestId}] File too large: ${sizeMB}MB`)
@@ -146,6 +151,7 @@ export async function POST(request: NextRequest) {
)
}
// Ensure file name has an appropriate extension
let fileName = validatedData.fileName
const hasExtension = fileName.includes('.') && fileName.lastIndexOf('.') > 0
@@ -163,17 +169,6 @@ export async function POST(request: NextRequest) {
const folderId = validatedData.folderId?.trim()
if (folderId && folderId !== '') {
const folderIdValidation = validateMicrosoftGraphId(folderId, 'folderId')
if (!folderIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid folder ID`, { error: folderIdValidation.error })
return NextResponse.json(
{
success: false,
error: folderIdValidation.error,
},
{ status: 400 }
)
}
uploadUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(folderId)}:/${encodeURIComponent(fileName)}:/content`
} else {
uploadUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/root:/${encodeURIComponent(fileName)}:/content`
@@ -202,12 +197,14 @@ export async function POST(request: NextRequest) {
const fileData = await uploadResponse.json()
// If this is an Excel creation and values were provided, write them using the Excel API
let excelWriteResult: any | undefined
const shouldWriteExcelContent =
isExcelCreation && Array.isArray(excelValues) && excelValues.length > 0
if (shouldWriteExcelContent) {
try {
// Create a workbook session to ensure reliability and persistence of changes
let workbookSessionId: string | undefined
const sessionResp = await fetch(
`${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(fileData.id)}/workbook/createSession`,
@@ -226,6 +223,7 @@ export async function POST(request: NextRequest) {
workbookSessionId = sessionData?.id
}
// Determine the first worksheet name
let sheetName = 'Sheet1'
try {
const listUrl = `${MICROSOFT_GRAPH_BASE}/me/drive/items/${encodeURIComponent(
@@ -274,6 +272,7 @@ export async function POST(request: NextRequest) {
return paddedRow
})
// Compute concise end range from A1 and matrix size (no network round-trip)
const indexToColLetters = (index: number): string => {
let n = index
let s = ''
@@ -314,6 +313,7 @@ export async function POST(request: NextRequest) {
statusText: excelWriteResponse?.statusText,
error: errorText,
})
// Do not fail the entire request; return upload success with write error details
excelWriteResult = {
success: false,
error: `Excel write failed: ${excelWriteResponse?.statusText || 'unknown'}`,
@@ -321,6 +321,7 @@ export async function POST(request: NextRequest) {
}
} else {
const writeData = await excelWriteResponse.json()
// The Range PATCH returns a Range object; log address and values length
const addr = writeData.address || writeData.addressLocal
const v = writeData.values || []
excelWriteResult = {
@@ -332,6 +333,7 @@ export async function POST(request: NextRequest) {
}
}
// Attempt to close the workbook session if one was created
if (workbookSessionId) {
try {
const closeResp = await fetch(
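The indexToColLetters helper is truncated by the hunk above; the sketch below is the usual 0-based column-index-to-letters conversion it appears to start (an assumption, not the exact body), plus the end-range computation the surrounding comment describes:

// Assumed completion of the truncated helper above.
const indexToColLetters = (index: number): string => {
  let n = index
  let s = ''
  while (n >= 0) {
    s = String.fromCharCode((n % 26) + 65) + s
    n = Math.floor(n / 26) - 1
  }
  return s
}

indexToColLetters(0) // 'A'
indexToColLetters(25) // 'Z'
indexToColLetters(26) // 'AA'

// End range for an R x C matrix written starting at A1, e.g. 10 rows x 3 cols -> 'A1:C10'.
const endRange = (rows: number, cols: number) => `A1:${indexToColLetters(cols - 1)}${rows}`
endRange(10, 3) // 'A1:C10'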

View File

@@ -3,7 +3,6 @@ import { account } from '@sim/db/schema'
import { eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { validateAlphanumericId } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { createLogger } from '@/lib/logs/console/logger'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
@@ -30,13 +29,8 @@ export async function GET(request: Request) {
return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
}
const credentialIdValidation = validateAlphanumericId(credentialId, 'credentialId')
if (!credentialIdValidation.isValid) {
logger.warn('Invalid credentialId format', { error: credentialIdValidation.error })
return NextResponse.json({ error: credentialIdValidation.error }, { status: 400 })
}
try {
// Ensure we have a session for permission checks
const sessionUserId = session?.user?.id || ''
if (!sessionUserId) {
@@ -44,6 +38,7 @@ export async function GET(request: Request) {
return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
}
// Resolve the credential owner to support collaborator-owned credentials
const creds = await db.select().from(account).where(eq(account.id, credentialId)).limit(1)
if (!creds.length) {
logger.warn('Credential not found', { credentialId })
@@ -84,6 +79,7 @@ export async function GET(request: Request) {
endpoint: 'https://graph.microsoft.com/v1.0/me/mailFolders',
})
// Check for auth errors specifically
if (response.status === 401) {
return NextResponse.json(
{
@@ -100,6 +96,7 @@ export async function GET(request: Request) {
const data = await response.json()
const folders = data.value || []
// Transform folders to match the expected format
const transformedFolders = folders.map((folder: OutlookFolder) => ({
id: folder.id,
name: folder.displayName,
@@ -114,6 +111,7 @@ export async function GET(request: Request) {
} catch (innerError) {
logger.error('Error during API requests:', innerError)
// Check if it's an authentication error
const errorMessage = innerError instanceof Error ? innerError.message : String(innerError)
if (
errorMessage.includes('auth') ||

View File

@@ -64,46 +64,15 @@ export function sanitizeIdentifier(identifier: string): string {
return sanitizeSingleIdentifier(identifier)
}
/**
* Validates a WHERE clause to prevent SQL injection attacks
* @param where - The WHERE clause string to validate
* @throws {Error} If the WHERE clause contains potentially dangerous patterns
*/
function validateWhereClause(where: string): void {
const dangerousPatterns = [
// DDL and DML injection via stacked queries
/;\s*(drop|delete|insert|update|create|alter|grant|revoke)/i,
// Union-based injection
/union\s+(all\s+)?select/i,
// File operations
/union\s+select/i,
/into\s+outfile/i,
/load_file\s*\(/i,
/pg_read_file/i,
// Comment-based injection (can truncate query)
/load_file/i,
/--/,
/\/\*/,
/\*\//,
// Tautologies - always true/false conditions using backreferences
// Matches OR 'x'='x' or OR x=x (same value both sides) but NOT OR col='value'
/\bor\s+(['"]?)(\w+)\1\s*=\s*\1\2\1/i,
/\bor\s+true\b/i,
/\bor\s+false\b/i,
// AND tautologies (less common but still used in attacks)
/\band\s+(['"]?)(\w+)\1\s*=\s*\1\2\1/i,
/\band\s+true\b/i,
/\band\s+false\b/i,
// Time-based blind injection
/\bsleep\s*\(/i,
/\bwaitfor\s+delay/i,
/\bpg_sleep\s*\(/i,
/\bbenchmark\s*\(/i,
// Stacked queries (any statement after semicolon)
/;\s*\w+/,
// Information schema / system catalog queries
/information_schema/i,
/pg_catalog/i,
// System functions and procedures
/\bxp_cmdshell/i,
]
for (const pattern of dangerousPatterns) {

Some files were not shown because too many files have changed in this diff.