Compare commits

...

32 Commits

Author SHA1 Message Date
Waleed
ea8762e99b v0.3.51: mcp support, copilot improvements, polling for live execution data, bug fixes 2025-09-10 14:35:53 -07:00
Waleed
cff0a8718e fix(webhooks): made spacing more clear, added copy button for webhook URL & fixed race condition for mcp tools/server fetching in the mcp block (#1309)
* update infra and remove railway

* fix(webhooks-ui): made spacing more clear, added copy button for webhook URL & fixed race condition for mcp tools/server fetching in the mcp block

* Revert "update infra and remove railway"

This reverts commit 5a8876209d.

* remove extraneous comments

* ack PR comments
2025-09-10 14:25:17 -07:00
Vikhyath Mondreti
abca73106d improvement(readme): add e2b reference to readme (#1307) 2025-09-10 10:53:47 -07:00
Waleed
afb99fbaf1 fix(webhook-ui): fixed webhook ui (#1301)
* update infra and remove railway

* fix(webhook-ui): fixed webhook ui

* Revert "update infra and remove railway"

This reverts commit 88669ad0b7.

* feat(control-bar): updated export controls and webhook settings

* additional styling improvements to chat deploy & templates modals

* fix test event

---------

Co-authored-by: Emir Karabeg <emirkarabeg@berkeley.edu>
Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
2025-09-10 09:35:28 -07:00
Adam Gough
4d973ffb01 Fix(yaml env var): added env var fallback (#1300)
* added env var to route

* lint

---------

Co-authored-by: Adam Gough <adamgough@Mac.attlocal.net>
2025-09-09 18:11:53 -07:00
Vikhyath Mondreti
8841e9bd6b fix(subflow-validation): validate subflow fields correctly + surface serialization errors in the logs correctly (#1299)
* fix(subflow-validation): validate subflow fields correctly + surface serialization errors in the logs correctly

* remove comments
2025-09-09 18:02:30 -07:00
Waleed
3d4b9f0665 feat(mcp): added support for mcp servers (#1296)
* update infra and remove railway

* feat(mcp): add mcp support

* consolidate mcp utils

* UI improvements, more MCP stuff

* cleanup placeholders

* reran migrations

* general improvements

* fix server side mcp exec

* more improvements, fixed search in environment settings tab

* persist subblock values for mcp block

* style fixes

* update all text-primary to text-muted-foreground for visibility in dark mode

* Revert "update infra and remove railway"

This reverts commit dbf2b153b8f96808e7bb7e5f86f7e8624e3c12dd.

* make MCP servers workspace-scoped

* cleanup & remove unused dep

* consolidated utils, DRY

* added tests

* better error messages, confirmed that permissions works correctly

* additional improvements

* remove extraneous comments

* reran migrations

* lint

* style changes

* fix: prevent config mutation in MCP client URL retry logic

Fixed an issue where the MCP client was mutating the shared configuration
object's URL during retry attempts. This could cause configuration corruption
if the same config object was reused elsewhere.

* resolve PR comments

* ack PR comments
2025-09-09 17:18:08 -07:00
Vikhyath Mondreti
c48039f97f improvement(subblock-defaults): custom defaults for subblocks if needed (#1298) 2025-09-09 17:12:10 -07:00
Waleed
8f7b11f089 feat(account): added user profile pictures in settings (#1297)
* update infra and remove railway

* feat(account): add profile pictures

* Revert "update infra and remove railway"

This reverts commit e3f0c49456.

* ack PR comments, use brandConfig logo URL as default
2025-09-09 16:09:31 -07:00
Waleed
ae670a7819 fix(start-input): restore tag dropdown in input-format component (#1294)
* update infra and remove railway

* fix(input-format): restore tag dropdown in input-format component

* Revert "update infra and remove railway"

This reverts commit 7ade5fb2ef.

* style improvements
2025-09-09 12:58:21 -07:00
Vikhyath Mondreti
a5c224e4b0 fix(workflow-block): remove process specific circular dependency check (#1293)
* fix(workflow-block): remove process specific circular dep check

* remove comments
2025-09-09 12:50:25 -07:00
Vikhyath Mondreti
0785f6e920 feat(logs-api): expose logs as api + can subscribe to workflow execution using webhook url (#1287)
* feat(logs-api): expose logs as api + can subscribe to workflow execution using webhook url

* fix scroll

* Update apps/docs/content/docs/execution/api.mdx

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>

* fix rate limits

* address greptile comments

* remove unused file

* address more greptile comments

* minor UI changes

* fix atomicity to prevent races

* make search param sensible

---------

Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-09-09 11:34:18 -07:00
Siddharth Ganesan
cf4a935575 Merge pull request #1286 from simstudioai/fix/copilot-custom-tools
fix(copilot): custom tools
2025-09-08 17:23:58 -07:00
Siddharth Ganesan
521316bb8c Lint 2025-09-08 16:39:57 -07:00
Vikhyath Mondreti
d357280003 feat(usage-api): make external endpoint to query usage (#1285)
* feat(usage-api): make external endpoint to query usage

* add docs

* consolidate endpoints with rate-limits one

* update docs

* consolidate code

* remove unused route
2025-09-08 16:35:58 -07:00
Siddharth Ganesan
adf8c2244c Fix custom tool save 2025-09-08 15:46:59 -07:00
Siddharth Ganesan
ebfdb9ce3b V1 2025-09-08 15:23:15 -07:00
Vikhyath Mondreti
784992f347 v0.3.50: debounce moved server side, hasWorkflowChanged fixes, advanced mode/serializer fix, jira fix, billing notifs 2025-09-08 11:53:44 -07:00
Waleed
5218dd41b9 fix(notifications): increase precision on billing calculations (#1283)
* update infra and remove railway

* fix(notifications): increase precision on billing calculations

* Revert "update infra and remove railway"

This reverts commit d17603e844.

* cleanup
2025-09-08 10:46:23 -07:00
Waleed
07e70409c7 feat(notifications): added notifications for usage thresholds, overages, and welcome emails (#1266)
* feat(notifications): added notifications for usage thresholds, overages, and welcome emails

* cleanup

* updated logo, ack PR comments

* ran migrations
2025-09-08 09:47:16 -07:00
Adam Gough
07ba17422b Fix(jira): reading multiple issues and write
fixed the read and write tools in jira
2025-09-06 20:48:49 -07:00
Waleed
d45324bb83 fix(sidebar): draggable cursor on sidebar when switching workflows (#1276) 2025-09-06 19:52:23 -07:00
Vikhyath Mondreti
ced64129da fix(subblock-param-mapping): consolidate resolution of advanced / basic mode params using canonicalParamId (#1274)
* fix(serializer): block's params mapper not running first

* fix

* fix

* revert

* add canonicalParamId

* fix

* fix tests

* fix discord

* fix condition checking

* edit condition check

* fix

* fix subblock config check

* fix

* add logging

* add more logs

* fix

* fix

* attempt

* fix discord

* remove unused discord code

* mark as required correctly
2025-09-06 17:33:49 -07:00
Vikhyath Mondreti
1e14743391 fix(sockets): move debounce to server side (#1265)
* fix(sockets): move debounce to server side

* remove comments / unused onBlur
2025-09-06 12:49:35 -07:00
Waleed
a0bb754c8c 0.3.49: readme updates, router block and variables improvements 2025-09-05 14:58:39 -07:00
Waleed
851031239d fix(variables): add back ability to reference root block like <start> (#1262) 2025-09-05 14:45:26 -07:00
Waleed
3811b509ef fix(router): change router block content to prompt (#1261)
* fix(router): remove prompt from router content

* fixed router
2025-09-05 13:39:04 -07:00
Vikhyath Mondreti
abb835d22d fix(schedule-self-host): remove incorrect migration (#1260)
* fix(schedule-self-host): remove incorrect migration

* delete unintentional file
2025-09-05 11:52:39 -07:00
Vikhyath Mondreti
f2a046ff24 improvement(docs): readme.md to mention .env setup for copilot setup 2025-09-05 11:01:54 -07:00
Vikhyath Mondreti
bd6d4a91a3 0.3.48: revert trigger dev bypass for enterprise users 2025-09-04 23:57:22 -07:00
Vikhyath Mondreti
21beca8fd5 fix(cleanup): cleanup unused vars + webhook typo (#1259) 2025-09-04 23:52:31 -07:00
Vikhyath Mondreti
0a86eda853 Revert "feat(enterprise-plan-webhooks): skip webhook queue for enterprise plan users (#1250)" (#1257)
This reverts commit 37dcde2afc.
2025-09-04 23:37:19 -07:00
307 changed files with 33670 additions and 2923 deletions

View File

@@ -159,7 +159,7 @@ bun run dev:sockets
Copilot is a Sim-managed service. To use Copilot on a self-hosted instance:
- Go to https://sim.ai → Settings → Copilot and generate a Copilot API key
- Set `COPILOT_API_KEY` in your self-hosted environment to that value
- Set `COPILOT_API_KEY` environment variable in your self-hosted apps/sim/.env file to that value
## Tech Stack
@@ -174,6 +174,7 @@ Copilot is a Sim-managed service. To use Copilot on a self-hosted instance:
- **Monorepo**: [Turborepo](https://turborepo.org/)
- **Realtime**: [Socket.io](https://socket.io/)
- **Background Jobs**: [Trigger.dev](https://trigger.dev/)
- **Remote Code Execution**: [E2B](https://www.e2b.dev/)
## Contributing

View File

@@ -117,7 +117,7 @@ Your API key for the selected LLM provider. This is securely stored and used for
After a router makes a decision, you can access its outputs:
- **`<router.content>`**: Summary of the routing decision made
- **`<router.prompt>`**: Summary of the routing prompt used
- **`<router.selected_path>`**: Details of the chosen destination block
- **`<router.tokens>`**: Token usage statistics from the LLM
- **`<router.model>`**: The model used for decision-making
@@ -182,7 +182,7 @@ Confidence Threshold: 0.7 // Minimum confidence for routing
<Tab>
<ul className="list-disc space-y-2 pl-6">
<li>
<strong>router.content</strong>: Summary of routing decision
<strong>router.prompt</strong>: Summary of routing prompt used
</li>
<li>
<strong>router.selected_path</strong>: Details of chosen destination

View File

@@ -212,3 +212,47 @@ Monitor your usage and billing in Settings → Subscription:
- **Usage Limits**: Plan limits with visual progress indicators
- **Billing Details**: Projected charges and minimum commitments
- **Plan Management**: Upgrade options and billing history
### Programmatic Rate Limits & Usage (API)
You can query your current API rate limits and usage summary using your API key.
Endpoint:
```text
GET /api/users/me/usage-limits
```
Authentication:
- Include your API key in the `X-API-Key` header.
Response (example):
```json
{
"success": true,
"rateLimit": {
"sync": { "isLimited": false, "limit": 10, "remaining": 10, "resetAt": "2025-09-08T22:51:55.999Z" },
"async": { "isLimited": false, "limit": 50, "remaining": 50, "resetAt": "2025-09-08T22:51:56.155Z" },
"authType": "api"
},
"usage": {
"currentPeriodCost": 12.34,
"limit": 100,
"plan": "pro"
}
}
```
Example:
```bash
curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" https://sim.ai/api/users/me/usage-limits
```
Notes:
- `currentPeriodCost` reflects usage in the current billing period.
- `limit` is derived from individual limits (Free/Pro) or pooled organization limits (Team/Enterprise).
- `plan` is the highest-priority active plan associated with your user.
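For instance, a script might check remaining headroom before launching a batch of executions. The sketch below assumes the API key is available as `SIM_API_KEY` and uses an arbitrary 80% warning threshold; only the endpoint, header, and response fields come from the documentation above.
```javascript
// Minimal sketch: inspect rate limits and usage before starting more work.
async function checkUsage() {
  const res = await fetch('https://sim.ai/api/users/me/usage-limits', {
    headers: { 'X-API-Key': process.env.SIM_API_KEY }, // SIM_API_KEY is an assumed env var name
  });
  if (!res.ok) throw new Error(`Usage check failed: ${res.status}`);
  const { rateLimit, usage } = await res.json();
  console.log(`Sync executions remaining this window: ${rateLimit.sync.remaining}`);
  if (usage.currentPeriodCost >= usage.limit * 0.8) {
    console.warn(`Usage at $${usage.currentPeriodCost} of $${usage.limit} (80% threshold is illustrative)`);
  }
}
```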

View File

@@ -0,0 +1,532 @@
---
title: External API
description: Query workflow execution logs and set up webhooks for real-time notifications
---
import { Accordion, Accordions } from 'fumadocs-ui/components/accordion'
import { Callout } from 'fumadocs-ui/components/callout'
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
import { CodeBlock } from 'fumadocs-ui/components/codeblock'
Sim provides a comprehensive external API for querying workflow execution logs and setting up webhooks for real-time notifications when workflows complete.
## Authentication
All API requests require an API key passed in the `x-api-key` header:
```bash
curl -H "x-api-key: YOUR_API_KEY" \
https://sim.ai/api/v1/logs?workspaceId=YOUR_WORKSPACE_ID
```
You can generate API keys from your user settings in the Sim dashboard.
## Logs API
All API responses include information about your workflow execution limits and usage:
```json
"limits": {
"workflowExecutionRateLimit": {
"sync": {
"limit": 60, // Max sync workflow executions per minute
"remaining": 58, // Remaining sync workflow executions
"resetAt": "..." // When the window resets
},
"async": {
"limit": 60, // Max async workflow executions per minute
"remaining": 59, // Remaining async workflow executions
"resetAt": "..." // When the window resets
}
},
"usage": {
"currentPeriodCost": 1.234, // Current billing period usage in USD
"limit": 10, // Usage limit in USD
"plan": "pro", // Current subscription plan
"isExceeded": false // Whether limit is exceeded
}
}
```
**Note:** The rate limits in the response body are for workflow executions. The rate limits for calling this API endpoint are in the response headers (`X-RateLimit-*`).
### Query Logs
Query workflow execution logs with extensive filtering options.
<Tabs items={['Request', 'Response']}>
<Tab value="Request">
```http
GET /api/v1/logs
```
**Required Parameters:**
- `workspaceId` - Your workspace ID
**Optional Filters:**
- `workflowIds` - Comma-separated workflow IDs
- `folderIds` - Comma-separated folder IDs
- `triggers` - Comma-separated trigger types: `api`, `webhook`, `schedule`, `manual`, `chat`
- `level` - Filter by level: `info`, `error`
- `startDate` - ISO timestamp for date range start
- `endDate` - ISO timestamp for date range end
- `executionId` - Exact execution ID match
- `minDurationMs` - Minimum execution duration in milliseconds
- `maxDurationMs` - Maximum execution duration in milliseconds
- `minCost` - Minimum execution cost
- `maxCost` - Maximum execution cost
- `model` - Filter by AI model used
**Pagination:**
- `limit` - Results per page (default: 100)
- `cursor` - Cursor for next page
- `order` - Sort order: `desc`, `asc` (default: desc)
**Detail Level:**
- `details` - Response detail level: `basic`, `full` (default: basic)
- `includeTraceSpans` - Include trace spans (default: false)
- `includeFinalOutput` - Include final output (default: false)
</Tab>
<Tab value="Response">
```json
{
"data": [
{
"id": "log_abc123",
"workflowId": "wf_xyz789",
"executionId": "exec_def456",
"level": "info",
"trigger": "api",
"startedAt": "2025-01-01T12:34:56.789Z",
"endedAt": "2025-01-01T12:34:57.123Z",
"totalDurationMs": 334,
"cost": {
"total": 0.00234
},
"files": null
}
],
"nextCursor": "eyJzIjoiMjAyNS0wMS0wMVQxMjozNDo1Ni43ODlaIiwiaWQiOiJsb2dfYWJjMTIzIn0",
"limits": {
"workflowExecutionRateLimit": {
"sync": {
"limit": 60,
"remaining": 58,
"resetAt": "2025-01-01T12:35:56.789Z"
},
"async": {
"limit": 60,
"remaining": 59,
"resetAt": "2025-01-01T12:35:56.789Z"
}
},
"usage": {
"currentPeriodCost": 1.234,
"limit": 10,
"plan": "pro",
"isExceeded": false
}
}
}
```
</Tab>
</Tabs>
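As a concrete request, the sketch below queries error-level logs triggered via the API since a given date; the workspace ID, date, and filter values are placeholders.
```javascript
// Fetch recent error logs for a workspace (IDs and dates are illustrative).
const params = new URLSearchParams({
  workspaceId: 'YOUR_WORKSPACE_ID',
  triggers: 'api',
  level: 'error',
  startDate: '2025-01-01T00:00:00.000Z',
  limit: '50',
  order: 'desc',
});
const res = await fetch(`https://sim.ai/api/v1/logs?${params}`, {
  headers: { 'x-api-key': 'YOUR_API_KEY' },
});
const { data, nextCursor } = await res.json();
console.log(`Fetched ${data.length} logs; next cursor: ${nextCursor ?? 'none'}`);
```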
### Get Log Details
Retrieve detailed information about a specific log entry.
<Tabs items={['Request', 'Response']}>
<Tab value="Request">
```http
GET /api/v1/logs/{id}
```
</Tab>
<Tab value="Response">
```json
{
"data": {
"id": "log_abc123",
"workflowId": "wf_xyz789",
"executionId": "exec_def456",
"level": "info",
"trigger": "api",
"startedAt": "2025-01-01T12:34:56.789Z",
"endedAt": "2025-01-01T12:34:57.123Z",
"totalDurationMs": 334,
"workflow": {
"id": "wf_xyz789",
"name": "My Workflow",
"description": "Process customer data"
},
"executionData": {
"traceSpans": [...],
"finalOutput": {...}
},
"cost": {
"total": 0.00234,
"tokens": {
"prompt": 123,
"completion": 456,
"total": 579
},
"models": {
"gpt-4o": {
"input": 0.001,
"output": 0.00134,
"total": 0.00234,
"tokens": {
"prompt": 123,
"completion": 456,
"total": 579
}
}
}
},
"limits": {
"workflowExecutionRateLimit": {
"sync": {
"limit": 60,
"remaining": 58,
"resetAt": "2025-01-01T12:35:56.789Z"
},
"async": {
"limit": 60,
"remaining": 59,
"resetAt": "2025-01-01T12:35:56.789Z"
}
},
"usage": {
"currentPeriodCost": 1.234,
"limit": 10,
"plan": "pro",
"isExceeded": false
}
}
}
}
```
</Tab>
</Tabs>
### Get Execution Details
Retrieve execution details including the workflow state snapshot.
<Tabs items={['Request', 'Response']}>
<Tab value="Request">
```http
GET /api/v1/logs/executions/{executionId}
```
</Tab>
<Tab value="Response">
```json
{
"executionId": "exec_def456",
"workflowId": "wf_xyz789",
"workflowState": {
"blocks": {...},
"edges": [...],
"loops": {...},
"parallels": {...}
},
"executionMetadata": {
"trigger": "api",
"startedAt": "2025-01-01T12:34:56.789Z",
"endedAt": "2025-01-01T12:34:57.123Z",
"totalDurationMs": 334,
"cost": {...}
}
}
```
</Tab>
</Tabs>
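For example, the state snapshot for the execution ID shown above can be pulled with a single authenticated GET; the ID and key are placeholders.
```javascript
// Retrieve the workflow state snapshot for one execution (ID is illustrative).
const res = await fetch('https://sim.ai/api/v1/logs/executions/exec_def456', {
  headers: { 'x-api-key': 'YOUR_API_KEY' },
});
const execution = await res.json();
console.log(Object.keys(execution.workflowState.blocks)); // block IDs captured at execution time
```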
## Webhook Subscriptions
Get real-time notifications when workflow executions complete. Webhooks are configured through the Sim UI in the workflow editor.
### Configuration
Webhooks can be configured for each workflow through the workflow editor UI. Click the webhook icon in the control bar to set up your webhook subscriptions.
**Available Configuration Options:**
- `url`: Your webhook endpoint URL
- `secret`: Optional secret for HMAC signature verification
- `includeFinalOutput`: Include the workflow's final output in the payload
- `includeTraceSpans`: Include detailed execution trace spans
- `includeRateLimits`: Include the workflow owner's rate limit information
- `includeUsageData`: Include the workflow owner's usage and billing data
- `levelFilter`: Array of log levels to receive (`info`, `error`)
- `triggerFilter`: Array of trigger types to receive (`api`, `webhook`, `schedule`, `manual`, `chat`)
- `active`: Enable/disable the webhook subscription
### Webhook Payload
When a workflow execution completes, Sim sends a POST request to your webhook URL:
```json
{
"id": "evt_123",
"type": "workflow.execution.completed",
"timestamp": 1735925767890,
"data": {
"workflowId": "wf_xyz789",
"executionId": "exec_def456",
"status": "success",
"level": "info",
"trigger": "api",
"startedAt": "2025-01-01T12:34:56.789Z",
"endedAt": "2025-01-01T12:34:57.123Z",
"totalDurationMs": 334,
"cost": {
"total": 0.00234,
"tokens": {
"prompt": 123,
"completion": 456,
"total": 579
},
"models": {
"gpt-4o": {
"input": 0.001,
"output": 0.00134,
"total": 0.00234,
"tokens": {
"prompt": 123,
"completion": 456,
"total": 579
}
}
}
},
"files": null,
"finalOutput": {...}, // Only if includeFinalOutput=true
"traceSpans": [...], // Only if includeTraceSpans=true
"rateLimits": {...}, // Only if includeRateLimits=true
"usage": {...} // Only if includeUsageData=true
},
"links": {
"log": "/v1/logs/log_abc123",
"execution": "/v1/logs/executions/exec_def456"
}
}
```
### Webhook Headers
Each webhook request includes these headers:
- `sim-event`: Event type (always `workflow.execution.completed`)
- `sim-timestamp`: Unix timestamp in milliseconds
- `sim-delivery-id`: Unique delivery ID for idempotency
- `sim-signature`: HMAC-SHA256 signature for verification (if secret configured)
- `Idempotency-Key`: Same as delivery ID for duplicate detection
### Signature Verification
If you configure a webhook secret, verify the signature to ensure the webhook is from Sim:
<Tabs items={['Node.js', 'Python']}>
<Tab value="Node.js">
```javascript
import crypto from 'crypto';
function verifyWebhookSignature(body, signature, secret) {
const [timestampPart, signaturePart] = signature.split(',');
const timestamp = timestampPart.replace('t=', '');
const expectedSignature = signaturePart.replace('v1=', '');
const signatureBase = `${timestamp}.${body}`;
const hmac = crypto.createHmac('sha256', secret);
hmac.update(signatureBase);
const computedSignature = hmac.digest('hex');
return computedSignature === expectedSignature;
}
// In your webhook handler
app.post('/webhook', (req, res) => {
const signature = req.headers['sim-signature'];
const body = JSON.stringify(req.body);
if (!verifyWebhookSignature(body, signature, process.env.WEBHOOK_SECRET)) {
return res.status(401).send('Invalid signature');
}
// Process the webhook...
});
```
</Tab>
<Tab value="Python">
```python
import hmac
import hashlib
import json
def verify_webhook_signature(body: str, signature: str, secret: str) -> bool:
timestamp_part, signature_part = signature.split(',')
timestamp = timestamp_part.replace('t=', '')
expected_signature = signature_part.replace('v1=', '')
signature_base = f"{timestamp}.{body}"
computed_signature = hmac.new(
secret.encode(),
signature_base.encode(),
hashlib.sha256
).hexdigest()
return hmac.compare_digest(computed_signature, expected_signature)
# In your webhook handler
@app.route('/webhook', methods=['POST'])
def webhook():
signature = request.headers.get('sim-signature')
body = json.dumps(request.json)
if not verify_webhook_signature(body, signature, os.environ['WEBHOOK_SECRET']):
return 'Invalid signature', 401
# Process the webhook...
```
</Tab>
</Tabs>
### Retry Policy
Failed webhook deliveries are retried with exponential backoff and jitter:
- Maximum attempts: 5
- Retry delays: 5 seconds, 15 seconds, 1 minute, 3 minutes, 10 minutes
- Jitter: Up to 10% additional delay to prevent thundering herd
- Only HTTP 5xx and 429 responses trigger retries
- Deliveries timeout after 30 seconds
<Callout type="info">
Webhook deliveries are processed asynchronously and don't affect workflow execution performance.
</Callout>
## Best Practices
1. **Polling Strategy**: When polling for logs, use cursor-based pagination with `order=asc` and `startDate` to fetch new logs efficiently.
2. **Webhook Security**: Always configure a webhook secret and verify signatures to ensure requests are from Sim.
3. **Idempotency**: Use the `Idempotency-Key` header to detect and handle duplicate webhook deliveries (see the sketch after this list).
4. **Privacy**: By default, `finalOutput` and `traceSpans` are excluded from responses. Only enable these if you need the data and understand the privacy implications.
5. **Rate Limiting**: Implement exponential backoff when you receive 429 responses. Check the `Retry-After` header for the recommended wait time.
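A minimal sketch of the idempotency check from point 3, assuming an Express handler like the one in the processing example below; the in-memory set stands in for a durable store such as Redis, which a production service would need.
```javascript
// Skip deliveries that have already been processed, keyed by the delivery ID.
const seenDeliveries = new Set(); // assumption: use a persistent store in production

app.post('/sim-webhook', (req, res) => {
  const deliveryId = req.headers['idempotency-key']; // same value as sim-delivery-id
  if (deliveryId && seenDeliveries.has(deliveryId)) {
    return res.status(200).send('Duplicate delivery ignored');
  }
  if (deliveryId) seenDeliveries.add(deliveryId);
  // ...verify the signature and process the event as in the examples below...
  res.status(200).send('OK');
});
```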
## Rate Limiting
The API implements rate limiting to ensure fair usage:
- **Free plan**: 10 requests per minute
- **Pro plan**: 30 requests per minute
- **Team plan**: 60 requests per minute
- **Enterprise plan**: Custom limits
Rate limit information is included in response headers:
- `X-RateLimit-Limit`: Maximum requests per window
- `X-RateLimit-Remaining`: Requests remaining in current window
- `X-RateLimit-Reset`: ISO timestamp when the window resets
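A small retry helper built around these headers might look like the sketch below; the function name and attempt count are illustrative, and `Retry-After` is assumed to be given in seconds when present.
```javascript
// Retry a request on 429 responses, waiting for the rate-limit window to clear.
async function fetchWithBackoff(url, apiKey, maxAttempts = 3) {
  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    const res = await fetch(url, { headers: { 'x-api-key': apiKey } });
    if (res.status !== 429) return res;
    // Prefer Retry-After (assumed seconds); otherwise back off exponentially.
    const retryAfter = Number(res.headers.get('Retry-After'));
    const delayMs = retryAfter > 0 ? retryAfter * 1000 : 2 ** attempt * 1000;
    await new Promise((resolve) => setTimeout(resolve, delayMs));
  }
  throw new Error('Rate limited: retries exhausted');
}
```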
## Example: Polling for New Logs
```javascript
let cursor = null;
const workspaceId = 'YOUR_WORKSPACE_ID';
const startDate = new Date().toISOString();
async function pollLogs() {
const params = new URLSearchParams({
workspaceId,
startDate,
order: 'asc',
limit: '100'
});
if (cursor) {
params.append('cursor', cursor);
}
const response = await fetch(
`https://sim.ai/api/v1/logs?${params}`,
{
headers: {
'x-api-key': 'YOUR_API_KEY'
}
}
);
if (response.ok) {
const data = await response.json();
// Process new logs
for (const log of data.data) {
console.log(`New execution: ${log.executionId}`);
}
// Update cursor for next poll
if (data.nextCursor) {
cursor = data.nextCursor;
}
}
}
// Poll every 30 seconds
setInterval(pollLogs, 30000);
```
## Example: Processing Webhooks
```javascript
import express from 'express';
import crypto from 'crypto';
const app = express();
app.use(express.json());
app.post('/sim-webhook', (req, res) => {
// Verify signature
const signature = req.headers['sim-signature'];
const body = JSON.stringify(req.body);
if (!verifyWebhookSignature(body, signature, process.env.WEBHOOK_SECRET)) {
return res.status(401).send('Invalid signature');
}
// Check timestamp to prevent replay attacks
const timestamp = parseInt(req.headers['sim-timestamp']);
const fiveMinutesAgo = Date.now() - (5 * 60 * 1000);
if (timestamp < fiveMinutesAgo) {
return res.status(401).send('Timestamp too old');
}
// Process the webhook
const event = req.body;
switch (event.type) {
case 'workflow.execution.completed':
const { workflowId, executionId, status, cost } = event.data;
if (status === 'error') {
console.error(`Workflow ${workflowId} failed: ${executionId}`);
// Handle error...
} else {
console.log(`Workflow ${workflowId} completed: ${executionId}`);
console.log(`Cost: $${cost.total}`);
// Process successful execution...
}
break;
}
// Return 200 to acknowledge receipt
res.status(200).send('OK');
});
app.listen(3000, () => {
console.log('Webhook server listening on port 3000');
});
```

View File

@@ -1,4 +1,4 @@
{
"title": "Execution",
"pages": ["basics", "advanced"]
"pages": ["basics", "advanced", "api"]
}

View File

@@ -58,7 +58,7 @@ Retrieve detailed information about a specific Jira issue
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `domain` | string | Yes | Your Jira domain \(e.g., yourcompany.atlassian.net\) |
| `projectId` | string | No | Jira project ID to retrieve issues from. If not provided, all issues will be retrieved. |
| `projectId` | string | No | Jira project ID \(optional; not required to retrieve a single issue\). |
| `issueKey` | string | Yes | Jira issue key to retrieve \(e.g., PROJ-123\) |
| `cloudId` | string | No | Jira Cloud ID for the instance. If not provided, it will be fetched using the domain. |

View File

@@ -101,7 +101,7 @@ function ResetPasswordContent() {
</CardContent>
<CardFooter>
<p className='w-full text-center text-gray-500 text-sm'>
<Link href='/login' className='text-primary hover:underline'>
<Link href='/login' className='text-muted-foreground hover:underline'>
Back to login
</Link>
</p>

View File

@@ -3,7 +3,6 @@
*/
import { afterEach, beforeEach, vi } from 'vitest'
// Mock Next.js implementations
vi.mock('next/headers', () => ({
cookies: () => ({
get: vi.fn().mockReturnValue({ value: 'test-session-token' }),
@@ -13,7 +12,6 @@ vi.mock('next/headers', () => ({
}),
}))
// Mock auth utilities
vi.mock('@/lib/auth/session', () => ({
getSession: vi.fn().mockResolvedValue({
user: {
@@ -24,13 +22,10 @@ vi.mock('@/lib/auth/session', () => ({
}),
}))
// Configure Vitest environment
beforeEach(() => {
// Clear all mocks before each test
vi.clearAllMocks()
})
afterEach(() => {
// Ensure all mocks are restored after each test
vi.restoreAllMocks()
})

View File

@@ -944,12 +944,10 @@ export interface TestSetupOptions {
export function setupComprehensiveTestMocks(options: TestSetupOptions = {}) {
const { auth = { authenticated: true }, database = {}, storage, authApi, features = {} } = options
// Setup basic infrastructure mocks
setupCommonApiMocks()
mockUuid()
mockCryptoUuid()
// Setup authentication
const authMocks = mockAuth(auth.user)
if (auth.authenticated) {
authMocks.setAuthenticated(auth.user)
@@ -957,22 +955,18 @@ export function setupComprehensiveTestMocks(options: TestSetupOptions = {}) {
authMocks.setUnauthenticated()
}
// Setup database
const dbMocks = createMockDatabase(database)
// Setup storage if needed
let storageMocks
if (storage) {
storageMocks = createStorageProviderMocks(storage)
}
// Setup auth API if needed
let authApiMocks
if (authApi) {
authApiMocks = createAuthApiMocks(authApi)
}
// Setup feature-specific mocks
const featureMocks: any = {}
if (features.workflowUtils) {
featureMocks.workflowUtils = mockWorkflowUtils()
@@ -1008,12 +1002,10 @@ export function createMockDatabase(options: MockDatabaseOptions = {}) {
let selectCallCount = 0
// Helper to create error
const createDbError = (operation: string, message?: string) => {
return new Error(message || `Database ${operation} error`)
}
// Create chainable select mock
const createSelectChain = () => ({
from: vi.fn().mockReturnThis(),
leftJoin: vi.fn().mockReturnThis(),
@@ -1038,7 +1030,6 @@ export function createMockDatabase(options: MockDatabaseOptions = {}) {
}),
})
// Create insert chain
const createInsertChain = () => ({
values: vi.fn().mockImplementation(() => ({
returning: vi.fn().mockImplementation(() => {
@@ -1056,7 +1047,6 @@ export function createMockDatabase(options: MockDatabaseOptions = {}) {
})),
})
// Create update chain
const createUpdateChain = () => ({
set: vi.fn().mockImplementation(() => ({
where: vi.fn().mockImplementation(() => {
@@ -1068,7 +1058,6 @@ export function createMockDatabase(options: MockDatabaseOptions = {}) {
})),
})
// Create delete chain
const createDeleteChain = () => ({
where: vi.fn().mockImplementation(() => {
if (deleteOptions.throwError) {
@@ -1078,7 +1067,6 @@ export function createMockDatabase(options: MockDatabaseOptions = {}) {
}),
})
// Create transaction mock
const createTransactionMock = () => {
return vi.fn().mockImplementation(async (callback: any) => {
if (transactionOptions.throwError) {
@@ -1200,7 +1188,6 @@ export function setupKnowledgeMocks(
mocks.generateEmbedding = vi.fn().mockResolvedValue([0.1, 0.2, 0.3])
}
// Mock the knowledge utilities
vi.doMock('@/app/api/knowledge/utils', () => mocks)
return mocks
@@ -1218,12 +1205,10 @@ export function setupFileApiMocks(
) {
const { authenticated = true, storageProvider = 's3', cloudEnabled = true } = options
// Setup basic mocks
setupCommonApiMocks()
mockUuid()
mockCryptoUuid()
// Setup auth
const authMocks = mockAuth()
if (authenticated) {
authMocks.setAuthenticated()
@@ -1231,14 +1216,12 @@ export function setupFileApiMocks(
authMocks.setUnauthenticated()
}
// Setup file system mocks
mockFileSystem({
writeFileSuccess: true,
readFileContent: 'test content',
existsResult: true,
})
// Setup storage provider mocks (this will mock @/lib/uploads)
let storageMocks
if (storageProvider) {
storageMocks = createStorageProviderMocks({
@@ -1246,7 +1229,6 @@ export function setupFileApiMocks(
isCloudEnabled: cloudEnabled,
})
} else {
// If no storage provider specified, just mock the base functions
vi.doMock('@/lib/uploads', () => ({
getStorageProvider: vi.fn().mockReturnValue('local'),
isUsingCloudStorage: vi.fn().mockReturnValue(cloudEnabled),

View File

@@ -3,6 +3,7 @@ import { jwtDecode } from 'jwt-decode'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { account, user } from '@/db/schema'
@@ -18,7 +19,7 @@ interface GoogleIdToken {
* Get all OAuth connections for the current user
*/
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
// Get the session

View File

@@ -6,6 +6,7 @@ import { createLogger } from '@/lib/logs/console/logger'
import type { OAuthService } from '@/lib/oauth/oauth'
import { parseProvider } from '@/lib/oauth/oauth'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { account, user, workflow } from '@/db/schema'
@@ -23,7 +24,7 @@ interface GoogleIdToken {
* Get credentials for a specific provider
*/
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
// Get query params

View File

@@ -2,6 +2,7 @@ import { and, eq, like, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { account } from '@/db/schema'
@@ -13,7 +14,7 @@ const logger = createLogger('OAuthDisconnectAPI')
* Disconnect an OAuth provider for the current user
*/
export async function POST(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
// Get the session

View File

@@ -2,6 +2,7 @@ import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { db } from '@/db'
import { account } from '@/db/schema'
@@ -14,7 +15,7 @@ const logger = createLogger('MicrosoftFileAPI')
* Get a single file from Microsoft OneDrive
*/
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
// Get the session
const session = await getSession()

View File

@@ -2,6 +2,7 @@ import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { db } from '@/db'
import { account } from '@/db/schema'
@@ -14,7 +15,7 @@ const logger = createLogger('MicrosoftFilesAPI')
* Get Excel files from Microsoft OneDrive
*/
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8) // Generate a short request ID for correlation
const requestId = generateRequestId()
try {
// Get the session

View File

@@ -2,6 +2,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { getCredential, refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
export const dynamic = 'force-dynamic'
@@ -14,7 +15,7 @@ const logger = createLogger('OAuthTokenAPI')
* and workflow-based authentication (for server-side requests)
*/
export async function POST(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
logger.info(`[${requestId}] OAuth token API POST request received`)
@@ -59,7 +60,7 @@ export async function POST(request: NextRequest) {
* Get the access token for a specific credential
*/
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8) // Short request ID for correlation
const requestId = generateRequestId()
try {
// Get the credential ID from the query params

View File

@@ -2,6 +2,7 @@ import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { db } from '@/db'
import { account } from '@/db/schema'
@@ -14,7 +15,7 @@ const logger = createLogger('WealthboxItemAPI')
* Get a single item (note, contact, task) from Wealthbox
*/
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
// Get the session

View File

@@ -2,6 +2,7 @@ import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { db } from '@/db'
import { account } from '@/db/schema'
@@ -14,7 +15,7 @@ const logger = createLogger('WealthboxItemsAPI')
* Get items (notes, contacts, tasks) from Wealthbox
*/
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
// Get the session

View File

@@ -1,10 +1,10 @@
import crypto from 'crypto'
import { eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkInternalApiKey } from '@/lib/copilot/utils'
import { isBillingEnabled } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { userStats } from '@/db/schema'
import { calculateCost } from '@/providers/utils'
@@ -25,7 +25,7 @@ const UpdateCostSchema = z.object({
* Update user cost based on token usage with internal API key auth
*/
export async function POST(req: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const startTime = Date.now()
try {

View File

@@ -5,6 +5,7 @@ import { renderOTPEmail } from '@/components/emails/render-email'
import { sendEmail } from '@/lib/email/mailer'
import { createLogger } from '@/lib/logs/console/logger'
import { getRedisClient, markMessageAsProcessed, releaseLock } from '@/lib/redis'
import { generateRequestId } from '@/lib/utils'
import { addCorsHeaders, setChatAuthCookie } from '@/app/api/chat/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
import { db } from '@/db'
@@ -115,7 +116,7 @@ export async function POST(
{ params }: { params: Promise<{ subdomain: string }> }
) {
const { subdomain } = await params
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
logger.debug(`[${requestId}] Processing OTP request for subdomain: ${subdomain}`)
@@ -229,7 +230,7 @@ export async function PUT(
{ params }: { params: Promise<{ subdomain: string }> }
) {
const { subdomain } = await params
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
logger.debug(`[${requestId}] Verifying OTP for subdomain: ${subdomain}`)

View File

@@ -1,6 +1,7 @@
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import {
addCorsHeaders,
executeWorkflowForChat,
@@ -20,7 +21,7 @@ export async function POST(
{ params }: { params: Promise<{ subdomain: string }> }
) {
const { subdomain } = await params
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
logger.debug(`[${requestId}] Processing chat request for subdomain: ${subdomain}`)
@@ -141,7 +142,7 @@ export async function GET(
{ params }: { params: Promise<{ subdomain: string }> }
) {
const { subdomain } = await params
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
logger.debug(`[${requestId}] Fetching chat info for subdomain: ${subdomain}`)

View File

@@ -14,10 +14,6 @@ vi.mock('@/db', () => ({
},
}))
vi.mock('@/lib/utils', () => ({
decryptSecret: vi.fn().mockResolvedValue({ decrypted: 'test-secret' }),
}))
vi.mock('@/lib/logs/execution/logging-session', () => ({
LoggingSession: vi.fn().mockImplementation(() => ({
safeStart: vi.fn().mockResolvedValue(undefined),
@@ -38,6 +34,13 @@ vi.mock('@/stores/workflows/server-utils', () => ({
mergeSubblockState: vi.fn().mockReturnValue({}),
}))
const mockDecryptSecret = vi.fn()
vi.mock('@/lib/utils', () => ({
decryptSecret: mockDecryptSecret,
generateRequestId: vi.fn(),
}))
describe('Chat API Utils', () => {
beforeEach(() => {
vi.resetModules()
@@ -177,7 +180,10 @@ describe('Chat API Utils', () => {
})
describe('Chat auth validation', () => {
beforeEach(() => {
beforeEach(async () => {
vi.clearAllMocks()
mockDecryptSecret.mockResolvedValue({ decrypted: 'correct-password' })
vi.doMock('@/app/api/chat/utils', async (importOriginal) => {
const original = (await importOriginal()) as any
return {
@@ -190,13 +196,6 @@ describe('Chat API Utils', () => {
}),
}
})
// Mock decryptSecret globally for all auth tests
vi.doMock('@/lib/utils', () => ({
decryptSecret: vi.fn((encryptedValue) => {
return Promise.resolve({ decrypted: 'correct-password' })
}),
}))
})
it.concurrent('should allow access to public chats', async () => {

View File

@@ -10,7 +10,7 @@ import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
import { hasAdminPermission } from '@/lib/permissions/utils'
import { processStreamingBlockLogs } from '@/lib/tokenization'
import { getEmailDomain } from '@/lib/urls/utils'
import { decryptSecret } from '@/lib/utils'
import { decryptSecret, generateRequestId } from '@/lib/utils'
import { getBlock } from '@/blocks'
import { db } from '@/db'
import { chat, userStats, workflow } from '@/db/schema'
@@ -303,7 +303,7 @@ export async function executeWorkflowForChat(
input: string,
conversationId?: string
): Promise<any> {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
logger.debug(
`[${requestId}] Executing workflow for chat: ${chatId}${

View File

@@ -99,6 +99,7 @@ describe('Copilot Chat API Route', () => {
vi.doMock('@/lib/utils', () => ({
getRotatingApiKey: mockGetRotatingApiKey,
generateRequestId: vi.fn(() => 'test-request-id'),
}))
vi.doMock('@/lib/env', () => ({

View File

@@ -3,7 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { decryptSecret, encryptSecret } from '@/lib/utils'
import { decryptSecret, encryptSecret, generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { environment } from '@/db/schema'
import type { EnvironmentVariable } from '@/stores/settings/environment/types'
@@ -15,7 +15,7 @@ const EnvVarSchema = z.object({
})
export async function POST(req: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const session = await getSession()
@@ -72,7 +72,7 @@ export async function POST(req: NextRequest) {
}
export async function GET(request: Request) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const session = await getSession()

View File

@@ -12,10 +12,12 @@ import {
BLOB_CONFIG,
BLOB_COPILOT_CONFIG,
BLOB_KB_CONFIG,
BLOB_PROFILE_PICTURES_CONFIG,
S3_CHAT_CONFIG,
S3_CONFIG,
S3_COPILOT_CONFIG,
S3_KB_CONFIG,
S3_PROFILE_PICTURES_CONFIG,
} from '@/lib/uploads/setup'
import { validateFileType } from '@/lib/uploads/validation'
import { createErrorResponse, createOptionsResponse } from '@/app/api/files/utils'
@@ -30,7 +32,7 @@ interface PresignedUrlRequest {
chatId?: string
}
type UploadType = 'general' | 'knowledge-base' | 'chat' | 'copilot'
type UploadType = 'general' | 'knowledge-base' | 'chat' | 'copilot' | 'profile-pictures'
class PresignedUrlError extends Error {
constructor(
@@ -96,7 +98,9 @@ export async function POST(request: NextRequest) {
? 'chat'
: uploadTypeParam === 'copilot'
? 'copilot'
: 'general'
: uploadTypeParam === 'profile-pictures'
? 'profile-pictures'
: 'general'
if (uploadType === 'knowledge-base') {
const fileValidationError = validateFileType(fileName, contentType)
@@ -121,6 +125,21 @@ export async function POST(request: NextRequest) {
}
}
// Validate profile picture requirements
if (uploadType === 'profile-pictures') {
if (!sessionUserId?.trim()) {
throw new ValidationError(
'Authenticated user session is required for profile picture uploads'
)
}
// Only allow image uploads for profile pictures
if (!isImageFileType(contentType)) {
throw new ValidationError(
'Only image files (JPEG, PNG, GIF, WebP, SVG) are allowed for profile picture uploads'
)
}
}
if (!isUsingCloudStorage()) {
throw new StorageConfigError(
'Direct uploads are only available when cloud storage is enabled'
@@ -185,7 +204,9 @@ async function handleS3PresignedUrl(
? S3_CHAT_CONFIG
: uploadType === 'copilot'
? S3_COPILOT_CONFIG
: S3_CONFIG
: uploadType === 'profile-pictures'
? S3_PROFILE_PICTURES_CONFIG
: S3_CONFIG
if (!config.bucket || !config.region) {
throw new StorageConfigError(`S3 configuration missing for ${uploadType} uploads`)
@@ -200,6 +221,8 @@ async function handleS3PresignedUrl(
prefix = 'chat/'
} else if (uploadType === 'copilot') {
prefix = `${userId}/`
} else if (uploadType === 'profile-pictures') {
prefix = `${userId}/`
}
const uniqueKey = `${prefix}${uuidv4()}-${safeFileName}`
@@ -219,6 +242,9 @@ async function handleS3PresignedUrl(
} else if (uploadType === 'copilot') {
metadata.purpose = 'copilot'
metadata.userId = userId || ''
} else if (uploadType === 'profile-pictures') {
metadata.purpose = 'profile-pictures'
metadata.userId = userId || ''
}
const command = new PutObjectCommand({
@@ -239,9 +265,9 @@ async function handleS3PresignedUrl(
)
}
// For chat images and knowledge base files, use direct URLs since they need to be accessible by external services
// For chat images, knowledge base files, and profile pictures, use direct URLs since they need to be accessible by external services
const finalPath =
uploadType === 'chat' || uploadType === 'knowledge-base'
uploadType === 'chat' || uploadType === 'knowledge-base' || uploadType === 'profile-pictures'
? `https://${config.bucket}.s3.${config.region}.amazonaws.com/${uniqueKey}`
: `/api/files/serve/s3/${encodeURIComponent(uniqueKey)}`
@@ -285,7 +311,9 @@ async function handleBlobPresignedUrl(
? BLOB_CHAT_CONFIG
: uploadType === 'copilot'
? BLOB_COPILOT_CONFIG
: BLOB_CONFIG
: uploadType === 'profile-pictures'
? BLOB_PROFILE_PICTURES_CONFIG
: BLOB_CONFIG
if (
!config.accountName ||
@@ -304,6 +332,8 @@ async function handleBlobPresignedUrl(
prefix = 'chat/'
} else if (uploadType === 'copilot') {
prefix = `${userId}/`
} else if (uploadType === 'profile-pictures') {
prefix = `${userId}/`
}
const uniqueKey = `${prefix}${uuidv4()}-${safeFileName}`
@@ -339,10 +369,10 @@ async function handleBlobPresignedUrl(
const presignedUrl = `${blockBlobClient.url}?${sasToken}`
// For chat images, use direct Blob URLs since they need to be permanently accessible
// For chat images and profile pictures, use direct Blob URLs since they need to be permanently accessible
// For other files, use serve path for access control
const finalPath =
uploadType === 'chat'
uploadType === 'chat' || uploadType === 'profile-pictures'
? blockBlobClient.url
: `/api/files/serve/blob/${encodeURIComponent(uniqueKey)}`
@@ -362,6 +392,9 @@ async function handleBlobPresignedUrl(
} else if (uploadType === 'copilot') {
uploadHeaders['x-ms-meta-purpose'] = 'copilot'
uploadHeaders['x-ms-meta-userid'] = encodeURIComponent(userId || '')
} else if (uploadType === 'profile-pictures') {
uploadHeaders['x-ms-meta-purpose'] = 'profile-pictures'
uploadHeaders['x-ms-meta-userid'] = encodeURIComponent(userId || '')
}
return NextResponse.json({

View File

@@ -4,7 +4,7 @@ import { env, isTruthy } from '@/lib/env'
import { executeInE2B } from '@/lib/execution/e2b'
import { CodeLanguage, DEFAULT_CODE_LANGUAGE, isValidCodeLanguage } from '@/lib/execution/languages'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'
export const maxDuration = 60
@@ -533,7 +533,7 @@ function escapeRegExp(string: string): string {
}
export async function POST(req: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const startTime = Date.now()
let stdout = ''
let userCodeStartLine = 3 // Default value for error reporting

View File

@@ -7,6 +7,7 @@ import { getFromEmailAddress } from '@/lib/email/utils'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getEmailDomain } from '@/lib/urls/utils'
import { generateRequestId } from '@/lib/utils'
const logger = createLogger('HelpAPI')
@@ -17,7 +18,7 @@ const helpFormSchema = z.object({
})
export async function POST(req: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
// Get user session

View File

@@ -3,6 +3,7 @@ import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { createErrorResponse } from '@/app/api/workflows/utils'
import { db } from '@/db'
import { apiKey as apiKeyTable } from '@/db/schema'
@@ -14,7 +15,7 @@ export async function GET(
{ params }: { params: Promise<{ jobId: string }> }
) {
const { jobId: taskId } = await params
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
logger.debug(`[${requestId}] Getting status for task: ${taskId}`)

View File

@@ -1,9 +1,9 @@
import crypto from 'crypto'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { batchChunkOperation, createChunk, queryChunks } from '@/lib/knowledge/chunks/service'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { getUserId } from '@/app/api/auth/oauth/utils'
import { checkDocumentAccess, checkDocumentWriteAccess } from '@/app/api/knowledge/utils'
import { calculateCost } from '@/providers/utils'
@@ -34,7 +34,7 @@ export async function GET(
req: NextRequest,
{ params }: { params: Promise<{ id: string; documentId: string }> }
) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id: knowledgeBaseId, documentId } = await params
try {
@@ -106,7 +106,7 @@ export async function POST(
req: NextRequest,
{ params }: { params: Promise<{ id: string; documentId: string }> }
) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id: knowledgeBaseId, documentId } = await params
try {
@@ -229,7 +229,7 @@ export async function PATCH(
req: NextRequest,
{ params }: { params: Promise<{ id: string; documentId: string }> }
) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id: knowledgeBaseId, documentId } = await params
try {

View File

@@ -8,6 +8,7 @@ import {
updateDocument,
} from '@/lib/knowledge/documents/service'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { checkDocumentAccess, checkDocumentWriteAccess } from '@/app/api/knowledge/utils'
const logger = createLogger('DocumentByIdAPI')
@@ -36,7 +37,7 @@ export async function GET(
req: NextRequest,
{ params }: { params: Promise<{ id: string; documentId: string }> }
) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id: knowledgeBaseId, documentId } = await params
try {
@@ -79,7 +80,7 @@ export async function PUT(
req: NextRequest,
{ params }: { params: Promise<{ id: string; documentId: string }> }
) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id: knowledgeBaseId, documentId } = await params
try {
@@ -209,7 +210,7 @@ export async function DELETE(
req: NextRequest,
{ params }: { params: Promise<{ id: string; documentId: string }> }
) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id: knowledgeBaseId, documentId } = await params
try {

View File

@@ -7,6 +7,7 @@ import {
updateKnowledgeBase,
} from '@/lib/knowledge/service'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { checkKnowledgeBaseAccess, checkKnowledgeBaseWriteAccess } from '@/app/api/knowledge/utils'
const logger = createLogger('KnowledgeBaseByIdAPI')
@@ -27,7 +28,7 @@ const UpdateKnowledgeBaseSchema = z.object({
})
export async function GET(_req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
try {
@@ -69,7 +70,7 @@ export async function GET(_req: NextRequest, { params }: { params: Promise<{ id:
}
export async function PUT(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
try {
@@ -132,7 +133,7 @@ export async function PUT(req: NextRequest, { params }: { params: Promise<{ id:
}
export async function DELETE(_req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
try {

View File

@@ -3,6 +3,7 @@ import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createKnowledgeBase, getKnowledgeBases } from '@/lib/knowledge/service'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
const logger = createLogger('KnowledgeBaseAPI')
@@ -29,7 +30,7 @@ const CreateKnowledgeBaseSchema = z.object({
})
export async function GET(req: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const session = await getSession()
@@ -54,7 +55,7 @@ export async function GET(req: NextRequest) {
}
export async function POST(req: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const session = await getSession()

View File

@@ -34,6 +34,10 @@ vi.mock('@/lib/env', () => ({
typeof value === 'string' ? value === 'true' || value === '1' : Boolean(value),
}))
vi.mock('@/lib/utils', () => ({
generateRequestId: vi.fn(() => 'test-request-id'),
}))
vi.mock('@/lib/documents/utils', () => ({
retryWithExponentialBackoff: vi.fn().mockImplementation((fn) => fn()),
}))

View File

@@ -4,6 +4,7 @@ import { TAG_SLOTS } from '@/lib/knowledge/consts'
import { getDocumentTagDefinitions } from '@/lib/knowledge/tags/service'
import { createLogger } from '@/lib/logs/console/logger'
import { estimateTokenCount } from '@/lib/tokenization/estimators'
import { generateRequestId } from '@/lib/utils'
import { getUserId } from '@/app/api/auth/oauth/utils'
import { checkKnowledgeBaseAccess } from '@/app/api/knowledge/utils'
import { calculateCost } from '@/providers/utils'
@@ -57,7 +58,7 @@ const VectorSearchSchema = z
)
export async function POST(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const body = await request.json()

View File

@@ -2,6 +2,7 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { permissions, workflow, workflowExecutionLogs } from '@/db/schema'
@@ -10,7 +11,7 @@ const logger = createLogger('LogDetailsByIdAPI')
export const revalidate = 0
export async function GET(_request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const session = await getSession()

View File

@@ -3,44 +3,12 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { permissions, workflow, workflowExecutionLogs } from '@/db/schema'
const logger = createLogger('LogsAPI')
// Helper function to extract block executions from trace spans
function extractBlockExecutionsFromTraceSpans(traceSpans: any[]): any[] {
const blockExecutions: any[] = []
function processSpan(span: any) {
if (span.blockId) {
blockExecutions.push({
id: span.id,
blockId: span.blockId,
blockName: span.name || '',
blockType: span.type,
startedAt: span.startTime,
endedAt: span.endTime,
durationMs: span.duration || 0,
status: span.status || 'success',
errorMessage: span.output?.error || undefined,
inputData: span.input || {},
outputData: span.output || {},
cost: span.cost || undefined,
metadata: {},
})
}
// Process children recursively
if (span.children && Array.isArray(span.children)) {
span.children.forEach(processSpan)
}
}
traceSpans.forEach(processSpan)
return blockExecutions
}
export const revalidate = 0
const QueryParamsSchema = z.object({
@@ -58,7 +26,7 @@ const QueryParamsSchema = z.object({
})
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const session = await getSession()

View File

@@ -0,0 +1,99 @@
import { and, eq, isNull } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { withMcpAuth } from '@/lib/mcp/middleware'
import { mcpService } from '@/lib/mcp/service'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
import { db } from '@/db'
import { mcpServers } from '@/db/schema'
const logger = createLogger('McpServerRefreshAPI')
export const dynamic = 'force-dynamic'
/**
* POST - Refresh an MCP server connection (requires any workspace permission)
*/
export const POST = withMcpAuth('read')(
async (
request: NextRequest,
{ userId, workspaceId, requestId },
{ params }: { params: { id: string } }
) => {
const serverId = params.id
try {
logger.info(
`[${requestId}] Refreshing MCP server: ${serverId} in workspace: ${workspaceId}`,
{
userId,
}
)
const [server] = await db
.select()
.from(mcpServers)
.where(
and(
eq(mcpServers.id, serverId),
eq(mcpServers.workspaceId, workspaceId),
isNull(mcpServers.deletedAt)
)
)
.limit(1)
if (!server) {
return createMcpErrorResponse(
new Error('Server not found or access denied'),
'Server not found',
404
)
}
let connectionStatus: 'connected' | 'disconnected' | 'error' = 'error'
let toolCount = 0
let lastError: string | null = null
try {
const tools = await mcpService.discoverServerTools(userId, serverId, workspaceId)
connectionStatus = 'connected'
toolCount = tools.length
logger.info(
`[${requestId}] Successfully connected to server ${serverId}, discovered ${toolCount} tools`
)
} catch (error) {
connectionStatus = 'error'
lastError = error instanceof Error ? error.message : 'Connection test failed'
logger.warn(`[${requestId}] Failed to connect to server ${serverId}:`, error)
}
const [refreshedServer] = await db
.update(mcpServers)
.set({
lastToolsRefresh: new Date(),
connectionStatus,
lastError,
lastConnected: connectionStatus === 'connected' ? new Date() : server.lastConnected,
toolCount,
updatedAt: new Date(),
})
.where(eq(mcpServers.id, serverId))
.returning()
logger.info(`[${requestId}] Successfully refreshed MCP server: ${serverId}`)
return createMcpSuccessResponse({
status: connectionStatus,
toolCount,
lastConnected: refreshedServer?.lastConnected?.toISOString() || null,
error: lastError,
})
} catch (error) {
logger.error(`[${requestId}] Error refreshing MCP server:`, error)
return createMcpErrorResponse(
error instanceof Error ? error : new Error('Failed to refresh MCP server'),
'Failed to refresh MCP server',
500
)
}
}
)

View File
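
The MCP routes are wrapped in `withMcpAuth(<permission>)` from `@/lib/mcp/middleware`, which supplies `userId`, `workspaceId`, and `requestId` to the handler (and appears to pre-parse the request body, given the `getParsedBody(request)` fallback used elsewhere). The middleware itself is not shown in this diff; a rough sketch of the contract these handlers assume, with a placeholder permission check:

import type { NextRequest } from 'next/server'

// Sketch only: the real middleware in '@/lib/mcp/middleware' is not part of this diff.
type McpPermission = 'read' | 'write' | 'admin'
interface McpAuthContext {
  userId: string
  workspaceId: string
  requestId: string
}
type McpHandler<T> = (
  request: NextRequest,
  ctx: McpAuthContext,
  routeArgs: T
) => Promise<Response>

// Placeholder: the real project resolves the session, checks `permission`
// against the target workspace, and generates a short request ID.
async function resolveWorkspaceAuth(
  request: NextRequest,
  permission: McpPermission
): Promise<McpAuthContext | null> {
  void request
  void permission
  return null
}

export function withMcpAuth(permission: McpPermission) {
  return <T>(handler: McpHandler<T>) =>
    async (request: NextRequest, routeArgs: T): Promise<Response> => {
      const ctx = await resolveWorkspaceAuth(request, permission)
      if (!ctx) {
        return Response.json({ error: 'Unauthorized' }, { status: 401 })
      }
      return handler(request, ctx, routeArgs)
    }
}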

@@ -0,0 +1,92 @@
import { and, eq, isNull } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { mcpService } from '@/lib/mcp/service'
import { validateMcpServerUrl } from '@/lib/mcp/url-validator'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
import { db } from '@/db'
import { mcpServers } from '@/db/schema'
const logger = createLogger('McpServerAPI')
export const dynamic = 'force-dynamic'
/**
* PATCH - Update an MCP server in the workspace (requires write or admin permission)
*/
export const PATCH = withMcpAuth('write')(
async (
request: NextRequest,
{ userId, workspaceId, requestId },
{ params }: { params: { id: string } }
) => {
const serverId = params.id
try {
const body = getParsedBody(request) || (await request.json())
logger.info(`[${requestId}] Updating MCP server: ${serverId} in workspace: ${workspaceId}`, {
userId,
updates: Object.keys(body).filter((k) => k !== 'workspaceId'),
})
// Validate URL if being updated
if (
body.url &&
(body.transport === 'http' ||
body.transport === 'sse' ||
body.transport === 'streamable-http')
) {
const urlValidation = validateMcpServerUrl(body.url)
if (!urlValidation.isValid) {
return createMcpErrorResponse(
new Error(`Invalid MCP server URL: ${urlValidation.error}`),
'Invalid server URL',
400
)
}
body.url = urlValidation.normalizedUrl
}
// Remove workspaceId from body to prevent it from being updated
const { workspaceId: _, ...updateData } = body
const [updatedServer] = await db
.update(mcpServers)
.set({
...updateData,
updatedAt: new Date(),
})
.where(
and(
eq(mcpServers.id, serverId),
eq(mcpServers.workspaceId, workspaceId),
isNull(mcpServers.deletedAt)
)
)
.returning()
if (!updatedServer) {
return createMcpErrorResponse(
new Error('Server not found or access denied'),
'Server not found',
404
)
}
// Clear MCP service cache after update
mcpService.clearCache(workspaceId)
logger.info(`[${requestId}] Successfully updated MCP server: ${serverId}`)
return createMcpSuccessResponse({ server: updatedServer })
} catch (error) {
logger.error(`[${requestId}] Error updating MCP server:`, error)
return createMcpErrorResponse(
error instanceof Error ? error : new Error('Failed to update MCP server'),
'Failed to update MCP server',
500
)
}
}
)

View File

@@ -0,0 +1,166 @@
import { and, eq, isNull } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { mcpService } from '@/lib/mcp/service'
import type { McpTransport } from '@/lib/mcp/types'
import { validateMcpServerUrl } from '@/lib/mcp/url-validator'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
import { db } from '@/db'
import { mcpServers } from '@/db/schema'
const logger = createLogger('McpServersAPI')
export const dynamic = 'force-dynamic'
/**
* Check if transport type requires a URL
*/
function isUrlBasedTransport(transport: McpTransport): boolean {
return transport === 'http' || transport === 'sse' || transport === 'streamable-http'
}
/**
* GET - List all registered MCP servers for the workspace
*/
export const GET = withMcpAuth('read')(
async (request: NextRequest, { userId, workspaceId, requestId }) => {
try {
logger.info(`[${requestId}] Listing MCP servers for workspace ${workspaceId}`)
const servers = await db
.select()
.from(mcpServers)
.where(and(eq(mcpServers.workspaceId, workspaceId), isNull(mcpServers.deletedAt)))
logger.info(
`[${requestId}] Listed ${servers.length} MCP servers for workspace ${workspaceId}`
)
return createMcpSuccessResponse({ servers })
} catch (error) {
logger.error(`[${requestId}] Error listing MCP servers:`, error)
return createMcpErrorResponse(
error instanceof Error ? error : new Error('Failed to list MCP servers'),
'Failed to list MCP servers',
500
)
}
}
)
/**
* POST - Register a new MCP server for the workspace (requires write permission)
*/
export const POST = withMcpAuth('write')(
async (request: NextRequest, { userId, workspaceId, requestId }) => {
try {
const body = getParsedBody(request) || (await request.json())
logger.info(`[${requestId}] Registering new MCP server:`, {
name: body.name,
transport: body.transport,
workspaceId,
})
if (!body.name || !body.transport) {
return createMcpErrorResponse(
new Error('Missing required fields: name or transport'),
'Missing required fields',
400
)
}
if (isUrlBasedTransport(body.transport) && body.url) {
const urlValidation = validateMcpServerUrl(body.url)
if (!urlValidation.isValid) {
return createMcpErrorResponse(
new Error(`Invalid MCP server URL: ${urlValidation.error}`),
'Invalid server URL',
400
)
}
body.url = urlValidation.normalizedUrl
}
const serverId = body.id || crypto.randomUUID()
await db
.insert(mcpServers)
.values({
id: serverId,
workspaceId,
createdBy: userId,
name: body.name,
description: body.description,
transport: body.transport,
url: body.url,
headers: body.headers || {},
timeout: body.timeout || 30000,
retries: body.retries || 3,
enabled: body.enabled !== false,
createdAt: new Date(),
updatedAt: new Date(),
})
.returning()
mcpService.clearCache(workspaceId)
logger.info(`[${requestId}] Successfully registered MCP server: ${body.name}`)
return createMcpSuccessResponse({ serverId }, 201)
} catch (error) {
logger.error(`[${requestId}] Error registering MCP server:`, error)
return createMcpErrorResponse(
error instanceof Error ? error : new Error('Failed to register MCP server'),
'Failed to register MCP server',
500
)
}
}
)
/**
* DELETE - Delete an MCP server from the workspace (requires admin permission)
*/
export const DELETE = withMcpAuth('admin')(
async (request: NextRequest, { userId, workspaceId, requestId }) => {
try {
const { searchParams } = new URL(request.url)
const serverId = searchParams.get('serverId')
if (!serverId) {
return createMcpErrorResponse(
new Error('serverId parameter is required'),
'Missing required parameter',
400
)
}
logger.info(`[${requestId}] Deleting MCP server: ${serverId} from workspace: ${workspaceId}`)
const [deletedServer] = await db
.delete(mcpServers)
.where(and(eq(mcpServers.id, serverId), eq(mcpServers.workspaceId, workspaceId)))
.returning()
if (!deletedServer) {
return createMcpErrorResponse(
new Error('Server not found or access denied'),
'Server not found',
404
)
}
mcpService.clearCache(workspaceId)
logger.info(`[${requestId}] Successfully deleted MCP server: ${serverId}`)
return createMcpSuccessResponse({ message: `Server ${serverId} deleted successfully` })
} catch (error) {
logger.error(`[${requestId}] Error deleting MCP server:`, error)
return createMcpErrorResponse(
error instanceof Error ? error : new Error('Failed to delete MCP server'),
'Failed to delete MCP server',
500
)
}
}
)

View File
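
For reference, the registration handler above requires `name` and `transport`, validates `url` for the HTTP-based transports, and falls back to a 30s timeout, 3 retries, and `enabled: true`. A hypothetical client call; the `/api/mcp/servers` path is an assumption, since file names are not shown in this view:

// Body fields mirror what the POST handler reads; the route path is assumed.
interface RegisterMcpServerBody {
  id?: string
  name: string
  transport: string // e.g. 'http', 'sse', 'streamable-http'
  url?: string // validated and normalized for URL-based transports
  headers?: Record<string, string>
  timeout?: number // handler default: 30000 ms
  retries?: number // handler default: 3
  enabled?: boolean // handler default: true
}

async function registerMcpServer(body: RegisterMcpServerBody) {
  const res = await fetch('/api/mcp/servers', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
  })
  if (!res.ok) throw new Error(`Registration failed with status ${res.status}`)
  // Envelope shape depends on createMcpSuccessResponse (not shown); on a 201 the
  // payload carries the generated serverId.
  return res.json()
}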

@@ -0,0 +1,209 @@
import type { NextRequest } from 'next/server'
import { getEffectiveDecryptedEnv } from '@/lib/environment/utils'
import { createLogger } from '@/lib/logs/console/logger'
import { McpClient } from '@/lib/mcp/client'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import type { McpServerConfig, McpTransport } from '@/lib/mcp/types'
import { validateMcpServerUrl } from '@/lib/mcp/url-validator'
import { createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
const logger = createLogger('McpServerTestAPI')
export const dynamic = 'force-dynamic'
/**
* Check if transport type requires a URL
*/
function isUrlBasedTransport(transport: McpTransport): boolean {
return transport === 'http' || transport === 'sse' || transport === 'streamable-http'
}
/**
* Resolve environment variables in strings
*/
function resolveEnvVars(value: string, envVars: Record<string, string>): string {
const envMatches = value.match(/\{\{([^}]+)\}\}/g)
if (!envMatches) return value
let resolvedValue = value
for (const match of envMatches) {
const envKey = match.slice(2, -2).trim()
const envValue = envVars[envKey]
if (envValue === undefined) {
logger.warn(`Environment variable "${envKey}" not found in MCP server test`)
continue
}
resolvedValue = resolvedValue.replace(match, envValue)
}
return resolvedValue
}
interface TestConnectionRequest {
name: string
transport: McpTransport
url?: string
headers?: Record<string, string>
timeout?: number
workspaceId: string
}
interface TestConnectionResult {
success: boolean
error?: string
serverInfo?: {
name: string
version: string
}
negotiatedVersion?: string
supportedCapabilities?: string[]
toolCount?: number
warnings?: string[]
}
/**
* POST - Test connection to an MCP server before registering it
*/
export const POST = withMcpAuth('write')(
async (request: NextRequest, { userId, workspaceId, requestId }) => {
try {
const body: TestConnectionRequest = getParsedBody(request) || (await request.json())
logger.info(`[${requestId}] Testing MCP server connection:`, {
name: body.name,
transport: body.transport,
url: body.url ? `${body.url.substring(0, 50)}...` : undefined, // Partial URL for security
workspaceId,
})
if (!body.name || !body.transport) {
return createMcpErrorResponse(
new Error('Missing required fields: name and transport are required'),
'Missing required fields',
400
)
}
if (isUrlBasedTransport(body.transport)) {
if (!body.url) {
return createMcpErrorResponse(
new Error('URL is required for HTTP-based transports'),
'Missing required URL',
400
)
}
const urlValidation = validateMcpServerUrl(body.url)
if (!urlValidation.isValid) {
return createMcpErrorResponse(
new Error(`Invalid MCP server URL: ${urlValidation.error}`),
'Invalid server URL',
400
)
}
body.url = urlValidation.normalizedUrl
}
let resolvedUrl = body.url
let resolvedHeaders = body.headers || {}
try {
const envVars = await getEffectiveDecryptedEnv(userId, workspaceId)
if (resolvedUrl) {
resolvedUrl = resolveEnvVars(resolvedUrl, envVars)
}
const resolvedHeadersObj: Record<string, string> = {}
for (const [key, value] of Object.entries(resolvedHeaders)) {
resolvedHeadersObj[key] = resolveEnvVars(value, envVars)
}
resolvedHeaders = resolvedHeadersObj
} catch (envError) {
logger.warn(
`[${requestId}] Failed to resolve environment variables, using raw values:`,
envError
)
}
const testConfig: McpServerConfig = {
id: `test-${requestId}`,
name: body.name,
transport: body.transport,
url: resolvedUrl,
headers: resolvedHeaders,
timeout: body.timeout || 10000,
retries: 1, // Only one retry for tests
enabled: true,
}
const testSecurityPolicy = {
requireConsent: false,
auditLevel: 'none' as const,
maxToolExecutionsPerHour: 0,
}
const result: TestConnectionResult = { success: false }
let client: McpClient | null = null
try {
client = new McpClient(testConfig, testSecurityPolicy)
await client.connect()
result.success = true
result.negotiatedVersion = client.getNegotiatedVersion()
try {
const tools = await client.listTools()
result.toolCount = tools.length
} catch (toolError) {
logger.warn(`[${requestId}] Could not list tools from test server:`, toolError)
result.warnings = result.warnings || []
result.warnings.push('Could not list tools from server')
}
const clientVersionInfo = McpClient.getVersionInfo()
if (result.negotiatedVersion !== clientVersionInfo.preferred) {
result.warnings = result.warnings || []
result.warnings.push(
`Server uses protocol version '${result.negotiatedVersion}' instead of preferred '${clientVersionInfo.preferred}'`
)
}
logger.info(`[${requestId}] MCP server test successful:`, {
name: body.name,
negotiatedVersion: result.negotiatedVersion,
toolCount: result.toolCount,
capabilities: result.supportedCapabilities,
})
} catch (error) {
logger.warn(`[${requestId}] MCP server test failed:`, error)
result.success = false
if (error instanceof Error) {
result.error = error.message
} else {
result.error = 'Unknown connection error'
}
} finally {
if (client) {
try {
await client.disconnect()
} catch (disconnectError) {
logger.debug(`[${requestId}] Test client disconnect error (expected):`, disconnectError)
}
}
}
return createMcpSuccessResponse(result, result.success ? 200 : 400)
} catch (error) {
logger.error(`[${requestId}] Error testing MCP server connection:`, error)
return createMcpErrorResponse(
error instanceof Error ? error : new Error('Failed to test server connection'),
'Failed to test server connection',
500
)
}
}
)

View File
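
The `{{VAR}}` substitution used while testing a connection leaves unknown variables in place and only warns. The same behavior as a standalone helper, with a worked example (host and key names are illustrative):

// Standalone version of the {{VAR}} substitution in the route above;
// unresolved variables are left untouched, as in the original.
function resolveEnvVars(value: string, envVars: Record<string, string>): string {
  return value.replace(/\{\{([^}]+)\}\}/g, (match, key: string) => {
    const resolved = envVars[key.trim()]
    return resolved === undefined ? match : resolved
  })
}

// resolveEnvVars('https://{{MCP_HOST}}/sse?key={{MCP_KEY}}', { MCP_HOST: 'mcp.example.com' })
// -> 'https://mcp.example.com/sse?key={{MCP_KEY}}'   (MCP_KEY stays unresolved)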

@@ -0,0 +1,122 @@
import type { NextRequest } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { mcpService } from '@/lib/mcp/service'
import type { McpToolDiscoveryResponse } from '@/lib/mcp/types'
import { categorizeError, createMcpErrorResponse, createMcpSuccessResponse } from '@/lib/mcp/utils'
const logger = createLogger('McpToolDiscoveryAPI')
export const dynamic = 'force-dynamic'
/**
 * GET - Discover all tools from the workspace's MCP servers
*/
export const GET = withMcpAuth('read')(
async (request: NextRequest, { userId, workspaceId, requestId }) => {
try {
const { searchParams } = new URL(request.url)
const serverId = searchParams.get('serverId')
const forceRefresh = searchParams.get('refresh') === 'true'
logger.info(`[${requestId}] Discovering MCP tools for user ${userId}`, {
serverId,
workspaceId,
forceRefresh,
})
let tools
if (serverId) {
tools = await mcpService.discoverServerTools(userId, serverId, workspaceId)
} else {
tools = await mcpService.discoverTools(userId, workspaceId, forceRefresh)
}
const byServer: Record<string, number> = {}
for (const tool of tools) {
byServer[tool.serverId] = (byServer[tool.serverId] || 0) + 1
}
const responseData: McpToolDiscoveryResponse = {
tools,
totalCount: tools.length,
byServer,
}
logger.info(
`[${requestId}] Discovered ${tools.length} tools from ${Object.keys(byServer).length} servers`
)
return createMcpSuccessResponse(responseData)
} catch (error) {
logger.error(`[${requestId}] Error discovering MCP tools:`, error)
const { message, status } = categorizeError(error)
return createMcpErrorResponse(new Error(message), 'Failed to discover MCP tools', status)
}
}
)
/**
* POST - Refresh tool discovery for specific servers
*/
export const POST = withMcpAuth('read')(
async (request: NextRequest, { userId, workspaceId, requestId }) => {
try {
const body = getParsedBody(request) || (await request.json())
const { serverIds } = body
if (!Array.isArray(serverIds)) {
return createMcpErrorResponse(
new Error('serverIds must be an array'),
'Invalid request format',
400
)
}
logger.info(
`[${requestId}] Refreshing tool discovery for user ${userId}, servers:`,
serverIds
)
const results = await Promise.allSettled(
serverIds.map(async (serverId: string) => {
const tools = await mcpService.discoverServerTools(userId, serverId, workspaceId)
return { serverId, toolCount: tools.length }
})
)
const successes: Array<{ serverId: string; toolCount: number }> = []
const failures: Array<{ serverId: string; error: string }> = []
results.forEach((result, index) => {
const serverId = serverIds[index]
if (result.status === 'fulfilled') {
successes.push(result.value)
} else {
failures.push({
serverId,
error: result.reason instanceof Error ? result.reason.message : 'Unknown error',
})
}
})
const responseData = {
refreshed: successes,
failed: failures,
summary: {
total: serverIds.length,
successful: successes.length,
failed: failures.length,
},
}
logger.info(
`[${requestId}] Tool discovery refresh completed: ${successes.length}/${serverIds.length} successful`
)
return createMcpSuccessResponse(responseData)
} catch (error) {
logger.error(`[${requestId}] Error refreshing tool discovery:`, error)
const { message, status } = categorizeError(error)
return createMcpErrorResponse(new Error(message), 'Failed to refresh tool discovery', status)
}
}
)

View File
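
The refresh endpoint above accepts a `serverIds` array and reports per-server outcomes via `Promise.allSettled`. The request and response shapes it implies (interface names are illustrative):

// Derived from the POST handler above; only the names are invented here.
interface RefreshToolsRequest {
  serverIds: string[]
}

interface RefreshToolsResponse {
  refreshed: Array<{ serverId: string; toolCount: number }>
  failed: Array<{ serverId: string; error: string }>
  summary: { total: number; successful: number; failed: number }
}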

@@ -0,0 +1,252 @@
import type { NextRequest } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { mcpService } from '@/lib/mcp/service'
import type { McpTool, McpToolCall, McpToolResult } from '@/lib/mcp/types'
import {
categorizeError,
createMcpErrorResponse,
createMcpSuccessResponse,
MCP_CONSTANTS,
validateStringParam,
} from '@/lib/mcp/utils'
const logger = createLogger('McpToolExecutionAPI')
export const dynamic = 'force-dynamic'
// Type definitions for improved type safety
interface SchemaProperty {
type: 'string' | 'number' | 'boolean' | 'object' | 'array'
description?: string
enum?: unknown[]
format?: string
items?: SchemaProperty
properties?: Record<string, SchemaProperty>
}
interface ToolExecutionResult {
success: boolean
output?: McpToolResult
error?: string
}
/**
* Type guard to safely check if a schema property has a type field
*/
function hasType(prop: unknown): prop is SchemaProperty {
return typeof prop === 'object' && prop !== null && 'type' in prop
}
/**
* POST - Execute a tool on an MCP server
*/
export const POST = withMcpAuth('read')(
async (request: NextRequest, { userId, workspaceId, requestId }) => {
try {
const body = getParsedBody(request) || (await request.json())
logger.info(`[${requestId}] MCP tool execution request received`, {
hasAuthHeader: !!request.headers.get('authorization'),
authHeaderType: request.headers.get('authorization')?.substring(0, 10),
bodyKeys: Object.keys(body),
serverId: body.serverId,
toolName: body.toolName,
hasWorkflowId: !!body.workflowId,
workflowId: body.workflowId,
userId: userId,
})
const { serverId, toolName, arguments: args } = body
const serverIdValidation = validateStringParam(serverId, 'serverId')
if (!serverIdValidation.isValid) {
logger.warn(`[${requestId}] Invalid serverId: ${serverId}`)
return createMcpErrorResponse(new Error(serverIdValidation.error), 'Invalid serverId', 400)
}
const toolNameValidation = validateStringParam(toolName, 'toolName')
if (!toolNameValidation.isValid) {
logger.warn(`[${requestId}] Invalid toolName: ${toolName}`)
return createMcpErrorResponse(new Error(toolNameValidation.error), 'Invalid toolName', 400)
}
logger.info(
`[${requestId}] Executing tool ${toolName} on server ${serverId} for user ${userId} in workspace ${workspaceId}`
)
let tool = null
try {
const tools = await mcpService.discoverServerTools(userId, serverId, workspaceId)
tool = tools.find((t) => t.name === toolName)
if (!tool) {
return createMcpErrorResponse(
new Error(
`Tool ${toolName} not found on server ${serverId}. Available tools: ${tools.map((t) => t.name).join(', ')}`
),
'Tool not found',
404
)
}
// Parse array arguments based on tool schema
if (tool.inputSchema?.properties) {
for (const [paramName, paramSchema] of Object.entries(tool.inputSchema.properties)) {
const schema = paramSchema as any
if (
schema.type === 'array' &&
args[paramName] !== undefined &&
typeof args[paramName] === 'string'
) {
const stringValue = args[paramName].trim()
if (stringValue) {
try {
// Try to parse as JSON first (handles ["item1", "item2"])
const parsed = JSON.parse(stringValue)
if (Array.isArray(parsed)) {
args[paramName] = parsed
} else {
// JSON parsed but not an array, wrap in array
args[paramName] = [parsed]
}
} catch (error) {
// JSON parsing failed - treat as comma-separated if contains commas, otherwise single item
if (stringValue.includes(',')) {
args[paramName] = stringValue
.split(',')
.map((item) => item.trim())
.filter((item) => item)
} else {
// Single item - wrap in array since schema expects array
args[paramName] = [stringValue]
}
}
} else {
// Empty string becomes empty array
args[paramName] = []
}
}
}
}
} catch (error) {
logger.warn(
`[${requestId}] Failed to discover tools for validation, proceeding anyway:`,
error
)
}
if (tool) {
const validationError = validateToolArguments(tool, args)
if (validationError) {
logger.warn(`[${requestId}] Tool validation failed: ${validationError}`)
return createMcpErrorResponse(
new Error(`Invalid arguments for tool ${toolName}: ${validationError}`),
'Invalid tool arguments',
400
)
}
}
const toolCall: McpToolCall = {
name: toolName,
arguments: args || {},
}
const result = await Promise.race([
mcpService.executeTool(userId, serverId, toolCall, workspaceId),
new Promise<never>((_, reject) =>
setTimeout(
() => reject(new Error('Tool execution timeout')),
MCP_CONSTANTS.EXECUTION_TIMEOUT
)
),
])
const transformedResult = transformToolResult(result)
if (result.isError) {
logger.warn(`[${requestId}] Tool execution returned error for ${toolName} on ${serverId}`)
return createMcpErrorResponse(
transformedResult,
transformedResult.error || 'Tool execution failed',
400
)
}
logger.info(`[${requestId}] Successfully executed tool ${toolName} on server ${serverId}`)
return createMcpSuccessResponse(transformedResult)
} catch (error) {
logger.error(`[${requestId}] Error executing MCP tool:`, error)
const { message, status } = categorizeError(error)
return createMcpErrorResponse(new Error(message), message, status)
}
}
)
/**
* Validate tool arguments against schema
*/
function validateToolArguments(tool: McpTool, args: Record<string, unknown>): string | null {
if (!tool.inputSchema) {
return null // No schema to validate against
}
const schema = tool.inputSchema
if (schema.required && Array.isArray(schema.required)) {
for (const requiredProp of schema.required) {
if (!(requiredProp in (args || {}))) {
return `Missing required property: ${requiredProp}`
}
}
}
if (schema.properties && args) {
for (const [propName, propSchema] of Object.entries(schema.properties)) {
const propValue = args[propName]
if (propValue !== undefined && hasType(propSchema)) {
const expectedType = propSchema.type
const actualType = typeof propValue
if (expectedType === 'string' && actualType !== 'string') {
return `Property ${propName} must be a string`
}
if (expectedType === 'number' && actualType !== 'number') {
return `Property ${propName} must be a number`
}
if (expectedType === 'boolean' && actualType !== 'boolean') {
return `Property ${propName} must be a boolean`
}
if (
expectedType === 'object' &&
(actualType !== 'object' || propValue === null || Array.isArray(propValue))
) {
return `Property ${propName} must be an object`
}
if (expectedType === 'array' && !Array.isArray(propValue)) {
return `Property ${propName} must be an array`
}
}
}
}
return null
}
/**
* Transform MCP tool result to platform format
*/
function transformToolResult(result: McpToolResult): ToolExecutionResult {
if (result.isError) {
return {
success: false,
error: result.content?.[0]?.text || 'Tool execution failed',
}
}
return {
success: true,
output: result,
}
}

View File
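
The array coercion inlined in the execution route (JSON array kept as-is, JSON scalar wrapped, comma-separated string split, empty string becomes `[]`) reads more clearly as a small helper; a standalone sketch with the same rules:

// Mirrors the coercion rules above for schema parameters of type 'array'.
function coerceToArray(value: string): unknown[] {
  const trimmed = value.trim()
  if (!trimmed) return []
  try {
    // JSON first: handles '["item1", "item2"]' and single JSON scalars.
    const parsed = JSON.parse(trimmed)
    return Array.isArray(parsed) ? parsed : [parsed]
  } catch {
    // Not JSON: split on commas if present, otherwise wrap the single item.
    return trimmed.includes(',')
      ? trimmed.split(',').map((item) => item.trim()).filter(Boolean)
      : [trimmed]
  }
}

// coerceToArray('["a","b"]') -> ['a', 'b']
// coerceToArray('a, b')      -> ['a', 'b']
// coerceToArray('a')         -> ['a']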

@@ -1,6 +1,7 @@
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { memory } from '@/db/schema'
@@ -13,7 +14,7 @@ export const runtime = 'nodejs'
* GET handler for retrieving a specific memory by ID
*/
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
try {
@@ -85,7 +86,7 @@ export async function DELETE(
request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
try {
@@ -156,7 +157,7 @@ export async function DELETE(
* PUT handler for updating a specific memory
*/
export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
try {

View File

@@ -1,6 +1,7 @@
import { and, eq, isNull, like } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { memory } from '@/db/schema'
@@ -18,7 +19,7 @@ export const runtime = 'nodejs'
* - workflowId: Filter by workflow ID (required)
*/
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
logger.info(`[${requestId}] Processing memory search request`)
@@ -101,7 +102,7 @@ export async function GET(request: NextRequest) {
* - workflowId: ID of the workflow this memory belongs to
*/
export async function POST(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
logger.info(`[${requestId}] Processing memory creation request`)

View File

@@ -1,5 +1,6 @@
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import type { StreamingExecution } from '@/executor/types'
import { executeProviderRequest } from '@/providers'
import { getApiKey } from '@/providers/utils'
@@ -12,7 +13,7 @@ export const dynamic = 'force-dynamic'
* Server-side proxy for provider requests
*/
export async function POST(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const startTime = Date.now()
try {
@@ -36,6 +37,7 @@ export async function POST(request: NextRequest) {
azureApiVersion,
responseFormat,
workflowId,
workspaceId,
stream,
messages,
environmentVariables,
@@ -104,6 +106,7 @@ export async function POST(request: NextRequest) {
azureApiVersion,
responseFormat,
workflowId,
workspaceId,
stream,
messages,
environmentVariables,

View File

@@ -1,6 +1,7 @@
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { validateImageUrl } from '@/lib/security/url-validation'
import { generateRequestId } from '@/lib/utils'
const logger = createLogger('ImageProxyAPI')
@@ -11,7 +12,7 @@ const logger = createLogger('ImageProxyAPI')
export async function GET(request: NextRequest) {
const url = new URL(request.url)
const imageUrl = url.searchParams.get('url')
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
if (!imageUrl) {
logger.error(`[${requestId}] Missing 'url' parameter`)

View File

@@ -2,6 +2,7 @@ import { NextResponse } from 'next/server'
import { isDev } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console/logger'
import { validateProxyUrl } from '@/lib/security/url-validation'
import { generateRequestId } from '@/lib/utils'
import { executeTool } from '@/tools'
import { getTool, validateRequiredParametersAfterMerge } from '@/tools/utils'
@@ -74,7 +75,7 @@ const createErrorResponse = (error: any, status = 500, additionalData = {}) => {
export async function GET(request: Request) {
const url = new URL(request.url)
const targetUrl = url.searchParams.get('url')
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
if (!targetUrl) {
logger.error(`[${requestId}] Missing 'url' parameter`)
@@ -167,7 +168,7 @@ export async function GET(request: Request) {
}
export async function POST(request: Request) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const startTime = new Date()
const startTimeISO = startTime.toISOString()

View File

@@ -1,9 +1,9 @@
import crypto from 'crypto'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { workflow, workflowSchedule } from '@/db/schema'
@@ -18,7 +18,7 @@ export async function DELETE(
request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const { id } = await params
@@ -85,7 +85,7 @@ export async function DELETE(
* Update a schedule - can be used to reactivate a disabled schedule
*/
export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const { id } = await params

View File

@@ -3,13 +3,14 @@ import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { workflow, workflowSchedule } from '@/db/schema'
const logger = createLogger('ScheduleStatusAPI')
export async function GET(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
const scheduleId = id

View File

@@ -15,7 +15,7 @@ import {
getScheduleTimeValues,
getSubBlockValue,
} from '@/lib/schedules/utils'
import { decryptSecret } from '@/lib/utils'
import { decryptSecret, generateRequestId } from '@/lib/utils'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
import { updateWorkflowRunCounts } from '@/lib/workflows/utils'
import { db } from '@/db'
@@ -23,10 +23,8 @@ import { userStats, workflow, workflowSchedule } from '@/db/schema'
import { Executor } from '@/executor'
import { Serializer } from '@/serializer'
import { RateLimiter } from '@/services/queue'
import type { SubscriptionPlan } from '@/services/queue/types'
import { mergeSubblockState } from '@/stores/workflows/server-utils'
// Add dynamic export to prevent caching
export const dynamic = 'force-dynamic'
const logger = createLogger('ScheduledExecuteAPI')
@@ -67,7 +65,7 @@ const runningExecutions = new Set<string>()
export async function GET() {
logger.info(`Scheduled execution triggered at ${new Date().toISOString()}`)
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const now = new Date()
let dueSchedules: (typeof workflowSchedule.$inferSelect)[] = []
@@ -112,8 +110,6 @@ export async function GET() {
// Check rate limits for scheduled execution (checks both personal and org subscriptions)
const userSubscription = await getHighestPrioritySubscription(workflowRecord.userId)
const subscriptionPlan = (userSubscription?.plan || 'free') as SubscriptionPlan
const rateLimiter = new RateLimiter()
const rateLimitCheck = await rateLimiter.checkRateLimitWithSubscription(
workflowRecord.userId,

View File

@@ -1,4 +1,3 @@
import crypto from 'crypto'
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
@@ -13,6 +12,7 @@ import {
getSubBlockValue,
validateCronExpression,
} from '@/lib/schedules/utils'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { workflow, workflowSchedule } from '@/db/schema'
@@ -65,7 +65,7 @@ function hasValidScheduleConfig(
* Get schedule information for a workflow
*/
export async function GET(req: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const url = new URL(req.url)
const workflowId = url.searchParams.get('workflowId')
const blockId = url.searchParams.get('blockId')
@@ -165,7 +165,7 @@ export async function GET(req: NextRequest) {
* Create or update a schedule for a workflow
*/
export async function POST(req: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const session = await getSession()

View File

@@ -4,6 +4,7 @@ import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { hasAdminPermission } from '@/lib/permissions/utils'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { templates, workflow } from '@/db/schema'
@@ -13,7 +14,7 @@ export const revalidate = 0
// GET /api/templates/[id] - Retrieve a single template by ID
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
try {
@@ -77,7 +78,7 @@ const updateTemplateSchema = z.object({
// PUT /api/templates/[id] - Update a template
export async function PUT(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
try {
@@ -163,7 +164,7 @@ export async function DELETE(
request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
try {

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { templateStars, templates } from '@/db/schema'
@@ -13,7 +14,7 @@ export const revalidate = 0
// GET /api/templates/[id]/star - Check if user has starred this template
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
try {
@@ -47,7 +48,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
// POST /api/templates/[id]/star - Add a star to the template
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
try {
@@ -123,7 +124,7 @@ export async function DELETE(
request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
try {

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { templates, workflow, workflowBlocks, workflowEdges } from '@/db/schema'
@@ -13,7 +14,7 @@ export const revalidate = 0
// POST /api/templates/[id]/use - Use a template (increment views and create workflow)
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
try {

View File

@@ -4,6 +4,7 @@ import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { templateStars, templates, workflow } from '@/db/schema'
@@ -82,7 +83,7 @@ const QueryParamsSchema = z.object({
// GET /api/templates - Retrieve templates
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const session = await getSession()
@@ -185,7 +186,7 @@ export async function GET(request: NextRequest) {
// POST /api/templates - Create a new template
export async function POST(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const session = await getSession()

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { getUserId } from '@/app/api/auth/oauth/utils'
import { db } from '@/db'
import { customTools } from '@/db/schema'
@@ -33,7 +34,7 @@ const CustomToolSchema = z.object({
// GET - Fetch all custom tools for the user
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const searchParams = request.nextUrl.searchParams
const workflowId = searchParams.get('workflowId')
@@ -69,7 +70,7 @@ export async function GET(request: NextRequest) {
// POST - Create or update custom tools
export async function POST(req: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const session = await getSession()
@@ -162,7 +163,7 @@ export async function POST(req: NextRequest) {
// DELETE - Delete a custom tool by ID
export async function DELETE(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const searchParams = request.nextUrl.searchParams
const toolId = searchParams.get('id')

View File

@@ -85,7 +85,8 @@ export async function POST(request: Request) {
logger.info(`Fetching all Discord channels for server: ${serverId}`)
// Fetch all channels from Discord API
// Listing guild channels with a bot token is allowed if the bot is in the guild.
// Keep the request, but on a non-OK response return an empty list so the selector doesn't hard fail.
const response = await fetch(`https://discord.com/api/v10/guilds/${serverId}/channels`, {
method: 'GET',
headers: {
@@ -95,20 +96,14 @@ export async function POST(request: Request) {
})
if (!response.ok) {
logger.error('Discord API error:', {
status: response.status,
statusText: response.statusText,
})
let errorMessage
try {
const errorData = await response.json()
logger.error('Error details:', errorData)
errorMessage = errorData.message || `Failed to fetch channels (${response.status})`
} catch (_e) {
errorMessage = `Failed to fetch channels: ${response.status} ${response.statusText}`
}
return NextResponse.json({ error: errorMessage }, { status: response.status })
logger.warn(
'Discord API returned non-OK for channels; returning empty list to avoid UX break',
{
status: response.status,
statusText: response.statusText,
}
)
return NextResponse.json({ channels: [] })
}
const channels = (await response.json()) as DiscordChannel[]

View File

@@ -64,46 +64,14 @@ export async function POST(request: Request) {
})
}
// Otherwise, fetch all servers the bot is in
logger.info('Fetching all Discord servers')
const response = await fetch('https://discord.com/api/v10/users/@me/guilds', {
method: 'GET',
headers: {
Authorization: `Bot ${botToken}`,
'Content-Type': 'application/json',
},
})
if (!response.ok) {
logger.error('Discord API error:', {
status: response.status,
statusText: response.statusText,
})
let errorMessage
try {
const errorData = await response.json()
logger.error('Error details:', errorData)
errorMessage = errorData.message || `Failed to fetch servers (${response.status})`
} catch (_e) {
errorMessage = `Failed to fetch servers: ${response.status} ${response.statusText}`
}
return NextResponse.json({ error: errorMessage }, { status: response.status })
}
const servers = (await response.json()) as DiscordServer[]
logger.info(`Successfully fetched ${servers.length} servers`)
return NextResponse.json({
servers: servers.map((server: DiscordServer) => ({
id: server.id,
name: server.name,
icon: server.icon
? `https://cdn.discordapp.com/icons/${server.id}/${server.icon}.png`
: null,
})),
})
// Listing guilds via REST requires a user OAuth2 access token with the 'guilds' scope.
// A bot token cannot call /users/@me/guilds and will return 401.
// Since this selector only has a bot token, return an empty list instead of erroring
// and let users provide a Server ID in advanced mode.
logger.info(
'Skipping guild listing: bot token cannot list /users/@me/guilds; returning empty list'
)
return NextResponse.json({ servers: [] })
} catch (error) {
logger.error('Error processing request:', error)
return NextResponse.json(

View File

@@ -1,8 +1,8 @@
import { type NextRequest, NextResponse } from 'next/server'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
export const dynamic = 'force-dynamic'
const logger = createLogger('GoogleDriveFileAPI')
@@ -11,11 +11,10 @@ const logger = createLogger('GoogleDriveFileAPI')
* Get a single file from Google Drive
*/
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8) // Generate a short request ID for correlation
const requestId = generateRequestId()
logger.info(`[${requestId}] Google Drive file request received`)
try {
// Get the credential ID and file ID from the query params
const { searchParams } = new URL(request.url)
const credentialId = searchParams.get('credentialId')
const fileId = searchParams.get('fileId')
@@ -31,7 +30,6 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 })
}
// Refresh access token if needed using the utility function
const accessToken = await refreshAccessTokenIfNeeded(
credentialId,
authz.credentialOwnerUserId,
@@ -42,7 +40,6 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Failed to obtain valid access token' }, { status: 401 })
}
// Fetch the file from Google Drive API
logger.info(`[${requestId}] Fetching file ${fileId} from Google Drive API`)
const response = await fetch(
`https://www.googleapis.com/drive/v3/files/${fileId}?fields=id,name,mimeType,iconLink,webViewLink,thumbnailLink,createdTime,modifiedTime,size,owners,exportLinks,shortcutDetails&supportsAllDrives=true`,
@@ -69,7 +66,6 @@ export async function GET(request: NextRequest) {
const file = await response.json()
// In case of Google Docs, Sheets, etc., provide the export links
const exportFormats: { [key: string]: string } = {
'application/vnd.google-apps.document': 'application/pdf', // Google Docs to PDF
'application/vnd.google-apps.spreadsheet':
@@ -77,7 +73,6 @@ export async function GET(request: NextRequest) {
'application/vnd.google-apps.presentation': 'application/pdf', // Google Slides to PDF
}
// Resolve shortcuts transparently for UI stability
if (
file.mimeType === 'application/vnd.google-apps.shortcut' &&
file.shortcutDetails?.targetId
@@ -105,20 +100,16 @@ export async function GET(request: NextRequest) {
}
}
// If the file is a Google Docs, Sheets, or Slides file, we need to provide the export link
if (file.mimeType.startsWith('application/vnd.google-apps.')) {
const format = exportFormats[file.mimeType] || 'application/pdf'
if (!file.exportLinks) {
// If export links are not available in the response, try to construct one
file.downloadUrl = `https://www.googleapis.com/drive/v3/files/${file.id}/export?mimeType=${encodeURIComponent(
format
)}`
} else {
// Use the export link from the response if available
file.downloadUrl = file.exportLinks[format]
}
} else {
// For regular files, use the download link
file.downloadUrl = `https://www.googleapis.com/drive/v3/files/${file.id}?alt=media`
}

View File
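
The handler above picks a `downloadUrl` by mapping Google-native MIME types to export formats, preferring the `exportLinks` Google returns and falling back to a constructed export endpoint; regular files use `alt=media`. A condensed sketch of that selection (the spreadsheet export format is cut off by the hunk boundary, so only the PDF mappings are shown):

// Condensed sketch of the downloadUrl selection; not the exact route code.
const exportFormats: Record<string, string> = {
  'application/vnd.google-apps.document': 'application/pdf',
  'application/vnd.google-apps.presentation': 'application/pdf',
}

function buildDownloadUrl(file: {
  id: string
  mimeType: string
  exportLinks?: Record<string, string>
}): string {
  if (file.mimeType.startsWith('application/vnd.google-apps.')) {
    const format = exportFormats[file.mimeType] || 'application/pdf'
    // Prefer the export link from the API response; otherwise construct the export endpoint.
    return (
      file.exportLinks?.[format] ||
      `https://www.googleapis.com/drive/v3/files/${file.id}/export?mimeType=${encodeURIComponent(format)}`
    )
  }
  // Regular (non-Google-native) files are downloaded directly.
  return `https://www.googleapis.com/drive/v3/files/${file.id}?alt=media`
}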

@@ -2,8 +2,8 @@ import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
export const dynamic = 'force-dynamic'
const logger = createLogger('GoogleDriveFilesAPI')
@@ -12,20 +12,17 @@ const logger = createLogger('GoogleDriveFilesAPI')
* Get files from Google Drive
*/
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8) // Generate a short request ID for correlation
const requestId = generateRequestId()
logger.info(`[${requestId}] Google Drive files request received`)
try {
// Get the session
const session = await getSession()
// Check if the user is authenticated
if (!session?.user?.id) {
logger.warn(`[${requestId}] Unauthenticated request rejected`)
return NextResponse.json({ error: 'User not authenticated' }, { status: 401 })
}
// Get the credential ID from the query params
const { searchParams } = new URL(request.url)
const credentialId = searchParams.get('credentialId')
const mimeType = searchParams.get('mimeType')
@@ -38,14 +35,12 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Credential ID is required' }, { status: 400 })
}
// Authorize use of the credential (supports collaborator credentials via workflow)
const authz = await authorizeCredentialUse(request, { credentialId: credentialId!, workflowId })
if (!authz.ok || !authz.credentialOwnerUserId) {
logger.warn(`[${requestId}] Unauthorized credential access attempt`, authz)
return NextResponse.json({ error: authz.error || 'Unauthorized' }, { status: 403 })
}
// Refresh access token if needed using the utility function
const accessToken = await refreshAccessTokenIfNeeded(
credentialId!,
authz.credentialOwnerUserId,
@@ -56,7 +51,6 @@ export async function GET(request: NextRequest) {
return NextResponse.json({ error: 'Failed to obtain valid access token' }, { status: 401 })
}
// Build Drive 'q' expression safely
const qParts: string[] = ['trashed = false']
if (folderId) {
qParts.push(`'${folderId.replace(/'/g, "\\'")}' in parents`)
@@ -69,7 +63,6 @@ export async function GET(request: NextRequest) {
}
const q = encodeURIComponent(qParts.join(' and '))
// Fetch files from Google Drive API with shared drives support
const response = await fetch(
`https://www.googleapis.com/drive/v3/files?q=${q}&supportsAllDrives=true&includeItemsFromAllDrives=true&spaces=drive&fields=files(id,name,mimeType,iconLink,webViewLink,thumbnailLink,createdTime,modifiedTime,size,owners,parents)`,
{

View File

@@ -2,6 +2,7 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { db } from '@/db'
import { account } from '@/db/schema'
@@ -11,7 +12,7 @@ export const dynamic = 'force-dynamic'
const logger = createLogger('GmailLabelAPI')
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
// Get the session

View File

@@ -2,10 +2,10 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { db } from '@/db'
import { account } from '@/db/schema'
export const dynamic = 'force-dynamic'
const logger = createLogger('GmailLabelsAPI')
@@ -19,7 +19,7 @@ interface GmailLabel {
}
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
// Get the session

View File

@@ -1,8 +1,8 @@
import { type NextRequest, NextResponse } from 'next/server'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
export const dynamic = 'force-dynamic'
const logger = createLogger('GoogleCalendarAPI')
@@ -21,7 +21,7 @@ interface CalendarListItem {
* Get calendars from Google Calendar
*/
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8) // Generate a short request ID for correlation
const requestId = generateRequestId()
logger.info(`[${requestId}] Google Calendar calendars request received`)
try {

View File

@@ -6,17 +6,32 @@ export const dynamic = 'force-dynamic'
const logger = createLogger('JiraIssuesAPI')
// Helper functions
const createErrorResponse = async (response: Response, defaultMessage: string) => {
try {
const errorData = await response.json()
return errorData.message || errorData.errorMessages?.[0] || defaultMessage
} catch {
return defaultMessage
}
}
const validateRequiredParams = (domain: string | null, accessToken: string | null) => {
if (!domain) {
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
}
if (!accessToken) {
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
}
return null
}
export async function POST(request: Request) {
try {
const { domain, accessToken, issueKeys = [], cloudId: providedCloudId } = await request.json()
if (!domain) {
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
}
if (!accessToken) {
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
}
const validationError = validateRequiredParams(domain || null, accessToken || null)
if (validationError) return validationError
if (issueKeys.length === 0) {
logger.info('No issue keys provided, returning empty result')
@@ -24,7 +39,7 @@ export async function POST(request: Request) {
}
// Use provided cloudId or fetch it if not provided
const cloudId = providedCloudId || (await getJiraCloudId(domain, accessToken))
const cloudId = providedCloudId || (await getJiraCloudId(domain!, accessToken!))
// Build the URL using cloudId for Jira API
const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/bulkfetch`
@@ -53,47 +68,24 @@ export async function POST(request: Request) {
if (!response.ok) {
logger.error(`Jira API error: ${response.status} ${response.statusText}`)
let errorMessage
try {
const errorData = await response.json()
logger.error('Error details:', JSON.stringify(errorData, null, 2))
errorMessage = errorData.message || `Failed to fetch Jira issues (${response.status})`
} catch (e) {
logger.error('Could not parse error response as JSON:', e)
try {
const _text = await response.text()
errorMessage = `Failed to fetch Jira issues: ${response.status} ${response.statusText}`
} catch (_textError) {
errorMessage = `Failed to fetch Jira issues: ${response.status} ${response.statusText}`
}
}
const errorMessage = await createErrorResponse(
response,
`Failed to fetch Jira issues (${response.status})`
)
return NextResponse.json({ error: errorMessage }, { status: response.status })
}
const data = await response.json()
const issues = (data.issues || []).map((issue: any) => ({
id: issue.key,
name: issue.fields.summary,
mimeType: 'jira/issue',
url: `https://${domain}/browse/${issue.key}`,
modifiedTime: issue.fields.updated,
webViewLink: `https://${domain}/browse/${issue.key}`,
}))
if (data.issues && data.issues.length > 0) {
data.issues.slice(0, 3).forEach((issue: any) => {
logger.info(`- ${issue.key}: ${issue.fields.summary}`)
})
}
return NextResponse.json({
issues: data.issues
? data.issues.map((issue: any) => ({
id: issue.key,
name: issue.fields.summary,
mimeType: 'jira/issue',
url: `https://${domain}/browse/${issue.key}`,
modifiedTime: issue.fields.updated,
webViewLink: `https://${domain}/browse/${issue.key}`,
}))
: [],
cloudId, // Return the cloudId so it can be cached
})
return NextResponse.json({ issues, cloudId })
} catch (error) {
logger.error('Error fetching Jira issues:', error)
return NextResponse.json(
@@ -111,83 +103,79 @@ export async function GET(request: Request) {
const providedCloudId = url.searchParams.get('cloudId')
const query = url.searchParams.get('query') || ''
const projectId = url.searchParams.get('projectId') || ''
const manualProjectId = url.searchParams.get('manualProjectId') || ''
const all = url.searchParams.get('all')?.toLowerCase() === 'true'
const limitParam = Number.parseInt(url.searchParams.get('limit') || '', 10)
const limit = Number.isFinite(limitParam) && limitParam > 0 ? limitParam : 0
if (!domain) {
return NextResponse.json({ error: 'Domain is required' }, { status: 400 })
}
if (!accessToken) {
return NextResponse.json({ error: 'Access token is required' }, { status: 400 })
}
// Use provided cloudId or fetch it if not provided
const cloudId = providedCloudId || (await getJiraCloudId(domain, accessToken))
logger.info('Using cloud ID:', cloudId)
// Build query parameters
const params = new URLSearchParams()
// Only add query if it exists
if (query) {
params.append('query', query)
}
const validationError = validateRequiredParams(domain || null, accessToken || null)
if (validationError) return validationError
const cloudId = providedCloudId || (await getJiraCloudId(domain!, accessToken!))
let data: any
if (query) {
const apiUrl = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/picker?${params.toString()}`
logger.info(`Fetching Jira issue suggestions from: ${apiUrl}`)
const params = new URLSearchParams({ query })
const apiUrl = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/picker?${params}`
const response = await fetch(apiUrl, {
method: 'GET',
headers: {
Authorization: `Bearer ${accessToken}`,
Accept: 'application/json',
},
})
logger.info('Response status:', response.status, response.statusText)
if (!response.ok) {
logger.error(`Jira API error: ${response.status} ${response.statusText}`)
let errorMessage
try {
const errorData = await response.json()
logger.error('Error details:', errorData)
errorMessage =
errorData.message || `Failed to fetch issue suggestions (${response.status})`
} catch (_e) {
errorMessage = `Failed to fetch issue suggestions: ${response.status} ${response.statusText}`
}
const errorMessage = await createErrorResponse(
response,
`Failed to fetch issue suggestions (${response.status})`
)
return NextResponse.json({ error: errorMessage }, { status: response.status })
}
data = await response.json()
} else if (projectId) {
// When no query, list latest issues for the selected project using Search API
const searchParams = new URLSearchParams()
searchParams.append('jql', `project=${projectId} ORDER BY updated DESC`)
searchParams.append('maxResults', '25')
searchParams.append('fields', 'summary,key')
const searchUrl = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/search?${searchParams.toString()}`
logger.info(`Fetching Jira issues via search from: ${searchUrl}`)
const response = await fetch(searchUrl, {
method: 'GET',
headers: {
Authorization: `Bearer ${accessToken}`,
Accept: 'application/json',
},
})
if (!response.ok) {
let errorMessage
try {
const errorData = await response.json()
logger.error('Jira Search API error details:', errorData)
errorMessage =
errorData.errorMessages?.[0] || `Failed to fetch issues (${response.status})`
} catch (_e) {
errorMessage = `Failed to fetch issues: ${response.status} ${response.statusText}`
}
return NextResponse.json({ error: errorMessage }, { status: response.status })
} else if (projectId || manualProjectId) {
const SAFETY_CAP = 1000
const PAGE_SIZE = 100
const target = Math.min(all ? limit || SAFETY_CAP : 25, SAFETY_CAP)
const projectKey = (projectId || manualProjectId).trim()
const buildSearchUrl = (startAt: number) => {
const params = new URLSearchParams({
jql: `project=${projectKey} ORDER BY updated DESC`,
maxResults: String(Math.min(PAGE_SIZE, target)),
startAt: String(startAt),
fields: 'summary,key,updated',
})
return `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/search?${params}`
}
const searchData = await response.json()
const issues = (searchData.issues || []).map((it: any) => ({
let startAt = 0
let collected: any[] = []
let total = 0
do {
const response = await fetch(buildSearchUrl(startAt), {
headers: {
Authorization: `Bearer ${accessToken}`,
Accept: 'application/json',
},
})
if (!response.ok) {
const errorMessage = await createErrorResponse(
response,
`Failed to fetch issues (${response.status})`
)
return NextResponse.json({ error: errorMessage }, { status: response.status })
}
const page = await response.json()
const issues = page.issues || []
total = page.total || issues.length
collected = collected.concat(issues)
startAt += PAGE_SIZE
} while (all && collected.length < Math.min(total, target))
const issues = collected.slice(0, target).map((it: any) => ({
key: it.key,
summary: it.fields?.summary || it.key,
}))
@@ -196,10 +184,7 @@ export async function GET(request: Request) {
data = { sections: [], cloudId }
}
return NextResponse.json({
...data,
cloudId, // Return the cloudId so it can be cached
})
return NextResponse.json({ ...data, cloudId })
} catch (error) {
logger.error('Error fetching Jira issue suggestions:', error)
return NextResponse.json(

View File
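The reworked branch above pages through the Jira Search API in batches of PAGE_SIZE (100) until it reaches the requested limit, the total reported by Jira, or the SAFETY_CAP of 1000, and only keeps paging when all=true. A hypothetical call against this route is sketched below; the endpoint path is an assumption, the query parameters match those read from url.searchParams at the top of the handler, and the accessToken the handler requires is omitted because its transport is not shown in this diff.

// Hypothetical usage sketch -- route path is an assumption; accessToken is also
// required by the handler but its source is not visible in this diff
const params = new URLSearchParams({
  domain: 'your-site.atlassian.net', // example Jira site
  projectId: 'PROJ',                 // or manualProjectId for a manually entered key
  all: 'true',                       // page beyond the default 25 results
  limit: '500',                      // server-side cap is SAFETY_CAP (1000)
})
const res = await fetch(`/api/tools/jira/issues?${params}`)
const data = await res.json()
// cloudId is echoed back so subsequent calls can skip the lookup
console.log(data.cloudId, data.issues?.length ?? data.sections?.length)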

@@ -42,10 +42,7 @@ export async function POST(request: Request) {
return NextResponse.json({ error: 'Summary is required' }, { status: 400 })
}
if (!issueType) {
logger.error('Missing issue type in request')
return NextResponse.json({ error: 'Issue type is required' }, { status: 400 })
}
const normalizedIssueType = issueType || 'Task'
// Use provided cloudId or fetch it if not provided
const cloudId = providedCloudId || (await getJiraCloudId(domain, accessToken))
@@ -62,7 +59,7 @@ export async function POST(request: Request) {
id: projectId,
},
issuetype: {
name: issueType,
name: normalizedIssueType,
},
summary: summary,
}

View File

@@ -3,6 +3,7 @@ import { LinearClient } from '@linear/sdk'
import { NextResponse } from 'next/server'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
export const dynamic = 'force-dynamic'
@@ -19,7 +20,7 @@ export async function POST(request: Request) {
return NextResponse.json({ error: 'Credential and teamId are required' }, { status: 400 })
}
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const authz = await authorizeCredentialUse(request as any, {
credentialId: credential,
workflowId,

View File
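This and the following routes swap the inline crypto.randomUUID().slice(0, 8) for a shared generateRequestId() helper imported from @/lib/utils. The helper itself is not part of this diff; a minimal sketch, assuming it simply centralizes the expression it replaces:

// @/lib/utils -- hypothetical sketch; the real implementation is not shown in this diff
export function generateRequestId(): string {
  // Short, log-friendly request id, matching the inline expression used previously
  return crypto.randomUUID().slice(0, 8)
}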

@@ -3,6 +3,7 @@ import { LinearClient } from '@linear/sdk'
import { NextResponse } from 'next/server'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
export const dynamic = 'force-dynamic'
@@ -11,7 +12,7 @@ const logger = createLogger('LinearTeamsAPI')
export async function POST(request: Request) {
try {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const body = await request.json()
const { credential, workflowId } = body

View File

@@ -115,7 +115,6 @@ const getChatDisplayName = async (
export async function POST(request: Request) {
try {
const requestId = crypto.randomUUID().slice(0, 8)
const body = await request.json()
const { credential, workflowId } = body

View File

@@ -1,6 +1,7 @@
import { NextResponse } from 'next/server'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
export const dynamic = 'force-dynamic'
@@ -19,7 +20,7 @@ export async function POST(request: Request) {
}
try {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const authz = await authorizeCredentialUse(request as any, {
credentialId: credential,
workflowId,

View File

@@ -2,6 +2,7 @@ import { eq } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { db } from '@/db'
import { account } from '@/db/schema'
@@ -48,7 +49,7 @@ export async function GET(request: Request) {
const accessToken = await refreshAccessTokenIfNeeded(
credentialId,
credentialOwnerUserId,
crypto.randomUUID().slice(0, 8)
generateRequestId()
)
if (!accessToken) {

View File

@@ -1,6 +1,7 @@
import { NextResponse } from 'next/server'
import { authorizeCredentialUse } from '@/lib/auth/credential-access'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
export const dynamic = 'force-dynamic'
@@ -17,7 +18,7 @@ interface SlackChannel {
export async function POST(request: Request) {
try {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const body = await request.json()
const { credential, workflowId } = body

View File

@@ -1,5 +1,6 @@
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import type { ThinkingToolParams, ThinkingToolResponse } from '@/tools/thinking/types'
const logger = createLogger('ThinkingToolAPI')
@@ -11,7 +12,7 @@ export const dynamic = 'force-dynamic'
* Simply acknowledges the thought by returning it in the output
*/
export async function POST(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const body: ThinkingToolParams = await request.json()

View File

@@ -2,6 +2,7 @@ import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { db } from '@/db'
import { account } from '@/db/schema'
@@ -14,7 +15,7 @@ const logger = createLogger('WealthboxItemAPI')
* Get a single item (note, contact, task) from Wealthbox
*/
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
// Get the session

View File

@@ -2,6 +2,7 @@ import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
import { db } from '@/db'
import { account } from '@/db/schema'
@@ -24,7 +25,7 @@ interface WealthboxItem {
* Get items (notes, contacts, tasks) from Wealthbox
*/
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const session = await getSession()

View File

@@ -2,6 +2,7 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { apiKey } from '@/db/schema'
@@ -12,7 +13,7 @@ export async function DELETE(
request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
try {

View File

@@ -3,6 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { user } from '@/db/schema'
@@ -12,15 +13,22 @@ const logger = createLogger('UpdateUserProfileAPI')
const UpdateProfileSchema = z
.object({
name: z.string().min(1, 'Name is required').optional(),
image: z.string().url('Invalid image URL').optional(),
})
.refine((data) => data.name !== undefined, {
message: 'Name field must be provided',
.refine((data) => data.name !== undefined || data.image !== undefined, {
message: 'At least one field (name or image) must be provided',
})
interface UpdateData {
updatedAt: Date
name?: string
image?: string | null
}
export const dynamic = 'force-dynamic'
export async function PATCH(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const session = await getSession()
@@ -36,8 +44,9 @@ export async function PATCH(request: NextRequest) {
const validatedData = UpdateProfileSchema.parse(body)
// Build update object
const updateData: any = { updatedAt: new Date() }
const updateData: UpdateData = { updatedAt: new Date() }
if (validatedData.name !== undefined) updateData.name = validatedData.name
if (validatedData.image !== undefined) updateData.image = validatedData.image
// Update user profile
const [updatedUser] = await db
@@ -82,7 +91,7 @@ export async function PATCH(request: NextRequest) {
// GET endpoint to fetch current user profile
export async function GET() {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const session = await getSession()

View File
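With the relaxed refine(), a request may now update name, image, or both, and the typed UpdateData object replaces the previous any. A hypothetical client call is shown below; the route path is an assumption based on the UpdateUserProfileAPI logger name.

// Hypothetical sketch -- route path assumed
await fetch('/api/users/me/profile', {
  method: 'PATCH',
  headers: { 'Content-Type': 'application/json' },
  // Either field alone now passes validation; sending neither returns a 400
  body: JSON.stringify({ image: 'https://example.com/avatar.png' }),
})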

@@ -1,79 +0,0 @@
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { createLogger } from '@/lib/logs/console/logger'
import { createErrorResponse } from '@/app/api/workflows/utils'
import { db } from '@/db'
import { apiKey as apiKeyTable } from '@/db/schema'
import { RateLimiter } from '@/services/queue'
const logger = createLogger('RateLimitAPI')
export async function GET(request: NextRequest) {
try {
const session = await getSession()
let authenticatedUserId: string | null = session?.user?.id || null
if (!authenticatedUserId) {
const apiKeyHeader = request.headers.get('x-api-key')
if (apiKeyHeader) {
const [apiKeyRecord] = await db
.select({ userId: apiKeyTable.userId })
.from(apiKeyTable)
.where(eq(apiKeyTable.key, apiKeyHeader))
.limit(1)
if (apiKeyRecord) {
authenticatedUserId = apiKeyRecord.userId
}
}
}
if (!authenticatedUserId) {
return createErrorResponse('Authentication required', 401)
}
// Get user subscription (checks both personal and org subscriptions)
const userSubscription = await getHighestPrioritySubscription(authenticatedUserId)
const rateLimiter = new RateLimiter()
const isApiAuth = !session?.user?.id
const triggerType = isApiAuth ? 'api' : 'manual'
const syncStatus = await rateLimiter.getRateLimitStatusWithSubscription(
authenticatedUserId,
userSubscription,
triggerType,
false
)
const asyncStatus = await rateLimiter.getRateLimitStatusWithSubscription(
authenticatedUserId,
userSubscription,
triggerType,
true
)
return NextResponse.json({
success: true,
rateLimit: {
sync: {
isLimited: syncStatus.remaining === 0,
limit: syncStatus.limit,
remaining: syncStatus.remaining,
resetAt: syncStatus.resetAt,
},
async: {
isLimited: asyncStatus.remaining === 0,
limit: asyncStatus.limit,
remaining: asyncStatus.remaining,
resetAt: asyncStatus.resetAt,
},
authType: triggerType,
},
})
} catch (error: any) {
logger.error('Error checking rate limit:', error)
return createErrorResponse(error.message || 'Failed to check rate limit', 500)
}
}

View File

@@ -4,6 +4,7 @@ import { NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { settings } from '@/db/schema'
@@ -24,6 +25,7 @@ const SettingsSchema = z.object({
unsubscribeNotifications: z.boolean().optional(),
})
.optional(),
billingUsageNotificationsEnabled: z.boolean().optional(),
})
// Default settings values
@@ -35,10 +37,11 @@ const defaultSettings = {
consoleExpandedByDefault: true,
telemetryEnabled: true,
emailPreferences: {},
billingUsageNotificationsEnabled: true,
}
export async function GET() {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const session = await getSession()
@@ -68,6 +71,7 @@ export async function GET() {
consoleExpandedByDefault: userSettings.consoleExpandedByDefault,
telemetryEnabled: userSettings.telemetryEnabled,
emailPreferences: userSettings.emailPreferences ?? {},
billingUsageNotificationsEnabled: userSettings.billingUsageNotificationsEnabled ?? true,
},
},
{ status: 200 }
@@ -80,7 +84,7 @@ export async function GET() {
}
export async function PATCH(request: Request) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const session = await getSession()

View File
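The settings schema and defaults gain a billingUsageNotificationsEnabled flag, defaulting to true and falling back to true when unset on an existing row. A hypothetical PATCH toggling it off (route path assumed):

// Hypothetical sketch -- route path assumed
await fetch('/api/users/me/settings', {
  method: 'PATCH',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ billingUsageNotificationsEnabled: false }),
})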

@@ -9,6 +9,7 @@ import {
verifyUnsubscribeToken,
} from '@/lib/email/unsubscribe'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
const logger = createLogger('UnsubscribeAPI')
@@ -19,7 +20,7 @@ const unsubscribeSchema = z.object({
})
export async function GET(req: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const { searchParams } = new URL(req.url)
@@ -63,7 +64,7 @@ export async function GET(req: NextRequest) {
}
export async function POST(req: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const body = await req.json()

View File

@@ -0,0 +1,74 @@
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { checkServerSideUsageLimits } from '@/lib/billing'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { getEffectiveCurrentPeriodCost } from '@/lib/billing/core/usage'
import { createLogger } from '@/lib/logs/console/logger'
import { createErrorResponse } from '@/app/api/workflows/utils'
import { RateLimiter } from '@/services/queue'
const logger = createLogger('UsageLimitsAPI')
export async function GET(request: NextRequest) {
try {
const auth = await checkHybridAuth(request, { requireWorkflowId: false })
if (!auth.success || !auth.userId) {
return createErrorResponse('Authentication required', 401)
}
const authenticatedUserId = auth.userId
// Rate limit info (sync + async), mirroring /users/me/rate-limit
const userSubscription = await getHighestPrioritySubscription(authenticatedUserId)
const rateLimiter = new RateLimiter()
const triggerType = auth.authType === 'api_key' ? 'api' : 'manual'
const [syncStatus, asyncStatus] = await Promise.all([
rateLimiter.getRateLimitStatusWithSubscription(
authenticatedUserId,
userSubscription,
triggerType,
false
),
rateLimiter.getRateLimitStatusWithSubscription(
authenticatedUserId,
userSubscription,
triggerType,
true
),
])
// Usage summary (current period cost + limit + plan)
const [usageCheck, effectiveCost] = await Promise.all([
checkServerSideUsageLimits(authenticatedUserId),
getEffectiveCurrentPeriodCost(authenticatedUserId),
])
const currentPeriodCost = effectiveCost
return NextResponse.json({
success: true,
rateLimit: {
sync: {
isLimited: syncStatus.remaining === 0,
limit: syncStatus.limit,
remaining: syncStatus.remaining,
resetAt: syncStatus.resetAt,
},
async: {
isLimited: asyncStatus.remaining === 0,
limit: asyncStatus.limit,
remaining: asyncStatus.remaining,
resetAt: asyncStatus.resetAt,
},
authType: triggerType,
},
usage: {
currentPeriodCost,
limit: usageCheck.limit,
plan: userSubscription?.plan || 'free',
},
})
} catch (error: any) {
logger.error('Error checking usage limits:', error)
return createErrorResponse(error.message || 'Failed to check usage limits', 500)
}
}

View File
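The new route replaces the deleted rate-limit endpoint above and folds usage information into the same payload. A descriptive summary of the response shape built by the handler, derived from the object literal above rather than taken from the diff:

// Descriptive sketch of the JSON returned above; Date values serialize to ISO strings
interface UsageLimitsResponse {
  success: true
  rateLimit: {
    sync: { isLimited: boolean; limit: number; remaining: number; resetAt: string }
    async: { isLimited: boolean; limit: number; remaining: number; resetAt: string }
    authType: 'api' | 'manual'
  }
  usage: {
    currentPeriodCost: number
    limit: number
    plan: string // 'free' when no subscription is found
  }
}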

@@ -0,0 +1,64 @@
import { eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { db } from '@/db'
import { apiKey as apiKeyTable } from '@/db/schema'
const logger = createLogger('V1Auth')
export interface AuthResult {
authenticated: boolean
userId?: string
error?: string
}
export async function authenticateApiKey(request: NextRequest): Promise<AuthResult> {
const apiKey = request.headers.get('x-api-key')
if (!apiKey) {
return {
authenticated: false,
error: 'API key required',
}
}
try {
const [keyRecord] = await db
.select({
userId: apiKeyTable.userId,
expiresAt: apiKeyTable.expiresAt,
})
.from(apiKeyTable)
.where(eq(apiKeyTable.key, apiKey))
.limit(1)
if (!keyRecord) {
logger.warn('Invalid API key attempted', { keyPrefix: apiKey.slice(0, 8) })
return {
authenticated: false,
error: 'Invalid API key',
}
}
if (keyRecord.expiresAt && keyRecord.expiresAt < new Date()) {
logger.warn('Expired API key attempted', { userId: keyRecord.userId })
return {
authenticated: false,
error: 'API key expired',
}
}
await db.update(apiKeyTable).set({ lastUsed: new Date() }).where(eq(apiKeyTable.key, apiKey))
return {
authenticated: true,
userId: keyRecord.userId,
}
} catch (error) {
logger.error('API key authentication error', { error })
return {
authenticated: false,
error: 'Authentication failed',
}
}
}

View File
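Every v1 route authenticates through this helper: the x-api-key header is looked up, expired keys are rejected, and lastUsed is stamped on success. A hypothetical client request, where the base URL and the environment variable holding the key are assumptions:

// Hypothetical client sketch -- base URL and key source are assumptions
const res = await fetch('https://example.com/api/v1/logs?workspaceId=ws_123', {
  headers: { 'x-api-key': process.env.SIM_API_KEY! },
})
if (res.status === 401) {
  throw new Error('Invalid or expired API key')
}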

@@ -0,0 +1,106 @@
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { createApiResponse, getUserLimits } from '@/app/api/v1/logs/meta'
import { checkRateLimit, createRateLimitResponse } from '@/app/api/v1/middleware'
import { db } from '@/db'
import { permissions, workflow, workflowExecutionLogs } from '@/db/schema'
const logger = createLogger('V1LogDetailsAPI')
export const revalidate = 0
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
try {
const rateLimit = await checkRateLimit(request, 'logs-detail')
if (!rateLimit.allowed) {
return createRateLimitResponse(rateLimit)
}
const userId = rateLimit.userId!
const { id } = await params
const rows = await db
.select({
id: workflowExecutionLogs.id,
workflowId: workflowExecutionLogs.workflowId,
executionId: workflowExecutionLogs.executionId,
stateSnapshotId: workflowExecutionLogs.stateSnapshotId,
level: workflowExecutionLogs.level,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
totalDurationMs: workflowExecutionLogs.totalDurationMs,
executionData: workflowExecutionLogs.executionData,
cost: workflowExecutionLogs.cost,
files: workflowExecutionLogs.files,
createdAt: workflowExecutionLogs.createdAt,
workflowName: workflow.name,
workflowDescription: workflow.description,
workflowColor: workflow.color,
workflowFolderId: workflow.folderId,
workflowUserId: workflow.userId,
workflowWorkspaceId: workflow.workspaceId,
workflowCreatedAt: workflow.createdAt,
workflowUpdatedAt: workflow.updatedAt,
})
.from(workflowExecutionLogs)
.innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
.innerJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workflow.workspaceId),
eq(permissions.userId, userId)
)
)
.where(eq(workflowExecutionLogs.id, id))
.limit(1)
const log = rows[0]
if (!log) {
return NextResponse.json({ error: 'Log not found' }, { status: 404 })
}
const workflowSummary = {
id: log.workflowId,
name: log.workflowName,
description: log.workflowDescription,
color: log.workflowColor,
folderId: log.workflowFolderId,
userId: log.workflowUserId,
workspaceId: log.workflowWorkspaceId,
createdAt: log.workflowCreatedAt,
updatedAt: log.workflowUpdatedAt,
}
const response = {
id: log.id,
workflowId: log.workflowId,
executionId: log.executionId,
level: log.level,
trigger: log.trigger,
startedAt: log.startedAt.toISOString(),
endedAt: log.endedAt?.toISOString() || null,
totalDurationMs: log.totalDurationMs,
files: log.files || undefined,
workflow: workflowSummary,
executionData: log.executionData as any,
cost: log.cost as any,
createdAt: log.createdAt.toISOString(),
}
// Get user's workflow execution limits and usage
const limits = await getUserLimits(userId)
// Create response with limits information
const apiResponse = createApiResponse({ data: response }, limits, rateLimit)
return NextResponse.json(apiResponse.body, { headers: apiResponse.headers })
} catch (error: any) {
logger.error(`[${requestId}] Log details fetch error`, { error: error.message })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}

View File

@@ -0,0 +1,100 @@
import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { createApiResponse, getUserLimits } from '@/app/api/v1/logs/meta'
import { checkRateLimit, createRateLimitResponse } from '@/app/api/v1/middleware'
import { db } from '@/db'
import {
permissions,
workflow,
workflowExecutionLogs,
workflowExecutionSnapshots,
} from '@/db/schema'
const logger = createLogger('V1ExecutionAPI')
export async function GET(
request: NextRequest,
{ params }: { params: Promise<{ executionId: string }> }
) {
try {
const rateLimit = await checkRateLimit(request, 'logs-detail')
if (!rateLimit.allowed) {
return createRateLimitResponse(rateLimit)
}
const userId = rateLimit.userId!
const { executionId } = await params
logger.debug(`Fetching execution data for: ${executionId}`)
const rows = await db
.select({
log: workflowExecutionLogs,
workflow: workflow,
})
.from(workflowExecutionLogs)
.innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
.innerJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, workflow.workspaceId),
eq(permissions.userId, userId)
)
)
.where(eq(workflowExecutionLogs.executionId, executionId))
.limit(1)
if (rows.length === 0) {
return NextResponse.json({ error: 'Workflow execution not found' }, { status: 404 })
}
const { log: workflowLog } = rows[0]
const [snapshot] = await db
.select()
.from(workflowExecutionSnapshots)
.where(eq(workflowExecutionSnapshots.id, workflowLog.stateSnapshotId))
.limit(1)
if (!snapshot) {
return NextResponse.json({ error: 'Workflow state snapshot not found' }, { status: 404 })
}
const response = {
executionId,
workflowId: workflowLog.workflowId,
workflowState: snapshot.stateData,
executionMetadata: {
trigger: workflowLog.trigger,
startedAt: workflowLog.startedAt.toISOString(),
endedAt: workflowLog.endedAt?.toISOString(),
totalDurationMs: workflowLog.totalDurationMs,
cost: workflowLog.cost || null,
},
}
logger.debug(`Successfully fetched execution data for: ${executionId}`)
logger.debug(
`Workflow state contains ${Object.keys((snapshot.stateData as any)?.blocks || {}).length} blocks`
)
// Get user's workflow execution limits and usage
const limits = await getUserLimits(userId)
// Create response with limits information
const apiResponse = createApiResponse(
{
...response,
},
limits,
rateLimit
)
return NextResponse.json(apiResponse.body, { headers: apiResponse.headers })
} catch (error) {
logger.error('Error fetching execution data:', error)
return NextResponse.json({ error: 'Failed to fetch execution data' }, { status: 500 })
}
}

View File

@@ -0,0 +1,110 @@
import { and, desc, eq, gte, inArray, lte, type SQL, sql } from 'drizzle-orm'
import { workflow, workflowExecutionLogs } from '@/db/schema'
export interface LogFilters {
workspaceId: string
workflowIds?: string[]
folderIds?: string[]
triggers?: string[]
level?: 'info' | 'error'
startDate?: Date
endDate?: Date
executionId?: string
minDurationMs?: number
maxDurationMs?: number
minCost?: number
maxCost?: number
model?: string
cursor?: {
startedAt: string
id: string
}
order?: 'desc' | 'asc'
}
export function buildLogFilters(filters: LogFilters): SQL<unknown> {
const conditions: SQL<unknown>[] = []
// Required: workspace and permissions check
conditions.push(eq(workflow.workspaceId, filters.workspaceId))
// Cursor-based pagination
if (filters.cursor) {
const cursorDate = new Date(filters.cursor.startedAt)
if (filters.order === 'desc') {
conditions.push(
sql`(${workflowExecutionLogs.startedAt}, ${workflowExecutionLogs.id}) < (${cursorDate}, ${filters.cursor.id})`
)
} else {
conditions.push(
sql`(${workflowExecutionLogs.startedAt}, ${workflowExecutionLogs.id}) > (${cursorDate}, ${filters.cursor.id})`
)
}
}
// Workflow IDs filter
if (filters.workflowIds && filters.workflowIds.length > 0) {
conditions.push(inArray(workflow.id, filters.workflowIds))
}
// Folder IDs filter
if (filters.folderIds && filters.folderIds.length > 0) {
conditions.push(inArray(workflow.folderId, filters.folderIds))
}
// Triggers filter
if (filters.triggers && filters.triggers.length > 0 && !filters.triggers.includes('all')) {
conditions.push(inArray(workflowExecutionLogs.trigger, filters.triggers))
}
// Level filter
if (filters.level) {
conditions.push(eq(workflowExecutionLogs.level, filters.level))
}
// Date range filters
if (filters.startDate) {
conditions.push(gte(workflowExecutionLogs.startedAt, filters.startDate))
}
if (filters.endDate) {
conditions.push(lte(workflowExecutionLogs.startedAt, filters.endDate))
}
// Search filter (execution ID)
if (filters.executionId) {
conditions.push(eq(workflowExecutionLogs.executionId, filters.executionId))
}
// Duration filters
if (filters.minDurationMs !== undefined) {
conditions.push(gte(workflowExecutionLogs.totalDurationMs, filters.minDurationMs))
}
if (filters.maxDurationMs !== undefined) {
conditions.push(lte(workflowExecutionLogs.totalDurationMs, filters.maxDurationMs))
}
// Cost filters
if (filters.minCost !== undefined) {
conditions.push(sql`(${workflowExecutionLogs.cost}->>'total')::numeric >= ${filters.minCost}`)
}
if (filters.maxCost !== undefined) {
conditions.push(sql`(${workflowExecutionLogs.cost}->>'total')::numeric <= ${filters.maxCost}`)
}
// Model filter
if (filters.model) {
conditions.push(sql`${workflowExecutionLogs.cost}->>'models' ? ${filters.model}`)
}
// Combine all conditions with AND
return conditions.length > 0 ? and(...conditions)! : sql`true`
}
export function getOrderBy(order: 'desc' | 'asc' = 'desc') {
return order === 'desc'
? desc(workflowExecutionLogs.startedAt)
: sql`${workflowExecutionLogs.startedAt} ASC`
}

View File
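buildLogFilters combines the workspace check, the optional filters, and a keyset-pagination cursor into a single AND condition; the (startedAt, id) row comparison keeps paging stable even when several logs share a timestamp. A hypothetical direct use of the helpers, with example values assumed (the logs route further below is the real call site):

// Hypothetical usage sketch of the filter helpers above
import { buildLogFilters, getOrderBy } from '@/app/api/v1/logs/filters'

const where = buildLogFilters({
  workspaceId: 'ws_123',                                            // example id
  level: 'error',
  startDate: new Date('2025-09-01'),
  cursor: { startedAt: '2025-09-09T12:00:00.000Z', id: 'log_abc' }, // from a previous page
  order: 'desc',
})
const orderBy = getOrderBy('desc')
// `where` and `orderBy` are then passed to the drizzle query, as in the logs route below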

@@ -0,0 +1,78 @@
import { checkServerSideUsageLimits } from '@/lib/billing'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { getEffectiveCurrentPeriodCost } from '@/lib/billing/core/usage'
import { RateLimiter } from '@/services/queue'
export interface UserLimits {
workflowExecutionRateLimit: {
sync: {
limit: number
remaining: number
resetAt: string
}
async: {
limit: number
remaining: number
resetAt: string
}
}
usage: {
currentPeriodCost: number
limit: number
plan: string
isExceeded: boolean
}
}
export async function getUserLimits(userId: string): Promise<UserLimits> {
const [userSubscription, usageCheck, effectiveCost, rateLimiter] = await Promise.all([
getHighestPrioritySubscription(userId),
checkServerSideUsageLimits(userId),
getEffectiveCurrentPeriodCost(userId),
Promise.resolve(new RateLimiter()),
])
const [syncStatus, asyncStatus] = await Promise.all([
rateLimiter.getRateLimitStatusWithSubscription(userId, userSubscription, 'api', false),
rateLimiter.getRateLimitStatusWithSubscription(userId, userSubscription, 'api', true),
])
return {
workflowExecutionRateLimit: {
sync: {
limit: syncStatus.limit,
remaining: syncStatus.remaining,
resetAt: syncStatus.resetAt.toISOString(),
},
async: {
limit: asyncStatus.limit,
remaining: asyncStatus.remaining,
resetAt: asyncStatus.resetAt.toISOString(),
},
},
usage: {
currentPeriodCost: effectiveCost,
limit: usageCheck.limit,
plan: userSubscription?.plan || 'free',
isExceeded: usageCheck.isExceeded,
},
}
}
export function createApiResponse<T>(
data: T,
limits: UserLimits,
apiRateLimit: { limit: number; remaining: number; resetAt: Date }
) {
return {
body: {
...data,
limits,
},
headers: {
'X-RateLimit-Limit': apiRateLimit.limit.toString(),
'X-RateLimit-Remaining': apiRateLimit.remaining.toString(),
'X-RateLimit-Reset': apiRateLimit.resetAt.toISOString(),
},
}
}

View File

@@ -0,0 +1,212 @@
import { and, eq, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createLogger } from '@/lib/logs/console/logger'
import { buildLogFilters, getOrderBy } from '@/app/api/v1/logs/filters'
import { createApiResponse, getUserLimits } from '@/app/api/v1/logs/meta'
import { checkRateLimit, createRateLimitResponse } from '@/app/api/v1/middleware'
import { db } from '@/db'
import { permissions, workflow, workflowExecutionLogs } from '@/db/schema'
const logger = createLogger('V1LogsAPI')
export const dynamic = 'force-dynamic'
export const revalidate = 0
const QueryParamsSchema = z.object({
workspaceId: z.string(),
workflowIds: z.string().optional(),
folderIds: z.string().optional(),
triggers: z.string().optional(),
level: z.enum(['info', 'error']).optional(),
startDate: z.string().optional(),
endDate: z.string().optional(),
executionId: z.string().optional(),
minDurationMs: z.coerce.number().optional(),
maxDurationMs: z.coerce.number().optional(),
minCost: z.coerce.number().optional(),
maxCost: z.coerce.number().optional(),
model: z.string().optional(),
details: z.enum(['basic', 'full']).optional().default('basic'),
includeTraceSpans: z.coerce.boolean().optional().default(false),
includeFinalOutput: z.coerce.boolean().optional().default(false),
limit: z.coerce.number().optional().default(100),
cursor: z.string().optional(),
order: z.enum(['desc', 'asc']).optional().default('desc'),
})
interface CursorData {
startedAt: string
id: string
}
function encodeCursor(data: CursorData): string {
return Buffer.from(JSON.stringify(data)).toString('base64')
}
function decodeCursor(cursor: string): CursorData | null {
try {
return JSON.parse(Buffer.from(cursor, 'base64').toString())
} catch {
return null
}
}
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
try {
const rateLimit = await checkRateLimit(request, 'logs')
if (!rateLimit.allowed) {
return createRateLimitResponse(rateLimit)
}
const userId = rateLimit.userId!
const { searchParams } = new URL(request.url)
const rawParams = Object.fromEntries(searchParams.entries())
const validationResult = QueryParamsSchema.safeParse(rawParams)
if (!validationResult.success) {
return NextResponse.json(
{ error: 'Invalid parameters', details: validationResult.error.errors },
{ status: 400 }
)
}
const params = validationResult.data
logger.info(`[${requestId}] Fetching logs for workspace ${params.workspaceId}`, {
userId,
filters: {
workflowIds: params.workflowIds,
triggers: params.triggers,
level: params.level,
},
})
// Build filter conditions
const filters = {
workspaceId: params.workspaceId,
workflowIds: params.workflowIds?.split(',').filter(Boolean),
folderIds: params.folderIds?.split(',').filter(Boolean),
triggers: params.triggers?.split(',').filter(Boolean),
level: params.level,
startDate: params.startDate ? new Date(params.startDate) : undefined,
endDate: params.endDate ? new Date(params.endDate) : undefined,
executionId: params.executionId,
minDurationMs: params.minDurationMs,
maxDurationMs: params.maxDurationMs,
minCost: params.minCost,
maxCost: params.maxCost,
model: params.model,
cursor: params.cursor ? decodeCursor(params.cursor) || undefined : undefined,
order: params.order,
}
const conditions = buildLogFilters(filters)
const orderBy = getOrderBy(params.order)
// Build and execute query
const baseQuery = db
.select({
id: workflowExecutionLogs.id,
workflowId: workflowExecutionLogs.workflowId,
executionId: workflowExecutionLogs.executionId,
level: workflowExecutionLogs.level,
trigger: workflowExecutionLogs.trigger,
startedAt: workflowExecutionLogs.startedAt,
endedAt: workflowExecutionLogs.endedAt,
totalDurationMs: workflowExecutionLogs.totalDurationMs,
cost: workflowExecutionLogs.cost,
files: workflowExecutionLogs.files,
executionData: params.details === 'full' ? workflowExecutionLogs.executionData : sql`null`,
workflowName: workflow.name,
workflowDescription: workflow.description,
})
.from(workflowExecutionLogs)
.innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
.innerJoin(
permissions,
and(
eq(permissions.entityType, 'workspace'),
eq(permissions.entityId, params.workspaceId),
eq(permissions.userId, userId)
)
)
const logs = await baseQuery
.where(conditions)
.orderBy(orderBy)
.limit(params.limit + 1)
const hasMore = logs.length > params.limit
const data = logs.slice(0, params.limit)
let nextCursor: string | undefined
if (hasMore && data.length > 0) {
const lastLog = data[data.length - 1]
nextCursor = encodeCursor({
startedAt: lastLog.startedAt.toISOString(),
id: lastLog.id,
})
}
const formattedLogs = data.map((log) => {
const result: any = {
id: log.id,
workflowId: log.workflowId,
executionId: log.executionId,
level: log.level,
trigger: log.trigger,
startedAt: log.startedAt.toISOString(),
endedAt: log.endedAt?.toISOString() || null,
totalDurationMs: log.totalDurationMs,
cost: log.cost ? { total: (log.cost as any).total } : null,
files: log.files || null,
}
if (params.details === 'full') {
result.workflow = {
id: log.workflowId,
name: log.workflowName,
description: log.workflowDescription,
}
if (log.cost) {
result.cost = log.cost
}
if (log.executionData) {
const execData = log.executionData as any
if (params.includeFinalOutput && execData.finalOutput) {
result.finalOutput = execData.finalOutput
}
if (params.includeTraceSpans && execData.traceSpans) {
result.traceSpans = execData.traceSpans
}
}
}
return result
})
// Get user's workflow execution limits and usage
const limits = await getUserLimits(userId)
// Create response with limits information
// The rateLimit object from checkRateLimit is for THIS API endpoint's rate limits
const response = createApiResponse(
{
data: formattedLogs,
nextCursor,
},
limits,
rateLimit // This is the API endpoint rate limit, not workflow execution limits
)
return NextResponse.json(response.body, { headers: response.headers })
} catch (error: any) {
logger.error(`[${requestId}] Logs fetch error`, { error: error.message })
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
}
}

View File
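Because the route returns an opaque base64 nextCursor whenever more rows exist, a client can walk the full history by echoing the cursor back. A hypothetical pagination loop, with the path and key source assumed:

// Hypothetical client-side pagination sketch
async function fetchAllLogs(workspaceId: string, apiKey: string) {
  const all: any[] = []
  let cursor: string | undefined
  do {
    const params = new URLSearchParams({ workspaceId, limit: '100' })
    if (cursor) params.set('cursor', cursor)
    const res = await fetch(`/api/v1/logs?${params}`, { headers: { 'x-api-key': apiKey } })
    if (!res.ok) throw new Error(`Logs request failed: ${res.status}`)
    const body = await res.json()
    all.push(...body.data)
    cursor = body.nextCursor
  } while (cursor)
  return all
}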

@@ -0,0 +1,108 @@
import { type NextRequest, NextResponse } from 'next/server'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { createLogger } from '@/lib/logs/console/logger'
import { RateLimiter } from '@/services/queue/RateLimiter'
import { authenticateApiKey } from './auth'
const logger = createLogger('V1Middleware')
const rateLimiter = new RateLimiter()
export interface RateLimitResult {
allowed: boolean
remaining: number
resetAt: Date
limit: number
userId?: string
error?: string
}
export async function checkRateLimit(
request: NextRequest,
endpoint: 'logs' | 'logs-detail' = 'logs'
): Promise<RateLimitResult> {
try {
const auth = await authenticateApiKey(request)
if (!auth.authenticated) {
return {
allowed: false,
remaining: 0,
limit: 10, // Default to free tier limit
resetAt: new Date(),
error: auth.error,
}
}
const userId = auth.userId!
const subscription = await getHighestPrioritySubscription(userId)
// Use api-endpoint trigger type for external API rate limiting
const result = await rateLimiter.checkRateLimitWithSubscription(
userId,
subscription,
'api-endpoint',
false // Not relevant for api-endpoint trigger type
)
if (!result.allowed) {
logger.warn(`Rate limit exceeded for user ${userId}`, {
endpoint,
remaining: result.remaining,
resetAt: result.resetAt,
})
}
// Get the actual rate limit for this user's plan
const rateLimitStatus = await rateLimiter.getRateLimitStatusWithSubscription(
userId,
subscription,
'api-endpoint',
false
)
return {
...result,
limit: rateLimitStatus.limit,
userId,
}
} catch (error) {
logger.error('Rate limit check error', { error })
return {
allowed: false,
remaining: 0,
limit: 10,
resetAt: new Date(Date.now() + 60000),
error: 'Rate limit check failed',
}
}
}
export function createRateLimitResponse(result: RateLimitResult): NextResponse {
const headers = {
'X-RateLimit-Limit': result.limit.toString(),
'X-RateLimit-Remaining': result.remaining.toString(),
'X-RateLimit-Reset': result.resetAt.toISOString(),
}
if (result.error) {
return NextResponse.json({ error: result.error || 'Unauthorized' }, { status: 401, headers })
}
if (!result.allowed) {
return NextResponse.json(
{
error: 'Rate limit exceeded',
message: `API rate limit exceeded. Please retry after ${result.resetAt.toISOString()}`,
retryAfter: result.resetAt.getTime(),
},
{
status: 429,
headers: {
...headers,
'Retry-After': Math.ceil((result.resetAt.getTime() - Date.now()) / 1000).toString(),
},
}
)
}
return NextResponse.json({ error: 'Bad request' }, { status: 400, headers })
}

View File
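When the limiter rejects a request, the response carries X-RateLimit-* headers plus a Retry-After value in seconds. A hypothetical caller honoring it (path and key source are assumptions):

// Hypothetical sketch of honoring the 429 headers set above
const res = await fetch('/api/v1/logs?workspaceId=ws_123', {
  headers: { 'x-api-key': process.env.SIM_API_KEY! },
})
if (res.status === 429) {
  const retryAfterSeconds = Number(res.headers.get('Retry-After') ?? '1')
  await new Promise((resolve) => setTimeout(resolve, retryAfterSeconds * 1000))
  // ...then retry the request
}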

@@ -4,6 +4,7 @@ import OpenAI, { AzureOpenAI } from 'openai'
import { env } from '@/lib/env'
import { getCostMultiplier, isBillingEnabled } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { userStats, workflow } from '@/db/schema'
import { getModelPricing } from '@/providers/utils'
@@ -138,7 +139,7 @@ async function updateUserStatsForWand(
}
export async function POST(req: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
logger.info(`[${requestId}] Received wand generation request`)
if (!client) {

View File

@@ -4,6 +4,7 @@ import { getSession } from '@/lib/auth'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { generateRequestId } from '@/lib/utils'
import { getOAuthToken } from '@/app/api/auth/oauth/utils'
import { db } from '@/db'
import { webhook, workflow } from '@/db/schema'
@@ -14,7 +15,7 @@ export const dynamic = 'force-dynamic'
// Get a specific webhook
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const { id } = await params
@@ -83,7 +84,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
// Update a webhook
export async function PATCH(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const { id } = await params
@@ -181,7 +182,7 @@ export async function DELETE(
request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const { id } = await params

View File

@@ -5,6 +5,7 @@ import { getSession } from '@/lib/auth'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { generateRequestId } from '@/lib/utils'
import { getOAuthToken } from '@/app/api/auth/oauth/utils'
import { db } from '@/db'
import { webhook, workflow } from '@/db/schema'
@@ -15,7 +16,7 @@ export const dynamic = 'force-dynamic'
// Get all webhooks for the current user
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const session = await getSession()
@@ -108,7 +109,7 @@ export async function GET(request: NextRequest) {
// Create or Update a webhook
export async function POST(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const userId = (await getSession())?.user?.id
if (!userId) {

View File

@@ -1,6 +1,7 @@
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { webhook } from '@/db/schema'
@@ -9,7 +10,7 @@ const logger = createLogger('WebhookTestAPI')
export const dynamic = 'force-dynamic'
export async function GET(request: NextRequest) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
// Get the webhook ID and provider from the query parameters

View File

@@ -5,6 +5,7 @@ import { checkServerSideUsageLimits } from '@/lib/billing'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { env, isTruthy } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import {
handleSlackChallenge,
handleWhatsAppVerification,
@@ -14,7 +15,6 @@ import { executeWebhookJob } from '@/background/webhook-execution'
import { db } from '@/db'
import { webhook, workflow } from '@/db/schema'
import { RateLimiter } from '@/services/queue'
import type { SubscriptionPlan } from '@/services/queue/types'
const logger = createLogger('WebhookTriggerAPI')
@@ -28,7 +28,7 @@ export const runtime = 'nodejs'
* Handles verification requests from webhook providers and confirms endpoint exists.
*/
export async function GET(request: NextRequest, { params }: { params: Promise<{ path: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
const path = (await params).path
@@ -84,7 +84,7 @@ export async function POST(
request: NextRequest,
{ params }: { params: Promise<{ path: string }> }
) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
let foundWorkflow: any = null
let foundWebhook: any = null
@@ -248,14 +248,10 @@ export async function POST(
}
// --- PHASE 3: Rate limiting for webhook execution ---
let isEnterprise = false
try {
// Get user subscription for rate limiting (checks both personal and org subscriptions)
const userSubscription = await getHighestPrioritySubscription(foundWorkflow.userId)
const subscriptionPlan = (userSubscription?.plan || 'free') as SubscriptionPlan
isEnterprise = subscriptionPlan === 'enterprise'
// Check async rate limits (webhooks are processed asynchronously)
const rateLimiter = new RateLimiter()
const rateLimitCheck = await rateLimiter.checkRateLimitWithSubscription(
@@ -332,7 +328,7 @@ export async function POST(
// Continue processing - better to risk usage limit bypass than fail webhook
}
// --- PHASE 5: Queue webhook execution (trigger.dev or direct based on plan/env) ---
// --- PHASE 5: Queue webhook execution (trigger.dev or direct based on env) ---
try {
const payload = {
webhookId: foundWebhook.id,
@@ -345,9 +341,7 @@ export async function POST(
blockId: foundWebhook.blockId,
}
// Enterprise users always execute directly, others check TRIGGER_DEV_ENABLED env
// Note: isEnterprise was already determined during rate limiting phase
const useTrigger = !isEnterprise && isTruthy(env.TRIGGER_DEV_ENABLED)
const useTrigger = isTruthy(env.TRIGGER_DEV_ENABLED)
if (useTrigger) {
const handle = await tasks.trigger('webhook-execution', payload)
@@ -359,9 +353,8 @@ export async function POST(
void executeWebhookJob(payload).catch((error) => {
logger.error(`[${requestId}] Direct webhook execution failed`, error)
})
const reason = isEnterprise ? 'Enterprise plan' : 'Trigger.dev disabled'
logger.info(
`[${requestId}] Queued direct webhook execution for ${foundWebhook.provider} webhook (${reason})`
`[${requestId}] Queued direct webhook execution for ${foundWebhook.provider} webhook (Trigger.dev disabled)`
)
}

View File

@@ -5,6 +5,7 @@ import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { simAgentClient } from '@/lib/sim-agent'
import { generateRequestId } from '@/lib/utils'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/db-helpers'
import { getAllBlocks } from '@/blocks/registry'
import type { BlockConfig } from '@/blocks/types'
@@ -48,7 +49,7 @@ type AutoLayoutRequest = z.infer<typeof AutoLayoutRequestSchema>
* Apply autolayout to an existing workflow
*/
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const startTime = Date.now()
const { id: workflowId } = await params

View File

@@ -1,5 +1,6 @@
import { eq } from 'drizzle-orm'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
import { db } from '@/db'
import { chat } from '@/db/schema'
@@ -11,7 +12,7 @@ const logger = createLogger('ChatStatusAPI')
*/
export async function GET(_request: Request, { params }: { params: Promise<{ id: string }> }) {
const { id } = await params
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
try {
logger.debug(`[${requestId}] Checking chat deployment status for workflow: ${id}`)

View File

@@ -12,6 +12,7 @@ describe('Workflow Deployment API Route', () => {
vi.doMock('@/lib/utils', () => ({
generateApiKey: vi.fn().mockReturnValue('sim_testkeygenerated12345'),
generateRequestId: vi.fn(() => 'test-request-id'),
}))
vi.doMock('uuid', () => ({

View File

@@ -2,7 +2,7 @@ import { and, desc, eq } from 'drizzle-orm'
import type { NextRequest } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { createLogger } from '@/lib/logs/console/logger'
import { generateApiKey } from '@/lib/utils'
import { generateApiKey, generateRequestId } from '@/lib/utils'
import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
import { db } from '@/db'
@@ -14,7 +14,7 @@ export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
try {
@@ -133,7 +133,7 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
}
export async function POST(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
try {
@@ -195,14 +195,31 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
// Process blocks
blocks.forEach((block) => {
const parentId = block.parentId || null
const extent = block.extent || null
const blockData = {
...(block.data || {}),
...(parentId && { parentId }),
...(extent && { extent }),
}
blocksMap[block.id] = {
id: block.id,
type: block.type,
name: block.name,
position: { x: Number(block.positionX), y: Number(block.positionY) },
data: block.data,
data: blockData,
enabled: block.enabled,
subBlocks: block.subBlocks || {},
// Preserve execution-relevant flags so serializer behavior matches manual runs
isWide: block.isWide ?? false,
advancedMode: block.advancedMode ?? false,
triggerMode: block.triggerMode ?? false,
outputs: block.outputs || {},
horizontalHandles: block.horizontalHandles ?? true,
height: Number(block.height || 0),
parentId,
extent,
}
})
@@ -351,7 +368,7 @@ export async function DELETE(
request: NextRequest,
{ params }: { params: Promise<{ id: string }> }
) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
try {

View File
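The deploy route's block mapper now folds parentId and extent into data and carries the execution-relevant flags through, so the serialized deployed state matches what manual runs see. A descriptive sketch of the resulting per-block shape, with field names taken from the mapping above (not code from the diff):

// Descriptive sketch of the block shape built in blocksMap above
interface DeployedBlock {
  id: string
  type: string
  name: string
  position: { x: number; y: number }
  data: Record<string, any> & { parentId?: string; extent?: string }
  enabled: boolean
  subBlocks: Record<string, any>
  isWide: boolean
  advancedMode: boolean
  triggerMode: boolean
  outputs: Record<string, any>
  horizontalHandles: boolean
  height: number
  parentId: string | null
  extent: string | null
}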

@@ -1,6 +1,7 @@
import { eq } from 'drizzle-orm'
import type { NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console/logger'
import { generateRequestId } from '@/lib/utils'
import { validateWorkflowAccess } from '@/app/api/workflows/middleware'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
import { db } from '@/db'
@@ -18,7 +19,7 @@ function addNoCacheHeaders(response: NextResponse): NextResponse {
}
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
try {

View File

@@ -1,10 +1,10 @@
import crypto from 'crypto'
import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
import { getUserEntityPermissions } from '@/lib/permissions/utils'
import { generateRequestId } from '@/lib/utils'
import { db } from '@/db'
import { workflow, workflowBlocks, workflowEdges, workflowSubflows } from '@/db/schema'
import type { Variable } from '@/stores/panel/variables/types'
@@ -23,7 +23,7 @@ const DuplicateRequestSchema = z.object({
// POST /api/workflows/[id]/duplicate - Duplicate a workflow with all its blocks, edges, and subflows
export async function POST(req: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const { id: sourceWorkflowId } = await params
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const startTime = Date.now()
const session = await getSession()

View File

@@ -162,6 +162,7 @@ describe('Workflow Execution API Route', () => {
}),
isHosted: vi.fn().mockReturnValue(false),
getRotatingApiKey: vi.fn().mockReturnValue('rotated-api-key'),
generateRequestId: vi.fn(() => 'test-request-id'),
}))
vi.doMock('@/lib/logs/execution/logging-session', () => ({

View File

@@ -10,7 +10,7 @@ import { getPersonalAndWorkspaceEnv } from '@/lib/environment/utils'
import { createLogger } from '@/lib/logs/console/logger'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
import { decryptSecret } from '@/lib/utils'
import { decryptSecret, generateRequestId } from '@/lib/utils'
import { loadDeployedWorkflowState } from '@/lib/workflows/db-helpers'
import {
createHttpResponseFromBlock,
@@ -23,12 +23,7 @@ import { db } from '@/db'
import { userStats } from '@/db/schema'
import { Executor } from '@/executor'
import { Serializer } from '@/serializer'
import {
RateLimitError,
RateLimiter,
type SubscriptionPlan,
type TriggerType,
} from '@/services/queue'
import { RateLimitError, RateLimiter, type TriggerType } from '@/services/queue'
import { mergeSubblockState } from '@/stores/workflows/server-utils'
const logger = createLogger('WorkflowExecuteAPI')
@@ -347,7 +342,7 @@ async function executeWorkflow(
}
export async function GET(request: NextRequest, { params }: { params: Promise<{ id: string }> }) {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const { id } = await params
try {
@@ -378,8 +373,6 @@ export async function GET(request: NextRequest, { params }: { params: Promise<{
// Get user subscription (checks both personal and org subscriptions)
const userSubscription = await getHighestPrioritySubscription(validation.workflow.userId)
const subscriptionPlan = (userSubscription?.plan || 'free') as SubscriptionPlan
const rateLimiter = new RateLimiter()
const rateLimitCheck = await rateLimiter.checkRateLimitWithSubscription(
validation.workflow.userId,
@@ -447,7 +440,7 @@ export async function POST(
request: Request,
{ params }: { params: Promise<{ id: string }> }
): Promise<Response> {
const requestId = crypto.randomUUID().slice(0, 8)
const requestId = generateRequestId()
const logger = createLogger('WorkflowExecuteAPI')
logger.info(`[${requestId}] Raw request body: `)
@@ -505,8 +498,6 @@ export async function POST(
// Get user subscription (checks both personal and org subscriptions)
const userSubscription = await getHighestPrioritySubscription(authenticatedUserId)
const subscriptionPlan = (userSubscription?.plan || 'free') as SubscriptionPlan
if (isAsync) {
try {
const rateLimiter = new RateLimiter()

Some files were not shown because too many files have changed in this diff.