mirror of
https://github.com/simstudioai/sim.git
synced 2026-02-22 03:01:08 -05:00
Compare commits
33 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0d86ea01f0 | ||
|
|
04286fc16b | ||
|
|
c52f78c840 | ||
|
|
e318bf2e65 | ||
|
|
4913799a27 | ||
|
|
ccb4f5956d | ||
|
|
2a6d4fcb96 | ||
|
|
115f04e989 | ||
|
|
42020c3ae2 | ||
|
|
34d92fae89 | ||
|
|
a98463a486 | ||
|
|
765a481864 | ||
|
|
a1400caea0 | ||
|
|
2fc2e12cb2 | ||
|
|
3fa4bb4c12 | ||
|
|
67aa4bb332 | ||
|
|
1b8d666c93 | ||
|
|
71942cb53c | ||
|
|
12534163c1 | ||
|
|
55920e9b03 | ||
|
|
958dd64740 | ||
|
|
68f44b8df4 | ||
|
|
9920882dc5 | ||
|
|
9ca5254c2b | ||
|
|
d7fddb2909 | ||
|
|
61c7afc19e | ||
|
|
3c470ab0f8 | ||
|
|
2b5e436a2a | ||
|
|
e24c824c9a | ||
|
|
15ace5e63f | ||
|
|
fdca73679d | ||
|
|
da46a387c9 | ||
|
|
b7e377ec4b |
@@ -454,6 +454,8 @@ Enables AI-assisted field generation.
|
||||
|
||||
## Tools Configuration
|
||||
|
||||
**Important:** `tools.config.tool` runs during serialization before variable resolution. Put `Number()` and other type coercions in `tools.config.params` instead, which runs at execution time after variables are resolved.
|
||||
|
||||
**Preferred:** Use tool names directly as dropdown option IDs to avoid switch cases:
|
||||
```typescript
|
||||
// Dropdown options use tool IDs directly
|
||||
|
||||
2
.github/workflows/images.yml
vendored
2
.github/workflows/images.yml
vendored
@@ -146,7 +146,7 @@ jobs:
|
||||
|
||||
create-ghcr-manifests:
|
||||
name: Create GHCR Manifests
|
||||
runs-on: blacksmith-8vcpu-ubuntu-2404
|
||||
runs-on: blacksmith-2vcpu-ubuntu-2404
|
||||
needs: [build-amd64, build-ghcr-arm64]
|
||||
if: github.ref == 'refs/heads/main'
|
||||
strategy:
|
||||
|
||||
2
.github/workflows/test-build.yml
vendored
2
.github/workflows/test-build.yml
vendored
@@ -110,7 +110,7 @@ jobs:
|
||||
RESEND_API_KEY: 'dummy_key_for_ci_only'
|
||||
AWS_REGION: 'us-west-2'
|
||||
ENCRYPTION_KEY: '7cf672e460e430c1fba707575c2b0e2ad5a99dddf9b7b7e3b5646e630861db1c' # dummy key for CI only
|
||||
run: bun run build
|
||||
run: bunx turbo run build --filter=sim
|
||||
|
||||
- name: Upload coverage to Codecov
|
||||
uses: codecov/codecov-action@v5
|
||||
|
||||
@@ -238,7 +238,7 @@ export const ServiceBlock: BlockConfig = {
|
||||
bgColor: '#hexcolor',
|
||||
icon: ServiceIcon,
|
||||
subBlocks: [ /* see SubBlock Properties */ ],
|
||||
tools: { access: ['service_action'], config: { tool: (p) => `service_${p.operation}` } },
|
||||
tools: { access: ['service_action'], config: { tool: (p) => `service_${p.operation}`, params: (p) => ({ /* type coercions here */ }) } },
|
||||
inputs: { /* ... */ },
|
||||
outputs: { /* ... */ },
|
||||
}
|
||||
@@ -246,6 +246,8 @@ export const ServiceBlock: BlockConfig = {
|
||||
|
||||
Register in `blocks/registry.ts` (alphabetically).
|
||||
|
||||
**Important:** `tools.config.tool` runs during serialization (before variable resolution). Never do `Number()` or other type coercions there — dynamic references like `<Block.output>` will be destroyed. Use `tools.config.params` for type coercions (it runs during execution, after variables are resolved).
|
||||
|
||||
**SubBlock Properties:**
|
||||
```typescript
|
||||
{
|
||||
|
||||
@@ -1157,6 +1157,17 @@ export function AirweaveIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function AlgoliaIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 50 50'>
|
||||
<path
|
||||
fill='#FFFFFF'
|
||||
d='M25,0C11.3,0,0.2,11,0,24.6C-0.2,38.4,11,49.9,24.8,50c4.3,0,8.4-1,12-3c0.4-0.2,0.4-0.7,0.1-1l-2.3-2.1 c-0.5-0.4-1.2-0.5-1.7-0.3c-2.5,1.1-5.3,1.6-8.2,1.6c-11.2-0.1-20.2-9.4-20-20.6C4.9,13.6,13.9,4.7,25,4.7h20.3v36L33.7,30.5 c-0.4-0.3-0.9-0.3-1.2,0.1c-1.8,2.4-4.9,4-8.2,3.7c-4.6-0.3-8.4-4-8.7-8.7c-0.4-5.5,4-10.2,9.4-10.2c4.9,0,9,3.8,9.4,8.6 c0,0.4,0.2,0.8,0.6,1.1l3,2.7c0.3,0.3,0.9,0.1,1-0.3c0.2-1.2,0.3-2.4,0.2-3.6c-0.5-7-6.2-12.7-13.2-13.1c-8.1-0.5-14.8,5.8-15,13.7 c-0.2,7.7,6.1,14.4,13.8,14.5c3.2,0.1,6.2-0.9,8.6-2.7l15,13.3c0.6,0.6,1.7,0.1,1.7-0.7v-48C50,0.4,49.5,0,49,0L25,0 C25,0,25,0,25,0z'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function GoogleBooksIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 478.633 540.068'>
|
||||
@@ -5737,3 +5748,86 @@ export function CloudflareIcon(props: SVGProps<SVGSVGElement>) {
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function UpstashIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 256 341' width='24' height='24'>
|
||||
<path
|
||||
fill='#00C98D'
|
||||
d='M0 298.417c56.554 56.553 148.247 56.553 204.801 0c56.554-56.554 56.554-148.247 0-204.801l-25.6 25.6c42.415 42.416 42.415 111.185 0 153.6c-42.416 42.416-111.185 42.416-153.601 0z'
|
||||
/>
|
||||
<path
|
||||
fill='#00C98D'
|
||||
d='M51.2 247.216c28.277 28.277 74.123 28.277 102.4 0c28.277-28.276 28.277-74.123 0-102.4l-25.6 25.6c14.14 14.138 14.14 37.061 0 51.2c-14.138 14.139-37.061 14.139-51.2 0zM256 42.415c-56.554-56.553-148.247-56.553-204.8 0c-56.555 56.555-56.555 148.247 0 204.801l25.599-25.6c-42.415-42.415-42.415-111.185 0-153.6c42.416-42.416 111.185-42.416 153.6 0z'
|
||||
/>
|
||||
<path
|
||||
fill='#00C98D'
|
||||
d='M204.8 93.616c-28.276-28.277-74.124-28.277-102.4 0c-28.278 28.277-28.278 74.123 0 102.4l25.6-25.6c-14.14-14.138-14.14-37.061 0-51.2c14.138-14.139 37.06-14.139 51.2 0z'
|
||||
/>
|
||||
<path
|
||||
fill='#FFF'
|
||||
fillOpacity='.4'
|
||||
d='M256 42.415c-56.554-56.553-148.247-56.553-204.8 0c-56.555 56.555-56.555 148.247 0 204.801l25.599-25.6c-42.415-42.415-42.415-111.185 0-153.6c42.416-42.416 111.185-42.416 153.6 0z'
|
||||
/>
|
||||
<path
|
||||
fill='#FFF'
|
||||
fillOpacity='.4'
|
||||
d='M204.8 93.616c-28.276-28.277-74.124-28.277-102.4 0c-28.278 28.277-28.278 74.123 0 102.4l25.6-25.6c-14.14-14.138-14.14-37.061 0-51.2c14.138-14.139 37.06-14.139 51.2 0z'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function RevenueCatIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
{...props}
|
||||
width='512'
|
||||
height='512'
|
||||
viewBox='0 0 512 512'
|
||||
fill='none'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
>
|
||||
<path
|
||||
d='M95 109.774C110.152 106.108 133.612 104 154.795 104C212.046 104 246.32 123.928 246.32 174.646C246.32 205.746 233.737 226.264 214.005 237.437L261.765 318.946C258.05 321.632 250.035 323.176 238.864 323.176C226.282 323.176 217.987 321.672 211.982 318.946L172.225 248.3H167.645C157.789 248.305 147.945 247.601 138.18 246.192V319.255C134.172 321.672 127.022 323.176 116.73 323.176C106.73 323.176 99.2874 321.659 95 319.255V109.774ZM137.643 207.848C145.772 209.263 153.997 209.968 162.235 209.956C187.12 209.956 202.285 200.556 202.285 177.057C202.285 152.886 186.268 142.949 157.668 142.949C150.956 142.918 144.255 143.515 137.643 144.735V207.848Z'
|
||||
fill='#FFFFFF'
|
||||
/>
|
||||
<path
|
||||
d='M428.529 329.244C428.529 365.526 410.145 375.494 396.306 382.195C360.972 399.32 304.368 379.4 244.206 373.338C189.732 366.214 135.706 361.522 127.309 373.738C124.152 376.832 123.481 386.798 127.309 390.862C138.604 402.85 168.061 394.493 188.919 390.714C195.391 389.694 201.933 392.099 206.079 397.021C210.226 401.944 211.349 408.637 209.024 414.58C206.699 420.522 201.28 424.811 194.809 425.831C185.379 427.264 175.85 427.989 166.306 428C145.988 428 120.442 424.495 105.943 409.072C98.7232 401.4 91.3266 387.78 97.0271 366.465C107.875 326.074 172.807 336.052 248.033 343.633C300.41 348.907 357.23 366.465 379.934 350.343C385.721 346.234 396.517 337.022 390.698 329.244C384.879 321.467 375.353 325.684 362.838 325.684C300.152 325.684 263.238 285.302 263.238 217.916C263.247 167.292 284.176 131.892 318.287 115.09C333.109 107.789 350.421 104 369.587 104C386.292 104 403.269 106.931 414.11 113.366C420.847 123.032 423.778 140.305 422.306 153.201C408.247 146.466 395.36 142.949 378.669 142.949C337.365 142.949 308.947 164.039 308.947 214.985C308.947 265.932 337.065 286.149 376.611 286.149C387.869 286.035 403.1 284.67 422.306 282.053C426.455 297.498 428.529 313.228 428.529 329.244Z'
|
||||
fill='#FFFFFF'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function RedisIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
{...props}
|
||||
viewBox='0 0 512 512'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
fillRule='evenodd'
|
||||
clipRule='evenodd'
|
||||
strokeLinejoin='round'
|
||||
strokeMiterlimit='2'
|
||||
>
|
||||
<path
|
||||
d='M479.14 279.864c-34.584 43.578-71.94 93.385-146.645 93.385-66.73 0-91.59-58.858-93.337-106.672 14.62 30.915 43.203 55.949 87.804 54.792C412.737 318.6 471.53 241.127 471.53 170.57c0-84.388-62.947-145.262-172.24-145.262-78.165 0-175.004 29.743-238.646 76.782-.689 48.42 26.286 111.369 35.972 104.452 55.17-39.67 98.918-65.203 141.35-78.01C175.153 198.58 24.451 361.219 6 389.85c2.076 26.286 34.588 96.842 50.496 96.842 4.841 0 8.993-2.768 13.835-7.61 45.433-51.046 82.472-96.816 115.412-140.933 4.627 64.658 36.42 143.702 125.307 143.702 79.55 0 158.408-57.414 194.377-186.767 4.149-15.911-15.22-28.362-26.286-15.22zm-90.616-104.449c0 40.81-40.118 60.87-76.782 60.87-19.596 0-34.648-5.145-46.554-11.832 21.906-33.168 43.59-67.182 66.887-103.593 41.08 6.953 56.449 29.788 56.449 54.555z'
|
||||
fill='#FFFFFF'
|
||||
fillRule='nonzero'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function HexIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1450.3 600'>
|
||||
<path
|
||||
fill='#5F509D'
|
||||
fillRule='evenodd'
|
||||
d='m250.11,0v199.49h-50V0H0v600h200.11v-300.69h50v300.69h200.18V0h-200.18Zm249.9,0v600h450.29v-250.23h-200.2v149h-50v-199.46h250.2V0h-450.29Zm200.09,199.49v-99.49h50v99.49h-50Zm550.02,0V0h200.18v150l-100,100.09,100,100.09v249.82h-200.18v-300.69h-50v300.69h-200.11v-249.82l100.11-100.09-100.11-100.09V0h200.11v199.49h50Z'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -8,6 +8,7 @@ import {
|
||||
AhrefsIcon,
|
||||
AirtableIcon,
|
||||
AirweaveIcon,
|
||||
AlgoliaIcon,
|
||||
ApifyIcon,
|
||||
ApolloIcon,
|
||||
ArxivIcon,
|
||||
@@ -53,6 +54,7 @@ import {
|
||||
GrafanaIcon,
|
||||
GrainIcon,
|
||||
GreptileIcon,
|
||||
HexIcon,
|
||||
HubspotIcon,
|
||||
HuggingFaceIcon,
|
||||
HunterIOIcon,
|
||||
@@ -98,8 +100,10 @@ import {
|
||||
QdrantIcon,
|
||||
RDSIcon,
|
||||
RedditIcon,
|
||||
RedisIcon,
|
||||
ReductoIcon,
|
||||
ResendIcon,
|
||||
RevenueCatIcon,
|
||||
S3Icon,
|
||||
SalesforceIcon,
|
||||
SearchIcon,
|
||||
@@ -127,6 +131,7 @@ import {
|
||||
TTSIcon,
|
||||
TwilioIcon,
|
||||
TypeformIcon,
|
||||
UpstashIcon,
|
||||
VercelIcon,
|
||||
VideoIcon,
|
||||
WealthboxIcon,
|
||||
@@ -148,6 +153,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
ahrefs: AhrefsIcon,
|
||||
airtable: AirtableIcon,
|
||||
airweave: AirweaveIcon,
|
||||
algolia: AlgoliaIcon,
|
||||
apify: ApifyIcon,
|
||||
apollo: ApolloIcon,
|
||||
arxiv: ArxivIcon,
|
||||
@@ -191,6 +197,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
grafana: GrafanaIcon,
|
||||
grain: GrainIcon,
|
||||
greptile: GreptileIcon,
|
||||
hex: HexIcon,
|
||||
hubspot: HubspotIcon,
|
||||
huggingface: HuggingFaceIcon,
|
||||
hunter: HunterIOIcon,
|
||||
@@ -236,8 +243,10 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
qdrant: QdrantIcon,
|
||||
rds: RDSIcon,
|
||||
reddit: RedditIcon,
|
||||
redis: RedisIcon,
|
||||
reducto_v2: ReductoIcon,
|
||||
resend: ResendIcon,
|
||||
revenuecat: RevenueCatIcon,
|
||||
s3: S3Icon,
|
||||
salesforce: SalesforceIcon,
|
||||
search: SearchIcon,
|
||||
@@ -267,6 +276,7 @@ export const blockTypeToIconMap: Record<string, IconComponent> = {
|
||||
twilio_sms: TwilioIcon,
|
||||
twilio_voice: TwilioIcon,
|
||||
typeform: TypeformIcon,
|
||||
upstash: UpstashIcon,
|
||||
vercel: VercelIcon,
|
||||
video_generator_v2: VideoIcon,
|
||||
vision_v2: EyeIcon,
|
||||
|
||||
404
apps/docs/content/docs/en/tools/algolia.mdx
Normal file
404
apps/docs/content/docs/en/tools/algolia.mdx
Normal file
@@ -0,0 +1,404 @@
|
||||
---
|
||||
title: Algolia
|
||||
description: Search and manage Algolia indices
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="algolia"
|
||||
color="#003DFF"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[Algolia](https://www.algolia.com/) is a powerful hosted search platform that enables developers and teams to deliver fast, relevant search experiences in their apps and websites. Algolia provides full-text, faceted, and filtered search as well as analytics and advanced ranking capabilities.
|
||||
|
||||
With Algolia, you can:
|
||||
|
||||
- **Deliver lightning-fast search**: Provide instant search results as users type, with typo tolerance and synonyms
|
||||
- **Manage and update records**: Easily add, update, or delete objects/records in your indices
|
||||
- **Perform advanced filtering**: Use filters, facets, and custom ranking to refine and organize search results
|
||||
- **Configure index settings**: Adjust relevance, ranking, attributes for search, and more to optimize user experience
|
||||
- **Scale confidently**: Algolia handles massive traffic and data volumes with globally distributed infrastructure
|
||||
- **Gain insights**: Track analytics, search patterns, and user engagement
|
||||
|
||||
In Sim, the Algolia integration allows your agents to search, manage, and configure Algolia indices directly within your workflows. Use Algolia to power dynamic data exploration, automate record updates, run batch operations, and more—all from a single tool in your workspace.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Integrate Algolia into your workflow. Search indices, manage records (add, update, delete, browse), configure index settings, and perform batch operations.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `algolia_search`
|
||||
|
||||
Search an Algolia index
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `applicationId` | string | Yes | Algolia Application ID |
|
||||
| `apiKey` | string | Yes | Algolia API Key |
|
||||
| `indexName` | string | Yes | Name of the Algolia index to search |
|
||||
| `query` | string | Yes | Search query text |
|
||||
| `hitsPerPage` | number | No | Number of hits per page \(default: 20\) |
|
||||
| `page` | number | No | Page number to retrieve \(default: 0\) |
|
||||
| `filters` | string | No | Filter string \(e.g., "category:electronics AND price < 100"\) |
|
||||
| `attributesToRetrieve` | string | No | Comma-separated list of attributes to retrieve |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `hits` | array | Array of matching records |
|
||||
| ↳ `objectID` | string | Unique identifier of the record |
|
||||
| ↳ `_highlightResult` | object | Highlighted attributes matching the query. Each attribute has value, matchLevel \(none, partial, full\), and matchedWords |
|
||||
| ↳ `_snippetResult` | object | Snippeted attributes matching the query. Each attribute has value and matchLevel |
|
||||
| ↳ `_rankingInfo` | object | Ranking information for the hit. Only present when getRankingInfo is enabled |
|
||||
| ↳ `nbTypos` | number | Number of typos in the query match |
|
||||
| ↳ `firstMatchedWord` | number | Position of the first matched word |
|
||||
| ↳ `geoDistance` | number | Distance in meters for geo-search results |
|
||||
| ↳ `nbExactWords` | number | Number of exactly matched words |
|
||||
| ↳ `userScore` | number | Custom ranking score |
|
||||
| ↳ `words` | number | Number of matched words |
|
||||
| `nbHits` | number | Total number of matching hits |
|
||||
| `page` | number | Current page number \(zero-based\) |
|
||||
| `nbPages` | number | Total number of pages available |
|
||||
| `hitsPerPage` | number | Number of hits per page \(1-1000, default 20\) |
|
||||
| `processingTimeMS` | number | Server-side processing time in milliseconds |
|
||||
| `query` | string | The search query that was executed |
|
||||
| `parsedQuery` | string | The query string after normalization and stop word removal |
|
||||
| `facets` | object | Facet counts keyed by facet name, each containing value-count pairs |
|
||||
| `facets_stats` | object | Statistics \(min, max, avg, sum\) for numeric facets |
|
||||
| `exhaustive` | object | Exhaustiveness flags for facetsCount, facetValues, nbHits, rulesMatch, and typo |
|
||||
|
||||
### `algolia_add_record`
|
||||
|
||||
Add or replace a record in an Algolia index
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `applicationId` | string | Yes | Algolia Application ID |
|
||||
| `apiKey` | string | Yes | Algolia Admin API Key |
|
||||
| `indexName` | string | Yes | Name of the Algolia index |
|
||||
| `objectID` | string | No | Object ID for the record \(auto-generated if not provided\) |
|
||||
| `record` | json | Yes | JSON object representing the record to add |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `taskID` | number | Algolia task ID for tracking the indexing operation |
|
||||
| `objectID` | string | The object ID of the added or replaced record |
|
||||
| `createdAt` | string | Timestamp when the record was created \(only present when objectID is auto-generated\) |
|
||||
| `updatedAt` | string | Timestamp when the record was updated \(only present when replacing an existing record\) |
|
||||
|
||||
### `algolia_get_record`
|
||||
|
||||
Get a record by objectID from an Algolia index
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `applicationId` | string | Yes | Algolia Application ID |
|
||||
| `apiKey` | string | Yes | Algolia API Key |
|
||||
| `indexName` | string | Yes | Name of the Algolia index |
|
||||
| `objectID` | string | Yes | The objectID of the record to retrieve |
|
||||
| `attributesToRetrieve` | string | No | Comma-separated list of attributes to retrieve |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `objectID` | string | The objectID of the retrieved record |
|
||||
| `record` | object | The record data \(all attributes\) |
|
||||
|
||||
### `algolia_get_records`
|
||||
|
||||
Retrieve multiple records by objectID from one or more Algolia indices
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `applicationId` | string | Yes | Algolia Application ID |
|
||||
| `apiKey` | string | Yes | Algolia API Key |
|
||||
| `indexName` | string | Yes | Default index name for all requests |
|
||||
| `requests` | json | Yes | Array of objects specifying records to retrieve. Each must have "objectID" and optionally "indexName" and "attributesToRetrieve". |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `results` | array | Array of retrieved records \(null entries for records not found\) |
|
||||
| ↳ `objectID` | string | Unique identifier of the record |
|
||||
|
||||
### `algolia_partial_update_record`
|
||||
|
||||
Partially update a record in an Algolia index without replacing it entirely
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `applicationId` | string | Yes | Algolia Application ID |
|
||||
| `apiKey` | string | Yes | Algolia Admin API Key |
|
||||
| `indexName` | string | Yes | Name of the Algolia index |
|
||||
| `objectID` | string | Yes | The objectID of the record to update |
|
||||
| `attributes` | json | Yes | JSON object with attributes to update. Supports built-in operations like \{"stock": \{"_operation": "Decrement", "value": 1\}\} |
|
||||
| `createIfNotExists` | boolean | No | Whether to create the record if it does not exist \(default: true\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `taskID` | number | Algolia task ID for tracking the update operation |
|
||||
| `objectID` | string | The objectID of the updated record |
|
||||
| `updatedAt` | string | Timestamp when the record was updated |
|
||||
|
||||
### `algolia_delete_record`
|
||||
|
||||
Delete a record by objectID from an Algolia index
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `applicationId` | string | Yes | Algolia Application ID |
|
||||
| `apiKey` | string | Yes | Algolia Admin API Key |
|
||||
| `indexName` | string | Yes | Name of the Algolia index |
|
||||
| `objectID` | string | Yes | The objectID of the record to delete |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `taskID` | number | Algolia task ID for tracking the deletion |
|
||||
| `deletedAt` | string | Timestamp when the record was deleted |
|
||||
|
||||
### `algolia_browse_records`
|
||||
|
||||
Browse and iterate over all records in an Algolia index using cursor pagination
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `applicationId` | string | Yes | Algolia Application ID |
|
||||
| `apiKey` | string | Yes | Algolia API Key \(must have browse ACL\) |
|
||||
| `indexName` | string | Yes | Name of the Algolia index to browse |
|
||||
| `query` | string | No | Search query to filter browsed records |
|
||||
| `filters` | string | No | Filter string to narrow down results |
|
||||
| `attributesToRetrieve` | string | No | Comma-separated list of attributes to retrieve |
|
||||
| `hitsPerPage` | number | No | Number of hits per page \(default: 1000, max: 1000\) |
|
||||
| `cursor` | string | No | Cursor from a previous browse response for pagination |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `hits` | array | Array of records from the index \(up to 1000 per request\) |
|
||||
| ↳ `objectID` | string | Unique identifier of the record |
|
||||
| `cursor` | string | Opaque cursor string for retrieving the next page of results. Absent when no more results exist. |
|
||||
| `nbHits` | number | Total number of records matching the browse criteria |
|
||||
| `page` | number | Current page number \(zero-based\) |
|
||||
| `nbPages` | number | Total number of pages available |
|
||||
| `hitsPerPage` | number | Number of hits per page \(1-1000, default 1000 for browse\) |
|
||||
| `processingTimeMS` | number | Server-side processing time in milliseconds |
|
||||
|
||||
### `algolia_batch_operations`
|
||||
|
||||
Perform batch add, update, partial update, or delete operations on records in an Algolia index
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `applicationId` | string | Yes | Algolia Application ID |
|
||||
| `apiKey` | string | Yes | Algolia Admin API Key |
|
||||
| `indexName` | string | Yes | Name of the Algolia index |
|
||||
| `requests` | json | Yes | Array of batch operations. Each item has "action" \(addObject, updateObject, partialUpdateObject, partialUpdateObjectNoCreate, deleteObject\) and "body" \(the record data, must include objectID for update/delete\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `taskID` | number | Algolia task ID for tracking the batch operation |
|
||||
| `objectIDs` | array | Array of object IDs affected by the batch operation |
|
||||
|
||||
### `algolia_list_indices`
|
||||
|
||||
List all indices in an Algolia application
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `applicationId` | string | Yes | Algolia Application ID |
|
||||
| `apiKey` | string | Yes | Algolia API Key |
|
||||
| `page` | number | No | Page number for paginating indices \(default: not paginated\) |
|
||||
| `hitsPerPage` | number | No | Number of indices per page \(default: 100\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `indices` | array | List of indices in the application |
|
||||
| ↳ `name` | string | Name of the index |
|
||||
| ↳ `entries` | number | Number of records in the index |
|
||||
| ↳ `dataSize` | number | Size of the index data in bytes |
|
||||
| ↳ `fileSize` | number | Size of the index files in bytes |
|
||||
| ↳ `lastBuildTimeS` | number | Last build duration in seconds |
|
||||
| ↳ `numberOfPendingTasks` | number | Number of pending indexing tasks |
|
||||
| ↳ `pendingTask` | boolean | Whether the index has pending tasks |
|
||||
| ↳ `createdAt` | string | Timestamp when the index was created |
|
||||
| ↳ `updatedAt` | string | Timestamp when the index was last updated |
|
||||
| ↳ `primary` | string | Name of the primary index \(if this is a replica\) |
|
||||
| ↳ `replicas` | array | List of replica index names |
|
||||
| ↳ `virtual` | boolean | Whether the index is a virtual replica |
|
||||
| `nbPages` | number | Total number of pages of indices |
|
||||
|
||||
### `algolia_get_settings`
|
||||
|
||||
Retrieve the settings of an Algolia index
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `applicationId` | string | Yes | Algolia Application ID |
|
||||
| `apiKey` | string | Yes | Algolia API Key |
|
||||
| `indexName` | string | Yes | Name of the Algolia index |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `searchableAttributes` | array | List of searchable attributes |
|
||||
| `attributesForFaceting` | array | Attributes used for faceting |
|
||||
| `ranking` | array | Ranking criteria |
|
||||
| `customRanking` | array | Custom ranking criteria |
|
||||
| `replicas` | array | List of replica index names |
|
||||
| `hitsPerPage` | number | Default number of hits per page |
|
||||
| `maxValuesPerFacet` | number | Maximum number of facet values returned |
|
||||
| `highlightPreTag` | string | HTML tag inserted before highlighted parts |
|
||||
| `highlightPostTag` | string | HTML tag inserted after highlighted parts |
|
||||
| `paginationLimitedTo` | number | Maximum number of hits accessible via pagination |
|
||||
|
||||
### `algolia_update_settings`
|
||||
|
||||
Update the settings of an Algolia index
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `applicationId` | string | Yes | Algolia Application ID |
|
||||
| `apiKey` | string | Yes | Algolia Admin API Key \(must have editSettings ACL\) |
|
||||
| `indexName` | string | Yes | Name of the Algolia index |
|
||||
| `settings` | json | Yes | JSON object with settings to update \(e.g., \{"searchableAttributes": \["name", "description"\], "customRanking": \["desc\(popularity\)"\]\}\) |
|
||||
| `forwardToReplicas` | boolean | No | Whether to apply changes to replica indices \(default: false\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `taskID` | number | Algolia task ID for tracking the settings update |
|
||||
| `updatedAt` | string | Timestamp when the settings were updated |
|
||||
|
||||
### `algolia_delete_index`
|
||||
|
||||
Delete an entire Algolia index and all its records
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `applicationId` | string | Yes | Algolia Application ID |
|
||||
| `apiKey` | string | Yes | Algolia Admin API Key \(must have deleteIndex ACL\) |
|
||||
| `indexName` | string | Yes | Name of the Algolia index to delete |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `taskID` | number | Algolia task ID for tracking the index deletion |
|
||||
| `deletedAt` | string | Timestamp when the index was deleted |
|
||||
|
||||
### `algolia_copy_move_index`
|
||||
|
||||
Copy or move an Algolia index to a new destination
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `applicationId` | string | Yes | Algolia Application ID |
|
||||
| `apiKey` | string | Yes | Algolia Admin API Key |
|
||||
| `indexName` | string | Yes | Name of the source index |
|
||||
| `operation` | string | Yes | Operation to perform: "copy" or "move" |
|
||||
| `destination` | string | Yes | Name of the destination index |
|
||||
| `scope` | json | No | Array of scopes to copy \(only for "copy" operation\): \["settings", "synonyms", "rules"\]. Omit to copy everything including records. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `taskID` | number | Algolia task ID for tracking the copy/move operation |
|
||||
| `updatedAt` | string | Timestamp when the operation was performed |
|
||||
|
||||
### `algolia_clear_records`
|
||||
|
||||
Clear all records from an Algolia index while keeping settings, synonyms, and rules
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `applicationId` | string | Yes | Algolia Application ID |
|
||||
| `apiKey` | string | Yes | Algolia Admin API Key \(must have deleteIndex ACL\) |
|
||||
| `indexName` | string | Yes | Name of the Algolia index to clear |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `taskID` | number | Algolia task ID for tracking the clear operation |
|
||||
| `updatedAt` | string | Timestamp when the records were cleared |
|
||||
|
||||
### `algolia_delete_by_filter`
|
||||
|
||||
Delete all records matching a filter from an Algolia index
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `applicationId` | string | Yes | Algolia Application ID |
|
||||
| `apiKey` | string | Yes | Algolia Admin API Key \(must have deleteIndex ACL\) |
|
||||
| `indexName` | string | Yes | Name of the Algolia index |
|
||||
| `filters` | string | No | Filter expression to match records for deletion \(e.g., "category:outdated"\) |
|
||||
| `facetFilters` | json | No | Array of facet filters \(e.g., \["brand:Acme"\]\) |
|
||||
| `numericFilters` | json | No | Array of numeric filters \(e.g., \["price > 100"\]\) |
|
||||
| `tagFilters` | json | No | Array of tag filters using the _tags attribute \(e.g., \["published"\]\) |
|
||||
| `aroundLatLng` | string | No | Coordinates for geo-search filter \(e.g., "40.71,-74.01"\) |
|
||||
| `aroundRadius` | number | No | Maximum radius in meters for geo-search, or "all" for unlimited |
|
||||
| `insideBoundingBox` | json | No | Bounding box coordinates as \[\[lat1, lng1, lat2, lng2\]\] for geo-search filter |
|
||||
| `insidePolygon` | json | No | Polygon coordinates as \[\[lat1, lng1, lat2, lng2, lat3, lng3, ...\]\] for geo-search filter |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `taskID` | number | Algolia task ID for tracking the delete-by-filter operation |
|
||||
| `updatedAt` | string | Timestamp when the operation was performed |
|
||||
|
||||
|
||||
459
apps/docs/content/docs/en/tools/hex.mdx
Normal file
459
apps/docs/content/docs/en/tools/hex.mdx
Normal file
@@ -0,0 +1,459 @@
|
||||
---
|
||||
title: Hex
|
||||
description: Run and manage Hex projects
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="hex"
|
||||
color="#F5E6FF"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[Hex](https://hex.tech/) is a collaborative platform for analytics and data science that allows you to build, run, and share interactive data projects and notebooks. Hex lets teams work together on data exploration, transformation, and visualization, making it easy to turn analysis into shareable insights.
|
||||
|
||||
With Hex, you can:
|
||||
|
||||
- **Create and run powerful notebooks**: Blend SQL, Python, and visualizations in a single, interactive workspace.
|
||||
- **Collaborate and share**: Work together with teammates in real time and publish interactive data apps for broader audiences.
|
||||
- **Automate and orchestrate workflows**: Schedule notebook runs, parameterize runs with inputs, and automate data tasks.
|
||||
- **Visualize and communicate results**: Turn analysis results into dashboards or interactive apps that anyone can use.
|
||||
- **Integrate with your data stack**: Connect easily to data warehouses, APIs, and other sources.
|
||||
|
||||
The Sim Hex integration allows your AI agents or workflows to:
|
||||
|
||||
- List, get, and manage Hex projects directly from Sim.
|
||||
- Trigger and monitor notebook runs, check their statuses, or cancel them as part of larger automation flows.
|
||||
- Retrieve run results and use them within Sim-powered processes and decision-making.
|
||||
- Leverage Hex’s interactive analytics capabilities right inside your automated Sim workflows.
|
||||
|
||||
Whether you’re empowering analysts, automating reporting, or embedding actionable data into your processes, Hex and Sim provide a seamless way to operationalize analytics and bring data-driven insights to your team.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Integrate Hex into your workflow. Run projects, check run status, manage collections and groups, list users, and view data connections. Requires a Hex API token.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `hex_cancel_run`
|
||||
|
||||
Cancel an active Hex project run.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `projectId` | string | Yes | The UUID of the Hex project |
|
||||
| `runId` | string | Yes | The UUID of the run to cancel |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `success` | boolean | Whether the run was successfully cancelled |
|
||||
| `projectId` | string | Project UUID |
|
||||
| `runId` | string | Run UUID that was cancelled |
|
||||
|
||||
### `hex_create_collection`
|
||||
|
||||
Create a new collection in the Hex workspace to organize projects.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `name` | string | Yes | Name for the new collection |
|
||||
| `description` | string | No | Optional description for the collection |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Newly created collection UUID |
|
||||
| `name` | string | Collection name |
|
||||
| `description` | string | Collection description |
|
||||
| `creator` | object | Collection creator |
|
||||
| ↳ `email` | string | Creator email |
|
||||
| ↳ `id` | string | Creator UUID |
|
||||
|
||||
### `hex_get_collection`
|
||||
|
||||
Retrieve details for a specific Hex collection by its ID.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `collectionId` | string | Yes | The UUID of the collection |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Collection UUID |
|
||||
| `name` | string | Collection name |
|
||||
| `description` | string | Collection description |
|
||||
| `creator` | object | Collection creator |
|
||||
| ↳ `email` | string | Creator email |
|
||||
| ↳ `id` | string | Creator UUID |
|
||||
|
||||
### `hex_get_data_connection`
|
||||
|
||||
Retrieve details for a specific data connection including type, description, and configuration flags.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `dataConnectionId` | string | Yes | The UUID of the data connection |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Connection UUID |
|
||||
| `name` | string | Connection name |
|
||||
| `type` | string | Connection type \(e.g., snowflake, postgres, bigquery\) |
|
||||
| `description` | string | Connection description |
|
||||
| `connectViaSsh` | boolean | Whether SSH tunneling is enabled |
|
||||
| `includeMagic` | boolean | Whether Magic AI features are enabled |
|
||||
| `allowWritebackCells` | boolean | Whether writeback cells are allowed |
|
||||
|
||||
### `hex_get_group`
|
||||
|
||||
Retrieve details for a specific Hex group.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `groupId` | string | Yes | The UUID of the group |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Group UUID |
|
||||
| `name` | string | Group name |
|
||||
| `createdAt` | string | Creation timestamp |
|
||||
|
||||
### `hex_get_project`
|
||||
|
||||
Get metadata and details for a specific Hex project by its ID.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `projectId` | string | Yes | The UUID of the Hex project |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Project UUID |
|
||||
| `title` | string | Project title |
|
||||
| `description` | string | Project description |
|
||||
| `status` | object | Project status |
|
||||
| ↳ `name` | string | Status name \(e.g., PUBLISHED, DRAFT\) |
|
||||
| `type` | string | Project type \(PROJECT or COMPONENT\) |
|
||||
| `creator` | object | Project creator |
|
||||
| ↳ `email` | string | Creator email |
|
||||
| `owner` | object | Project owner |
|
||||
| ↳ `email` | string | Owner email |
|
||||
| `categories` | array | Project categories |
|
||||
| ↳ `name` | string | Category name |
|
||||
| ↳ `description` | string | Category description |
|
||||
| `lastEditedAt` | string | ISO 8601 last edited timestamp |
|
||||
| `lastPublishedAt` | string | ISO 8601 last published timestamp |
|
||||
| `createdAt` | string | ISO 8601 creation timestamp |
|
||||
| `archivedAt` | string | ISO 8601 archived timestamp |
|
||||
| `trashedAt` | string | ISO 8601 trashed timestamp |
|
||||
|
||||
### `hex_get_project_runs`
|
||||
|
||||
Retrieve API-triggered runs for a Hex project with optional filtering by status and pagination.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `projectId` | string | Yes | The UUID of the Hex project |
|
||||
| `limit` | number | No | Maximum number of runs to return \(1-100, default: 25\) |
|
||||
| `offset` | number | No | Offset for paginated results \(default: 0\) |
|
||||
| `statusFilter` | string | No | Filter by run status: PENDING, RUNNING, ERRORED, COMPLETED, KILLED, UNABLE_TO_ALLOCATE_KERNEL |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `runs` | array | List of project runs |
|
||||
| ↳ `projectId` | string | Project UUID |
|
||||
| ↳ `runId` | string | Run UUID |
|
||||
| ↳ `runUrl` | string | URL to view the run |
|
||||
| ↳ `status` | string | Run status \(PENDING, RUNNING, COMPLETED, ERRORED, KILLED, UNABLE_TO_ALLOCATE_KERNEL\) |
|
||||
| ↳ `startTime` | string | Run start time |
|
||||
| ↳ `endTime` | string | Run end time |
|
||||
| ↳ `elapsedTime` | number | Elapsed time in seconds |
|
||||
| ↳ `traceId` | string | Trace ID |
|
||||
| ↳ `projectVersion` | number | Project version number |
|
||||
| `total` | number | Total number of runs returned |
|
||||
| `traceId` | string | Top-level trace ID |
|
||||
|
||||
### `hex_get_queried_tables`
|
||||
|
||||
Return the warehouse tables queried by a Hex project, including data connection and table names.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `projectId` | string | Yes | The UUID of the Hex project |
|
||||
| `limit` | number | No | Maximum number of tables to return \(1-100\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `tables` | array | List of warehouse tables queried by the project |
|
||||
| ↳ `dataConnectionId` | string | Data connection UUID |
|
||||
| ↳ `dataConnectionName` | string | Data connection name |
|
||||
| ↳ `tableName` | string | Table name |
|
||||
| `total` | number | Total number of tables returned |
|
||||
|
||||
### `hex_get_run_status`
|
||||
|
||||
Check the status of a Hex project run by its run ID.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `projectId` | string | Yes | The UUID of the Hex project |
|
||||
| `runId` | string | Yes | The UUID of the run to check |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `projectId` | string | Project UUID |
|
||||
| `runId` | string | Run UUID |
|
||||
| `runUrl` | string | URL to view the run |
|
||||
| `status` | string | Run status \(PENDING, RUNNING, COMPLETED, ERRORED, KILLED, UNABLE_TO_ALLOCATE_KERNEL\) |
|
||||
| `startTime` | string | ISO 8601 run start time |
|
||||
| `endTime` | string | ISO 8601 run end time |
|
||||
| `elapsedTime` | number | Elapsed time in seconds |
|
||||
| `traceId` | string | Trace ID for debugging |
|
||||
| `projectVersion` | number | Project version number |
|
||||
|
||||
### `hex_list_collections`
|
||||
|
||||
List all collections in the Hex workspace.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `limit` | number | No | Maximum number of collections to return \(1-500, default: 25\) |
|
||||
| `sortBy` | string | No | Sort by field: NAME |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `collections` | array | List of collections |
|
||||
| ↳ `id` | string | Collection UUID |
|
||||
| ↳ `name` | string | Collection name |
|
||||
| ↳ `description` | string | Collection description |
|
||||
| ↳ `creator` | object | Collection creator |
|
||||
| ↳ `email` | string | Creator email |
|
||||
| ↳ `id` | string | Creator UUID |
|
||||
| `total` | number | Total number of collections returned |
|
||||
|
||||
### `hex_list_data_connections`
|
||||
|
||||
List all data connections in the Hex workspace (e.g., Snowflake, PostgreSQL, BigQuery).
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `limit` | number | No | Maximum number of connections to return \(1-500, default: 25\) |
|
||||
| `sortBy` | string | No | Sort by field: CREATED_AT or NAME |
|
||||
| `sortDirection` | string | No | Sort direction: ASC or DESC |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `connections` | array | List of data connections |
|
||||
| ↳ `id` | string | Connection UUID |
|
||||
| ↳ `name` | string | Connection name |
|
||||
| ↳ `type` | string | Connection type \(e.g., athena, bigquery, databricks, postgres, redshift, snowflake\) |
|
||||
| ↳ `description` | string | Connection description |
|
||||
| ↳ `connectViaSsh` | boolean | Whether SSH tunneling is enabled |
|
||||
| ↳ `includeMagic` | boolean | Whether Magic AI features are enabled |
|
||||
| ↳ `allowWritebackCells` | boolean | Whether writeback cells are allowed |
|
||||
| `total` | number | Total number of connections returned |
|
||||
|
||||
### `hex_list_groups`
|
||||
|
||||
List all groups in the Hex workspace with optional sorting.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `limit` | number | No | Maximum number of groups to return \(1-500, default: 25\) |
|
||||
| `sortBy` | string | No | Sort by field: CREATED_AT or NAME |
|
||||
| `sortDirection` | string | No | Sort direction: ASC or DESC |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `groups` | array | List of workspace groups |
|
||||
| ↳ `id` | string | Group UUID |
|
||||
| ↳ `name` | string | Group name |
|
||||
| ↳ `createdAt` | string | Creation timestamp |
|
||||
| `total` | number | Total number of groups returned |
|
||||
|
||||
### `hex_list_projects`
|
||||
|
||||
List all projects in your Hex workspace with optional filtering by status.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `limit` | number | No | Maximum number of projects to return \(1-100\) |
|
||||
| `includeArchived` | boolean | No | Include archived projects in results |
|
||||
| `statusFilter` | string | No | Filter by status: PUBLISHED, DRAFT, or ALL |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `projects` | array | List of Hex projects |
|
||||
| ↳ `id` | string | Project UUID |
|
||||
| ↳ `title` | string | Project title |
|
||||
| ↳ `description` | string | Project description |
|
||||
| ↳ `status` | object | Project status |
|
||||
| ↳ `name` | string | Status name \(e.g., PUBLISHED, DRAFT\) |
|
||||
| ↳ `type` | string | Project type \(PROJECT or COMPONENT\) |
|
||||
| ↳ `creator` | object | Project creator |
|
||||
| ↳ `email` | string | Creator email |
|
||||
| ↳ `owner` | object | Project owner |
|
||||
| ↳ `email` | string | Owner email |
|
||||
| ↳ `lastEditedAt` | string | Last edited timestamp |
|
||||
| ↳ `lastPublishedAt` | string | Last published timestamp |
|
||||
| ↳ `createdAt` | string | Creation timestamp |
|
||||
| ↳ `archivedAt` | string | Archived timestamp |
|
||||
| `total` | number | Total number of projects returned |
|
||||
|
||||
### `hex_list_users`
|
||||
|
||||
List all users in the Hex workspace with optional filtering and sorting.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `limit` | number | No | Maximum number of users to return \(1-100, default: 25\) |
|
||||
| `sortBy` | string | No | Sort by field: NAME or EMAIL |
|
||||
| `sortDirection` | string | No | Sort direction: ASC or DESC |
|
||||
| `groupId` | string | No | Filter users by group UUID |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `users` | array | List of workspace users |
|
||||
| ↳ `id` | string | User UUID |
|
||||
| ↳ `name` | string | User name |
|
||||
| ↳ `email` | string | User email |
|
||||
| ↳ `role` | string | User role \(ADMIN, MANAGER, EDITOR, EXPLORER, MEMBER, GUEST, EMBEDDED_USER, ANONYMOUS\) |
|
||||
| `total` | number | Total number of users returned |
|
||||
|
||||
### `hex_run_project`
|
||||
|
||||
Execute a published Hex project. Optionally pass input parameters and control caching behavior.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `projectId` | string | Yes | The UUID of the Hex project to run |
|
||||
| `inputParams` | json | No | JSON object of input parameters for the project \(e.g., \{"date": "2024-01-01"\}\) |
|
||||
| `dryRun` | boolean | No | If true, perform a dry run without executing the project |
|
||||
| `updateCache` | boolean | No | \(Deprecated\) If true, update the cached results after execution |
|
||||
| `updatePublishedResults` | boolean | No | If true, update the published app results after execution |
|
||||
| `useCachedSqlResults` | boolean | No | If true, use cached SQL results instead of re-running queries |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `projectId` | string | Project UUID |
|
||||
| `runId` | string | Run UUID |
|
||||
| `runUrl` | string | URL to view the run |
|
||||
| `runStatusUrl` | string | URL to check run status |
|
||||
| `traceId` | string | Trace ID for debugging |
|
||||
| `projectVersion` | number | Project version number |
|
||||
|
||||
### `hex_update_project`
|
||||
|
||||
Update a Hex project status label (e.g., endorsement or custom workspace statuses).
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | Hex API token \(Personal or Workspace\) |
|
||||
| `projectId` | string | Yes | The UUID of the Hex project to update |
|
||||
| `status` | string | Yes | New project status name \(custom workspace status label\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `id` | string | Project UUID |
|
||||
| `title` | string | Project title |
|
||||
| `description` | string | Project description |
|
||||
| `status` | object | Updated project status |
|
||||
| ↳ `name` | string | Status name \(e.g., PUBLISHED, DRAFT\) |
|
||||
| `type` | string | Project type \(PROJECT or COMPONENT\) |
|
||||
| `creator` | object | Project creator |
|
||||
| ↳ `email` | string | Creator email |
|
||||
| `owner` | object | Project owner |
|
||||
| ↳ `email` | string | Owner email |
|
||||
| `categories` | array | Project categories |
|
||||
| ↳ `name` | string | Category name |
|
||||
| ↳ `description` | string | Category description |
|
||||
| `lastEditedAt` | string | Last edited timestamp |
|
||||
| `lastPublishedAt` | string | Last published timestamp |
|
||||
| `createdAt` | string | Creation timestamp |
|
||||
| `archivedAt` | string | Archived timestamp |
|
||||
| `trashedAt` | string | Trashed timestamp |
|
||||
|
||||
|
||||
@@ -116,7 +116,7 @@ Create a new service request in Jira Service Management
|
||||
| `summary` | string | Yes | Summary/title for the service request |
|
||||
| `description` | string | No | Description for the service request |
|
||||
| `raiseOnBehalfOf` | string | No | Account ID of customer to raise request on behalf of |
|
||||
| `requestFieldValues` | json | No | Custom field values as key-value pairs \(overrides summary/description if provided\) |
|
||||
| `requestFieldValues` | json | No | Request field values as key-value pairs \(overrides summary/description if provided\) |
|
||||
| `requestParticipants` | string | No | Comma-separated account IDs to add as request participants |
|
||||
| `channel` | string | No | Channel the request originates from \(e.g., portal, email\) |
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
"ahrefs",
|
||||
"airtable",
|
||||
"airweave",
|
||||
"algolia",
|
||||
"apify",
|
||||
"apollo",
|
||||
"arxiv",
|
||||
@@ -48,6 +49,7 @@
|
||||
"grafana",
|
||||
"grain",
|
||||
"greptile",
|
||||
"hex",
|
||||
"hubspot",
|
||||
"huggingface",
|
||||
"hunter",
|
||||
@@ -93,8 +95,10 @@
|
||||
"qdrant",
|
||||
"rds",
|
||||
"reddit",
|
||||
"redis",
|
||||
"reducto",
|
||||
"resend",
|
||||
"revenuecat",
|
||||
"s3",
|
||||
"salesforce",
|
||||
"search",
|
||||
@@ -125,6 +129,7 @@
|
||||
"twilio_sms",
|
||||
"twilio_voice",
|
||||
"typeform",
|
||||
"upstash",
|
||||
"vercel",
|
||||
"video_generator",
|
||||
"vision",
|
||||
|
||||
452
apps/docs/content/docs/en/tools/redis.mdx
Normal file
452
apps/docs/content/docs/en/tools/redis.mdx
Normal file
@@ -0,0 +1,452 @@
|
||||
---
|
||||
title: Redis
|
||||
description: Key-value operations with Redis
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="redis"
|
||||
color="#FF4438"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[Redis](https://redis.io/) is an open-source, in-memory data structure store, used as a distributed key-value database, cache, and message broker. Redis supports a variety of data structures including strings, hashes, lists, sets, and more, making it highly flexible for different application scenarios.
|
||||
|
||||
With Redis, you can:
|
||||
|
||||
- **Store and retrieve key-value data instantly**: Use Redis as a fast database, cache, or session store for high performance.
|
||||
- **Work with multiple data structures**: Manage not just strings, but also lists, hashes, sets, sorted sets, streams, and bitmaps.
|
||||
- **Perform atomic operations**: Safely manipulate data using atomic commands and transactions.
|
||||
- **Support pub/sub messaging**: Use Redis’s publisher/subscriber features for real-time event handling and messaging.
|
||||
- **Set automatic expiration policies**: Assign TTLs to keys for caching and time-sensitive data.
|
||||
- **Scale horizontally**: Use Redis Cluster for sharding, high availability, and scalable workloads.
|
||||
|
||||
In Sim, the Redis integration lets your agents connect to any Redis-compatible instance to perform key-value, hash, list, and utility operations. You can build workflows that involve storing, retrieving, or manipulating data in Redis, or manage your app’s cache, sessions, or real-time messaging, directly within your Sim workspace.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Connect to any Redis instance to perform key-value, hash, list, and utility operations via a direct connection.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `redis_get`
|
||||
|
||||
Get the value of a key from Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The key to retrieve |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The key that was retrieved |
|
||||
| `value` | string | The value of the key, or null if the key does not exist |
|
||||
|
||||
### `redis_set`
|
||||
|
||||
Set the value of a key in Redis with an optional expiration time in seconds.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The key to set |
|
||||
| `value` | string | Yes | The value to store |
|
||||
| `ex` | number | No | Expiration time in seconds \(optional\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The key that was set |
|
||||
| `result` | string | The result of the SET operation \(typically "OK"\) |
|
||||
|
||||
### `redis_delete`
|
||||
|
||||
Delete a key from Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The key to delete |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The key that was deleted |
|
||||
| `deletedCount` | number | Number of keys deleted \(0 if key did not exist, 1 if deleted\) |
|
||||
|
||||
### `redis_keys`
|
||||
|
||||
List all keys matching a pattern in Redis. Avoid using on large databases in production; use the Redis Command tool with SCAN for large key spaces.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `pattern` | string | No | Pattern to match keys \(default: * for all keys\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `pattern` | string | The pattern used to match keys |
|
||||
| `keys` | array | List of keys matching the pattern |
|
||||
| `count` | number | Number of keys found |
|
||||
|
||||
### `redis_command`
|
||||
|
||||
Execute a raw Redis command as a JSON array (e.g. ["SET", "key", "value"]).
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `command` | string | Yes | Redis command as a JSON array \(e.g. \["SET", "key", "value"\]\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `command` | string | The command that was executed |
|
||||
| `result` | json | The result of the command |
|
||||
|
||||
### `redis_hset`
|
||||
|
||||
Set a field in a hash stored at a key in Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The hash key |
|
||||
| `field` | string | Yes | The field name within the hash |
|
||||
| `value` | string | Yes | The value to set for the field |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The hash key |
|
||||
| `field` | string | The field that was set |
|
||||
| `result` | number | Number of fields added \(1 if new, 0 if updated\) |
|
||||
|
||||
### `redis_hget`
|
||||
|
||||
Get the value of a field in a hash stored at a key in Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The hash key |
|
||||
| `field` | string | Yes | The field name to retrieve |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The hash key |
|
||||
| `field` | string | The field that was retrieved |
|
||||
| `value` | string | The field value, or null if the field or key does not exist |
|
||||
|
||||
### `redis_hgetall`
|
||||
|
||||
Get all fields and values of a hash stored at a key in Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The hash key |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The hash key |
|
||||
| `fields` | object | All field-value pairs in the hash as a key-value object. Empty object if the key does not exist. |
|
||||
| `fieldCount` | number | Number of fields in the hash |
|
||||
|
||||
### `redis_hdel`
|
||||
|
||||
Delete a field from a hash stored at a key in Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The hash key |
|
||||
| `field` | string | Yes | The field name to delete |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The hash key |
|
||||
| `field` | string | The field that was deleted |
|
||||
| `deleted` | number | Number of fields removed \(1 if deleted, 0 if field did not exist\) |
|
||||
|
||||
### `redis_incr`
|
||||
|
||||
Increment the integer value of a key by one in Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The key to increment |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The key that was incremented |
|
||||
| `value` | number | The new value after increment |
|
||||
|
||||
### `redis_incrby`
|
||||
|
||||
Increment the integer value of a key by a given amount in Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The key to increment |
|
||||
| `increment` | number | Yes | Amount to increment by \(negative to decrement\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The key that was incremented |
|
||||
| `value` | number | The new value after increment |
|
||||
|
||||
### `redis_expire`
|
||||
|
||||
Set an expiration time (in seconds) on a key in Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The key to set expiration on |
|
||||
| `seconds` | number | Yes | Timeout in seconds |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The key that expiration was set on |
|
||||
| `result` | number | 1 if the timeout was set, 0 if the key does not exist |
|
||||
|
||||
### `redis_ttl`
|
||||
|
||||
Get the remaining time to live (in seconds) of a key in Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The key to check TTL for |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The key that was checked |
|
||||
| `ttl` | number | Remaining TTL in seconds. A positive integer if a TTL is set, -1 if the key has no expiration, -2 if the key does not exist. |
|
||||
|
||||
### `redis_persist`
|
||||
|
||||
Remove the expiration from a key in Redis, making it persist indefinitely.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The key to persist |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The key that was persisted |
|
||||
| `result` | number | 1 if the expiration was removed, 0 if the key does not exist or has no expiration |
|
||||
|
||||
### `redis_lpush`
|
||||
|
||||
Prepend a value to a list stored at a key in Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The list key |
|
||||
| `value` | string | Yes | The value to prepend |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The list key |
|
||||
| `length` | number | Length of the list after the push |
|
||||
|
||||
### `redis_rpush`
|
||||
|
||||
Append a value to the end of a list stored at a key in Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The list key |
|
||||
| `value` | string | Yes | The value to append |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The list key |
|
||||
| `length` | number | Length of the list after the push |
|
||||
|
||||
### `redis_lpop`
|
||||
|
||||
Remove and return the first element of a list stored at a key in Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The list key |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The list key |
|
||||
| `value` | string | The removed element, or null if the list is empty |
|
||||
|
||||
### `redis_rpop`
|
||||
|
||||
Remove and return the last element of a list stored at a key in Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The list key |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The list key |
|
||||
| `value` | string | The removed element, or null if the list is empty |
|
||||
|
||||
### `redis_llen`
|
||||
|
||||
Get the length of a list stored at a key in Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The list key |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The list key |
|
||||
| `length` | number | The length of the list, or 0 if the key does not exist |
|
||||
|
||||
### `redis_lrange`
|
||||
|
||||
Get a range of elements from a list stored at a key in Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The list key |
|
||||
| `start` | number | Yes | Start index \(0-based\) |
|
||||
| `stop` | number | Yes | Stop index \(-1 for all elements\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The list key |
|
||||
| `values` | array | List elements in the specified range |
|
||||
| `count` | number | Number of elements returned |
|
||||
|
||||
### `redis_exists`
|
||||
|
||||
Check if a key exists in Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The key to check |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The key that was checked |
|
||||
| `exists` | boolean | Whether the key exists \(true\) or not \(false\) |
|
||||
|
||||
### `redis_setnx`
|
||||
|
||||
Set the value of a key in Redis only if the key does not already exist.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `url` | string | Yes | Redis connection URL \(e.g. redis://user:password@host:port\) |
|
||||
| `key` | string | Yes | The key to set |
|
||||
| `value` | string | Yes | The value to store |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The key that was set |
|
||||
| `wasSet` | boolean | Whether the key was set \(true\) or already existed \(false\) |
|
||||
|
||||
|
||||
456
apps/docs/content/docs/en/tools/revenuecat.mdx
Normal file
456
apps/docs/content/docs/en/tools/revenuecat.mdx
Normal file
@@ -0,0 +1,456 @@
|
||||
---
|
||||
title: RevenueCat
|
||||
description: Manage in-app subscriptions and entitlements
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="revenuecat"
|
||||
color="#F25A5A"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[RevenueCat](https://www.revenuecat.com/) is a subscription management platform that enables you to easily set up, manage, and analyze in-app subscriptions for your apps. With RevenueCat, you can handle the complexities of in-app purchases across platforms like iOS, Android, and web—all through a single unified API.
|
||||
|
||||
With RevenueCat, you can:
|
||||
|
||||
- **Manage subscribers**: Track user subscriptions, entitlements, and purchases across all platforms in real time
|
||||
- **Simplify implementation**: Integrate RevenueCat’s SDKs to abstract away App Store and Play Store purchase logic
|
||||
- **Automate entitlement logic**: Define and manage what features users should receive when they purchase or renew
|
||||
- **Analyze revenue**: Access dashboards and analytics to view churn, LTV, revenue, active subscriptions, and more
|
||||
- **Grant or revoke entitlements**: Manually adjust user access (for example, for customer support or promotions)
|
||||
- **Operate globally**: Support purchases, refunds, and promotions worldwide with ease
|
||||
|
||||
In Sim, the RevenueCat integration allows your agents to fetch and manage subscriber data, review and update entitlements, and automate subscription-related workflows. Use RevenueCat to centralize subscription operations for your apps directly within your Sim workspace.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Integrate RevenueCat into the workflow. Manage subscribers, entitlements, offerings, and Google Play subscriptions. Retrieve customer subscription status, grant or revoke promotional entitlements, record purchases, update subscriber attributes, and manage Google Play subscription billing.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `revenuecat_get_customer`
|
||||
|
||||
Retrieve subscriber information by app user ID
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) |
|
||||
| `appUserId` | string | Yes | The app user ID of the subscriber |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `subscriber` | object | The subscriber object with subscriptions and entitlements |
|
||||
| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen |
|
||||
| ↳ `original_app_user_id` | string | Original app user ID |
|
||||
| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase |
|
||||
| ↳ `management_url` | string | URL for managing the subscriber's subscriptions |
|
||||
| ↳ `subscriptions` | object | Map of product identifiers to subscription objects |
|
||||
| ↳ `store_transaction_id` | string | Store transaction identifier |
|
||||
| ↳ `original_transaction_id` | string | Original transaction identifier |
|
||||
| ↳ `purchase_date` | string | ISO 8601 purchase date |
|
||||
| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase |
|
||||
| ↳ `expires_date` | string | ISO 8601 expiration date |
|
||||
| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase |
|
||||
| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected |
|
||||
| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected |
|
||||
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
|
||||
| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) |
|
||||
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) |
|
||||
| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) |
|
||||
| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded |
|
||||
| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume |
|
||||
| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) |
|
||||
| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects |
|
||||
| ↳ `grant_date` | string | ISO 8601 grant date |
|
||||
| ↳ `expires_date` | string | ISO 8601 expiration date |
|
||||
| ↳ `product_identifier` | string | Product identifier |
|
||||
| ↳ `is_active` | boolean | Whether the entitlement is active |
|
||||
| ↳ `will_renew` | boolean | Whether the entitlement will renew |
|
||||
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) |
|
||||
| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal |
|
||||
| ↳ `store` | string | Store the entitlement was granted from |
|
||||
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
|
||||
| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects |
|
||||
| `metadata` | object | Subscriber summary metadata |
|
||||
| ↳ `app_user_id` | string | The app user ID |
|
||||
| ↳ `first_seen` | string | ISO 8601 date when the subscriber was first seen |
|
||||
| ↳ `active_entitlements` | number | Number of active entitlements |
|
||||
| ↳ `active_subscriptions` | number | Number of active subscriptions |
|
||||
|
||||
### `revenuecat_delete_customer`
|
||||
|
||||
Permanently delete a subscriber and all associated data
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) |
|
||||
| `appUserId` | string | Yes | The app user ID of the subscriber to delete |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `deleted` | boolean | Whether the subscriber was deleted |
|
||||
| `app_user_id` | string | The deleted app user ID |
|
||||
|
||||
### `revenuecat_create_purchase`
|
||||
|
||||
Record a purchase (receipt) for a subscriber via the REST API
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | RevenueCat API key \(public or secret\) |
|
||||
| `appUserId` | string | Yes | The app user ID of the subscriber |
|
||||
| `fetchToken` | string | Yes | The receipt token or purchase token from the store \(App Store receipt, Google Play purchase token, or Stripe subscription ID\) |
|
||||
| `productId` | string | Yes | The product identifier for the purchase |
|
||||
| `price` | number | No | The price of the product in the currency specified |
|
||||
| `currency` | string | No | ISO 4217 currency code \(e.g., USD, EUR\) |
|
||||
| `isRestore` | boolean | No | Whether this is a restore of a previous purchase |
|
||||
| `platform` | string | No | Platform of the purchase \(ios, android, amazon, macos, stripe\). Required for Stripe and Paddle purchases. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `subscriber` | object | The updated subscriber object after recording the purchase |
|
||||
| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen |
|
||||
| ↳ `original_app_user_id` | string | Original app user ID |
|
||||
| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase |
|
||||
| ↳ `management_url` | string | URL for managing the subscriber's subscriptions |
|
||||
| ↳ `subscriptions` | object | Map of product identifiers to subscription objects |
|
||||
| ↳ `store_transaction_id` | string | Store transaction identifier |
|
||||
| ↳ `original_transaction_id` | string | Original transaction identifier |
|
||||
| ↳ `purchase_date` | string | ISO 8601 purchase date |
|
||||
| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase |
|
||||
| ↳ `expires_date` | string | ISO 8601 expiration date |
|
||||
| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase |
|
||||
| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected |
|
||||
| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected |
|
||||
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
|
||||
| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) |
|
||||
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) |
|
||||
| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) |
|
||||
| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded |
|
||||
| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume |
|
||||
| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) |
|
||||
| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects |
|
||||
| ↳ `grant_date` | string | ISO 8601 grant date |
|
||||
| ↳ `expires_date` | string | ISO 8601 expiration date |
|
||||
| ↳ `product_identifier` | string | Product identifier |
|
||||
| ↳ `is_active` | boolean | Whether the entitlement is active |
|
||||
| ↳ `will_renew` | boolean | Whether the entitlement will renew |
|
||||
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) |
|
||||
| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal |
|
||||
| ↳ `store` | string | Store the entitlement was granted from |
|
||||
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
|
||||
| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects |
|
||||
|
||||
### `revenuecat_grant_entitlement`
|
||||
|
||||
Grant a promotional entitlement to a subscriber
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) |
|
||||
| `appUserId` | string | Yes | The app user ID of the subscriber |
|
||||
| `entitlementIdentifier` | string | Yes | The entitlement identifier to grant |
|
||||
| `duration` | string | Yes | Duration of the entitlement \(daily, three_day, weekly, monthly, two_month, three_month, six_month, yearly, lifetime\) |
|
||||
| `startTimeMs` | number | No | Optional start time in milliseconds since Unix epoch. Set to a past time to achieve custom durations shorter than daily. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `subscriber` | object | The updated subscriber object after granting the entitlement |
|
||||
| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen |
|
||||
| ↳ `original_app_user_id` | string | Original app user ID |
|
||||
| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase |
|
||||
| ↳ `management_url` | string | URL for managing the subscriber's subscriptions |
|
||||
| ↳ `subscriptions` | object | Map of product identifiers to subscription objects |
|
||||
| ↳ `store_transaction_id` | string | Store transaction identifier |
|
||||
| ↳ `original_transaction_id` | string | Original transaction identifier |
|
||||
| ↳ `purchase_date` | string | ISO 8601 purchase date |
|
||||
| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase |
|
||||
| ↳ `expires_date` | string | ISO 8601 expiration date |
|
||||
| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase |
|
||||
| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected |
|
||||
| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected |
|
||||
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
|
||||
| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) |
|
||||
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) |
|
||||
| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) |
|
||||
| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded |
|
||||
| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume |
|
||||
| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) |
|
||||
| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects |
|
||||
| ↳ `grant_date` | string | ISO 8601 grant date |
|
||||
| ↳ `expires_date` | string | ISO 8601 expiration date |
|
||||
| ↳ `product_identifier` | string | Product identifier |
|
||||
| ↳ `is_active` | boolean | Whether the entitlement is active |
|
||||
| ↳ `will_renew` | boolean | Whether the entitlement will renew |
|
||||
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) |
|
||||
| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal |
|
||||
| ↳ `store` | string | Store the entitlement was granted from |
|
||||
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
|
||||
| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects |
|
||||
|
||||
### `revenuecat_revoke_entitlement`
|
||||
|
||||
Revoke all promotional entitlements for a specific entitlement identifier
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) |
|
||||
| `appUserId` | string | Yes | The app user ID of the subscriber |
|
||||
| `entitlementIdentifier` | string | Yes | The entitlement identifier to revoke |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `subscriber` | object | The updated subscriber object after revoking the entitlement |
|
||||
| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen |
|
||||
| ↳ `original_app_user_id` | string | Original app user ID |
|
||||
| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase |
|
||||
| ↳ `management_url` | string | URL for managing the subscriber's subscriptions |
|
||||
| ↳ `subscriptions` | object | Map of product identifiers to subscription objects |
|
||||
| ↳ `store_transaction_id` | string | Store transaction identifier |
|
||||
| ↳ `original_transaction_id` | string | Original transaction identifier |
|
||||
| ↳ `purchase_date` | string | ISO 8601 purchase date |
|
||||
| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase |
|
||||
| ↳ `expires_date` | string | ISO 8601 expiration date |
|
||||
| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase |
|
||||
| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected |
|
||||
| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected |
|
||||
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
|
||||
| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) |
|
||||
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) |
|
||||
| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) |
|
||||
| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded |
|
||||
| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume |
|
||||
| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) |
|
||||
| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects |
|
||||
| ↳ `grant_date` | string | ISO 8601 grant date |
|
||||
| ↳ `expires_date` | string | ISO 8601 expiration date |
|
||||
| ↳ `product_identifier` | string | Product identifier |
|
||||
| ↳ `is_active` | boolean | Whether the entitlement is active |
|
||||
| ↳ `will_renew` | boolean | Whether the entitlement will renew |
|
||||
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) |
|
||||
| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal |
|
||||
| ↳ `store` | string | Store the entitlement was granted from |
|
||||
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
|
||||
| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects |
|
||||
|
||||
### `revenuecat_list_offerings`
|
||||
|
||||
List all offerings configured for the project
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | RevenueCat API key |
|
||||
| `appUserId` | string | Yes | An app user ID to retrieve offerings for |
|
||||
| `platform` | string | No | Platform to filter offerings \(ios, android, stripe, etc.\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `current_offering_id` | string | The identifier of the current offering |
|
||||
| `offerings` | array | List of offerings |
|
||||
| ↳ `identifier` | string | Offering identifier |
|
||||
| ↳ `description` | string | Offering description |
|
||||
| ↳ `packages` | array | List of packages in the offering |
|
||||
| ↳ `identifier` | string | Package identifier |
|
||||
| ↳ `platform_product_identifier` | string | Platform-specific product identifier |
|
||||
| `metadata` | object | Offerings metadata |
|
||||
| ↳ `count` | number | Number of offerings returned |
|
||||
| ↳ `current_offering_id` | string | Current offering identifier |
|
||||
|
||||
### `revenuecat_update_subscriber_attributes`
|
||||
|
||||
Update custom subscriber attributes (e.g., $email, $displayName, or custom key-value pairs)
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) |
|
||||
| `appUserId` | string | Yes | The app user ID of the subscriber |
|
||||
| `attributes` | json | Yes | JSON object of attributes to set. Each key maps to an object with a "value" field. Example: \{"$email": \{"value": "user@example.com"\}, "$displayName": \{"value": "John"\}\} |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `updated` | boolean | Whether the subscriber attributes were successfully updated |
|
||||
| `app_user_id` | string | The app user ID of the updated subscriber |
|
||||
|
||||
### `revenuecat_defer_google_subscription`
|
||||
|
||||
Defer a Google Play subscription by extending its billing date by a number of days (Google Play only)
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) |
|
||||
| `appUserId` | string | Yes | The app user ID of the subscriber |
|
||||
| `productId` | string | Yes | The Google Play product identifier of the subscription to defer \(use the part before the colon for products set up after Feb 2023\) |
|
||||
| `extendByDays` | number | Yes | Number of days to extend the subscription by \(1-365\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `subscriber` | object | The updated subscriber object after deferring the Google subscription |
|
||||
| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen |
|
||||
| ↳ `original_app_user_id` | string | Original app user ID |
|
||||
| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase |
|
||||
| ↳ `management_url` | string | URL for managing the subscriber's subscriptions |
|
||||
| ↳ `subscriptions` | object | Map of product identifiers to subscription objects |
|
||||
| ↳ `store_transaction_id` | string | Store transaction identifier |
|
||||
| ↳ `original_transaction_id` | string | Original transaction identifier |
|
||||
| ↳ `purchase_date` | string | ISO 8601 purchase date |
|
||||
| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase |
|
||||
| ↳ `expires_date` | string | ISO 8601 expiration date |
|
||||
| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase |
|
||||
| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected |
|
||||
| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected |
|
||||
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
|
||||
| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) |
|
||||
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) |
|
||||
| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) |
|
||||
| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded |
|
||||
| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume |
|
||||
| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) |
|
||||
| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects |
|
||||
| ↳ `grant_date` | string | ISO 8601 grant date |
|
||||
| ↳ `expires_date` | string | ISO 8601 expiration date |
|
||||
| ↳ `product_identifier` | string | Product identifier |
|
||||
| ↳ `is_active` | boolean | Whether the entitlement is active |
|
||||
| ↳ `will_renew` | boolean | Whether the entitlement will renew |
|
||||
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) |
|
||||
| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal |
|
||||
| ↳ `store` | string | Store the entitlement was granted from |
|
||||
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
|
||||
| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects |
|
||||
|
||||
### `revenuecat_refund_google_subscription`
|
||||
|
||||
Refund and optionally revoke a Google Play subscription (Google Play only)
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) |
|
||||
| `appUserId` | string | Yes | The app user ID of the subscriber |
|
||||
| `productId` | string | Yes | The Google Play product identifier of the subscription to refund |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `subscriber` | object | The updated subscriber object after refunding the Google subscription |
|
||||
| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen |
|
||||
| ↳ `original_app_user_id` | string | Original app user ID |
|
||||
| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase |
|
||||
| ↳ `management_url` | string | URL for managing the subscriber's subscriptions |
|
||||
| ↳ `subscriptions` | object | Map of product identifiers to subscription objects |
|
||||
| ↳ `store_transaction_id` | string | Store transaction identifier |
|
||||
| ↳ `original_transaction_id` | string | Original transaction identifier |
|
||||
| ↳ `purchase_date` | string | ISO 8601 purchase date |
|
||||
| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase |
|
||||
| ↳ `expires_date` | string | ISO 8601 expiration date |
|
||||
| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase |
|
||||
| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected |
|
||||
| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected |
|
||||
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
|
||||
| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) |
|
||||
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) |
|
||||
| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) |
|
||||
| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded |
|
||||
| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume |
|
||||
| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) |
|
||||
| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects |
|
||||
| ↳ `grant_date` | string | ISO 8601 grant date |
|
||||
| ↳ `expires_date` | string | ISO 8601 expiration date |
|
||||
| ↳ `product_identifier` | string | Product identifier |
|
||||
| ↳ `is_active` | boolean | Whether the entitlement is active |
|
||||
| ↳ `will_renew` | boolean | Whether the entitlement will renew |
|
||||
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) |
|
||||
| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal |
|
||||
| ↳ `store` | string | Store the entitlement was granted from |
|
||||
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
|
||||
| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects |
|
||||
|
||||
### `revenuecat_revoke_google_subscription`
|
||||
|
||||
Immediately revoke access to a Google Play subscription and issue a refund (Google Play only)
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `apiKey` | string | Yes | RevenueCat secret API key \(sk_...\) |
|
||||
| `appUserId` | string | Yes | The app user ID of the subscriber |
|
||||
| `productId` | string | Yes | The Google Play product identifier of the subscription to revoke |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `subscriber` | object | The updated subscriber object after revoking the Google subscription |
|
||||
| ↳ `first_seen` | string | ISO 8601 date when subscriber was first seen |
|
||||
| ↳ `original_app_user_id` | string | Original app user ID |
|
||||
| ↳ `original_purchase_date` | string | ISO 8601 date of original purchase |
|
||||
| ↳ `management_url` | string | URL for managing the subscriber subscriptions |
|
||||
| ↳ `subscriptions` | object | Map of product identifiers to subscription objects |
|
||||
| ↳ `store_transaction_id` | string | Store transaction identifier |
|
||||
| ↳ `original_transaction_id` | string | Original transaction identifier |
|
||||
| ↳ `purchase_date` | string | ISO 8601 purchase date |
|
||||
| ↳ `original_purchase_date` | string | ISO 8601 date of the original purchase |
|
||||
| ↳ `expires_date` | string | ISO 8601 expiration date |
|
||||
| ↳ `is_sandbox` | boolean | Whether this is a sandbox purchase |
|
||||
| ↳ `unsubscribe_detected_at` | string | ISO 8601 date when unsubscribe was detected |
|
||||
| ↳ `billing_issues_detected_at` | string | ISO 8601 date when billing issues were detected |
|
||||
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
|
||||
| ↳ `ownership_type` | string | Ownership type \(purchased, family_shared\) |
|
||||
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional, prepaid\) |
|
||||
| ↳ `store` | string | Store the subscription was purchased from \(app_store, play_store, stripe, etc.\) |
|
||||
| ↳ `refunded_at` | string | ISO 8601 date when subscription was refunded |
|
||||
| ↳ `auto_resume_date` | string | ISO 8601 date when a paused subscription will auto-resume |
|
||||
| ↳ `product_plan_identifier` | string | Google Play base plan identifier \(for products set up after Feb 2023\) |
|
||||
| ↳ `entitlements` | object | Map of entitlement identifiers to entitlement objects |
|
||||
| ↳ `grant_date` | string | ISO 8601 grant date |
|
||||
| ↳ `expires_date` | string | ISO 8601 expiration date |
|
||||
| ↳ `product_identifier` | string | Product identifier |
|
||||
| ↳ `is_active` | boolean | Whether the entitlement is active |
|
||||
| ↳ `will_renew` | boolean | Whether the entitlement will renew |
|
||||
| ↳ `period_type` | string | Period type \(normal, trial, intro, promotional\) |
|
||||
| ↳ `purchase_date` | string | ISO 8601 date of the latest purchase or renewal |
|
||||
| ↳ `store` | string | Store the entitlement was granted from |
|
||||
| ↳ `grace_period_expires_date` | string | ISO 8601 grace period expiration date |
|
||||
| ↳ `non_subscriptions` | object | Map of non-subscription product identifiers to arrays of purchase objects |
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
---
|
||||
title: Slack
|
||||
description: Send, update, delete messages, add reactions in Slack or trigger workflows from Slack events
|
||||
description: Send, update, delete messages, send ephemeral messages, add reactions in Slack or trigger workflows from Slack events
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
@@ -59,7 +59,7 @@ If you encounter issues with the Slack integration, contact us at [help@sim.ai](
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Integrate Slack into the workflow. Can send, update, and delete messages, create canvases, read messages, and add reactions. Requires Bot Token instead of OAuth in advanced mode. Can be used in trigger mode to trigger a workflow when a message is sent to a channel.
|
||||
Integrate Slack into the workflow. Can send, update, and delete messages, send ephemeral messages visible only to a specific user, create canvases, read messages, and add reactions. Requires Bot Token instead of OAuth in advanced mode. Can be used in trigger mode to trigger a workflow when a message is sent to a channel.
|
||||
|
||||
|
||||
|
||||
@@ -80,6 +80,7 @@ Send messages to Slack channels or direct messages. Supports Slack mrkdwn format
|
||||
| `dmUserId` | string | No | Slack user ID for direct messages \(e.g., U1234567890\) |
|
||||
| `text` | string | Yes | Message text to send \(supports Slack mrkdwn formatting\) |
|
||||
| `threadTs` | string | No | Thread timestamp to reply to \(creates thread reply\) |
|
||||
| `blocks` | json | No | Block Kit layout blocks as a JSON array. When provided, text becomes the fallback notification text. |
|
||||
| `files` | file[] | No | Files to attach to the message |
|
||||
|
||||
#### Output
|
||||
@@ -146,6 +147,29 @@ Send messages to Slack channels or direct messages. Supports Slack mrkdwn format
|
||||
| `fileCount` | number | Number of files uploaded \(when files are attached\) |
|
||||
| `files` | file[] | Files attached to the message |
|
||||
|
||||
### `slack_ephemeral_message`
|
||||
|
||||
Send an ephemeral message visible only to a specific user in a channel. Optionally reply in a thread. The message does not persist across sessions.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `authMethod` | string | No | Authentication method: oauth or bot_token |
|
||||
| `botToken` | string | No | Bot token for Custom Bot |
|
||||
| `channel` | string | Yes | Slack channel ID \(e.g., C1234567890\) |
|
||||
| `user` | string | Yes | User ID who will see the ephemeral message \(e.g., U1234567890\). Must be a member of the channel. |
|
||||
| `text` | string | Yes | Message text to send \(supports Slack mrkdwn formatting\) |
|
||||
| `threadTs` | string | No | Thread timestamp to reply in. When provided, the ephemeral message appears as a thread reply. |
|
||||
| `blocks` | json | No | Block Kit layout blocks as a JSON array. When provided, text becomes the fallback notification text. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `messageTs` | string | Timestamp of the ephemeral message \(cannot be used with chat.update\) |
|
||||
| `channel` | string | Channel ID where the ephemeral message was sent |
|
||||
|
||||
### `slack_canvas`
|
||||
|
||||
Create and share Slack canvases in channels. Canvases are collaborative documents within Slack.
|
||||
@@ -682,6 +706,7 @@ Update a message previously sent by the bot in Slack
|
||||
| `channel` | string | Yes | Channel ID where the message was posted \(e.g., C1234567890\) |
|
||||
| `timestamp` | string | Yes | Timestamp of the message to update \(e.g., 1405894322.002768\) |
|
||||
| `text` | string | Yes | New message text \(supports Slack mrkdwn formatting\) |
|
||||
| `blocks` | json | No | Block Kit layout blocks as a JSON array. When provided, text becomes the fallback notification text. |
|
||||
|
||||
#### Output
|
||||
|
||||
|
||||
357
apps/docs/content/docs/en/tools/upstash.mdx
Normal file
357
apps/docs/content/docs/en/tools/upstash.mdx
Normal file
@@ -0,0 +1,357 @@
|
||||
---
|
||||
title: Upstash
|
||||
description: Serverless Redis with Upstash
|
||||
---
|
||||
|
||||
import { BlockInfoCard } from "@/components/ui/block-info-card"
|
||||
|
||||
<BlockInfoCard
|
||||
type="upstash"
|
||||
color="#181C1E"
|
||||
/>
|
||||
|
||||
{/* MANUAL-CONTENT-START:intro */}
|
||||
[Upstash](https://upstash.com/) is a serverless data platform designed for modern applications that need fast, simple, and scalable data storage with minimal setup. Upstash specializes in providing Redis and Kafka as fully managed, pay-per-request cloud services, making it a popular choice for developers building serverless, edge, and event-driven architectures.
|
||||
|
||||
With Upstash Redis, you can:
|
||||
|
||||
- **Store and retrieve data instantly**: Read and write key-value pairs, hashes, lists, sets, and more—all over a high-performance REST API.
|
||||
- **Scale serverlessly**: No infrastructure to manage. Upstash automatically scales with your app and charges only for what you use.
|
||||
- **Access globally**: Deploy near your users with multi-region support and global distribution.
|
||||
- **Integrate easily**: Use Upstash’s REST API in serverless functions, edge workers, Next.js, Vercel, Cloudflare Workers, and more.
|
||||
- **Automate with scripts**: Run Lua scripts for advanced transactions and automation.
|
||||
- **Ensure security**: Protect your data with built-in authentication and TLS encryption.
|
||||
|
||||
In Sim, the Upstash integration empowers your agents and workflows to read, write, and manage data in Upstash Redis using simple, unified commands—perfect for building scalable automations, caching results, managing queues, and more, all without dealing with server management.
|
||||
{/* MANUAL-CONTENT-END */}
|
||||
|
||||
|
||||
## Usage Instructions
|
||||
|
||||
Connect to Upstash Redis to perform key-value, hash, list, and utility operations via the REST API.
|
||||
|
||||
|
||||
|
||||
## Tools
|
||||
|
||||
### `upstash_redis_get`
|
||||
|
||||
Get the value of a key from Upstash Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `restUrl` | string | Yes | Upstash Redis REST URL |
|
||||
| `restToken` | string | Yes | Upstash Redis REST Token |
|
||||
| `key` | string | Yes | The key to retrieve |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The key that was retrieved |
|
||||
| `value` | json | The value of the key \(string\), or null if not found |
|
||||
|
||||
### `upstash_redis_set`
|
||||
|
||||
Set the value of a key in Upstash Redis with an optional expiration time in seconds.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `restUrl` | string | Yes | Upstash Redis REST URL |
|
||||
| `restToken` | string | Yes | Upstash Redis REST Token |
|
||||
| `key` | string | Yes | The key to set |
|
||||
| `value` | string | Yes | The value to store |
|
||||
| `ex` | number | No | Expiration time in seconds \(optional\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The key that was set |
|
||||
| `result` | string | The result of the SET operation \(typically "OK"\) |
|
||||
|
||||
### `upstash_redis_delete`
|
||||
|
||||
Delete a key from Upstash Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `restUrl` | string | Yes | Upstash Redis REST URL |
|
||||
| `restToken` | string | Yes | Upstash Redis REST Token |
|
||||
| `key` | string | Yes | The key to delete |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The key that was deleted |
|
||||
| `deletedCount` | number | Number of keys deleted \(0 if key did not exist, 1 if deleted\) |
|
||||
|
||||
### `upstash_redis_keys`
|
||||
|
||||
List keys matching a pattern in Upstash Redis. Defaults to listing all keys (*).
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `restUrl` | string | Yes | Upstash Redis REST URL |
|
||||
| `restToken` | string | Yes | Upstash Redis REST Token |
|
||||
| `pattern` | string | No | Pattern to match keys \(e.g., "user:*"\). Defaults to "*" for all keys. |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `pattern` | string | The pattern used to match keys |
|
||||
| `keys` | array | List of keys matching the pattern |
|
||||
| `count` | number | Number of keys found |
|
||||
|
||||
### `upstash_redis_command`
|
||||
|
||||
Execute an arbitrary Redis command against Upstash Redis. Pass the full command as a JSON array (e.g., ["HSET", "myhash", "field1", "value1"]) or a simple command string (e.g., "PING").
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `restUrl` | string | Yes | Upstash Redis REST URL |
|
||||
| `restToken` | string | Yes | Upstash Redis REST Token |
|
||||
| `command` | string | Yes | Redis command as a JSON array \(e.g., \["HSET", "myhash", "field1", "value1"\]\) or a simple command string \(e.g., "PING"\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `command` | string | The command that was executed |
|
||||
| `result` | json | The result of the Redis command |
|
||||
|
||||
### `upstash_redis_hset`
|
||||
|
||||
Set a field in a hash stored at a key in Upstash Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `restUrl` | string | Yes | Upstash Redis REST URL |
|
||||
| `restToken` | string | Yes | Upstash Redis REST Token |
|
||||
| `key` | string | Yes | The hash key |
|
||||
| `field` | string | Yes | The field name within the hash |
|
||||
| `value` | string | Yes | The value to store in the hash field |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The hash key |
|
||||
| `field` | string | The field that was set |
|
||||
| `result` | number | Number of new fields added \(0 if field was updated, 1 if new\) |
|
||||
|
||||
### `upstash_redis_hget`
|
||||
|
||||
Get the value of a field in a hash stored at a key in Upstash Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `restUrl` | string | Yes | Upstash Redis REST URL |
|
||||
| `restToken` | string | Yes | Upstash Redis REST Token |
|
||||
| `key` | string | Yes | The hash key |
|
||||
| `field` | string | Yes | The field name to retrieve |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The hash key |
|
||||
| `field` | string | The field that was retrieved |
|
||||
| `value` | json | The value of the hash field \(string\), or null if not found |
|
||||
|
||||
### `upstash_redis_hgetall`
|
||||
|
||||
Get all fields and values of a hash stored at a key in Upstash Redis.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `restUrl` | string | Yes | Upstash Redis REST URL |
|
||||
| `restToken` | string | Yes | Upstash Redis REST Token |
|
||||
| `key` | string | Yes | The hash key |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The hash key |
|
||||
| `fields` | object | All field-value pairs in the hash, keyed by field name |
|
||||
| `fieldCount` | number | Number of fields in the hash |
|
||||
|
||||
### `upstash_redis_incr`
|
||||
|
||||
Atomically increment the integer value of a key by one in Upstash Redis. If the key does not exist, it is set to 0 before incrementing.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `restUrl` | string | Yes | Upstash Redis REST URL |
|
||||
| `restToken` | string | Yes | Upstash Redis REST Token |
|
||||
| `key` | string | Yes | The key to increment |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The key that was incremented |
|
||||
| `value` | number | The new value after incrementing |
|
||||
|
||||
### `upstash_redis_expire`
|
||||
|
||||
Set a timeout on a key in Upstash Redis. After the timeout, the key is deleted.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `restUrl` | string | Yes | Upstash Redis REST URL |
|
||||
| `restToken` | string | Yes | Upstash Redis REST Token |
|
||||
| `key` | string | Yes | The key to set expiration on |
|
||||
| `seconds` | number | Yes | Timeout in seconds |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The key that expiration was set on |
|
||||
| `result` | number | 1 if the timeout was set, 0 if the key does not exist |
|
||||
|
||||
### `upstash_redis_ttl`
|
||||
|
||||
Get the remaining time to live of a key in Upstash Redis. Returns -1 if the key has no expiration, -2 if the key does not exist.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `restUrl` | string | Yes | Upstash Redis REST URL |
|
||||
| `restToken` | string | Yes | Upstash Redis REST Token |
|
||||
| `key` | string | Yes | The key to check TTL for |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The key checked |
|
||||
| `ttl` | number | Remaining TTL in seconds. Positive integer if the key has a TTL set, -1 if the key exists with no expiration, -2 if the key does not exist. |
|
||||
|
||||
### `upstash_redis_lpush`
|
||||
|
||||
Prepend a value to the beginning of a list in Upstash Redis. Creates the list if it does not exist.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `restUrl` | string | Yes | Upstash Redis REST URL |
|
||||
| `restToken` | string | Yes | Upstash Redis REST Token |
|
||||
| `key` | string | Yes | The list key |
|
||||
| `value` | string | Yes | The value to prepend to the list |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The list key |
|
||||
| `length` | number | The length of the list after the push |
|
||||
|
||||
### `upstash_redis_lrange`
|
||||
|
||||
Get a range of elements from a list in Upstash Redis. Use 0 and -1 for start and stop to get all elements.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `restUrl` | string | Yes | Upstash Redis REST URL |
|
||||
| `restToken` | string | Yes | Upstash Redis REST Token |
|
||||
| `key` | string | Yes | The list key |
|
||||
| `start` | number | Yes | Start index \(0-based, negative values count from end\) |
|
||||
| `stop` | number | Yes | Stop index \(inclusive, -1 for last element\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The list key |
|
||||
| `values` | array | List of elements in the specified range |
|
||||
| `count` | number | Number of elements returned |
|
||||
|
||||
### `upstash_redis_exists`
|
||||
|
||||
Check if a key exists in Upstash Redis. Returns true if the key exists, false otherwise.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `restUrl` | string | Yes | Upstash Redis REST URL |
|
||||
| `restToken` | string | Yes | Upstash Redis REST Token |
|
||||
| `key` | string | Yes | The key to check |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The key that was checked |
|
||||
| `exists` | boolean | Whether the key exists \(true\) or not \(false\) |
|
||||
|
||||
### `upstash_redis_setnx`
|
||||
|
||||
Set the value of a key only if it does not already exist. Returns true if the key was set, false if it already existed.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `restUrl` | string | Yes | Upstash Redis REST URL |
|
||||
| `restToken` | string | Yes | Upstash Redis REST Token |
|
||||
| `key` | string | Yes | The key to set |
|
||||
| `value` | string | Yes | The value to store if the key does not exist |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The key that was attempted to set |
|
||||
| `wasSet` | boolean | Whether the key was set \(true\) or already existed \(false\) |
|
||||
|
||||
### `upstash_redis_incrby`
|
||||
|
||||
Increment the integer value of a key by a given amount. Use a negative value to decrement. If the key does not exist, it is set to 0 before the operation.
|
||||
|
||||
#### Input
|
||||
|
||||
| Parameter | Type | Required | Description |
|
||||
| --------- | ---- | -------- | ----------- |
|
||||
| `restUrl` | string | Yes | Upstash Redis REST URL |
|
||||
| `restToken` | string | Yes | Upstash Redis REST Token |
|
||||
| `key` | string | Yes | The key to increment |
|
||||
| `increment` | number | Yes | Amount to increment by \(use negative value to decrement\) |
|
||||
|
||||
#### Output
|
||||
|
||||
| Parameter | Type | Description |
|
||||
| --------- | ---- | ----------- |
|
||||
| `key` | string | The key that was incremented |
|
||||
| `value` | number | The new value after incrementing |
|
||||
|
||||
|
||||
274
apps/sim/app/(auth)/oauth/consent/page.tsx
Normal file
274
apps/sim/app/(auth)/oauth/consent/page.tsx
Normal file
@@ -0,0 +1,274 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useEffect, useState } from 'react'
|
||||
import { ArrowLeftRight } from 'lucide-react'
|
||||
import Image from 'next/image'
|
||||
import { useRouter, useSearchParams } from 'next/navigation'
|
||||
import { Button } from '@/components/emcn'
|
||||
import { signOut, useSession } from '@/lib/auth/auth-client'
|
||||
import { inter } from '@/app/_styles/fonts/inter/inter'
|
||||
import { soehne } from '@/app/_styles/fonts/soehne/soehne'
|
||||
import { BrandedButton } from '@/app/(auth)/components/branded-button'
|
||||
|
||||
const SCOPE_DESCRIPTIONS: Record<string, string> = {
|
||||
openid: 'Verify your identity',
|
||||
profile: 'Access your basic profile information',
|
||||
email: 'View your email address',
|
||||
offline_access: 'Maintain access when you are not actively using the app',
|
||||
'mcp:tools': 'Use Sim workflows and tools on your behalf',
|
||||
} as const
|
||||
|
||||
/**
 * Minimal public metadata about an OAuth client application, fetched from
 * `/api/auth/oauth2/client/:clientId` for display on the consent screen.
 */
interface ClientInfo {
  // OAuth client identifier of the requesting application.
  clientId: string
  // Display name shown to the user on the consent screen.
  name: string
  // Icon image URL; the page falls back to an initial-letter avatar when falsy.
  icon: string
}
|
||||
|
||||
/**
 * OAuth consent screen rendered during an authorization code flow.
 *
 * Reads `consent_code`, `client_id`, and `scope` from the query string,
 * fetches display metadata for the requesting client, and lets the signed-in
 * user approve or deny the request — or sign out and restart the flow under a
 * different account. On approval/denial the server returns a redirect URI the
 * browser is sent to.
 */
export default function OAuthConsentPage() {
  const router = useRouter()
  const searchParams = useSearchParams()
  const { data: session } = useSession()
  // Parameters forwarded by the authorization endpoint via the query string.
  const consentCode = searchParams.get('consent_code')
  const clientId = searchParams.get('client_id')
  const scope = searchParams.get('scope')

  const [clientInfo, setClientInfo] = useState<ClientInfo | null>(null)
  const [loading, setLoading] = useState(true)
  const [submitting, setSubmitting] = useState(false)
  const [error, setError] = useState<string | null>(null)

  // OAuth scopes are space-delimited; filter(Boolean) drops empty fragments.
  const scopes = scope?.split(' ').filter(Boolean) ?? []

  // Fetch the client's display metadata (name/icon). A failed fetch is
  // non-fatal: the page falls back to showing the raw client id below.
  useEffect(() => {
    if (!clientId) {
      setLoading(false)
      setError('The authorization request is missing a required client identifier.')
      return
    }

    fetch(`/api/auth/oauth2/client/${encodeURIComponent(clientId)}`, { credentials: 'include' })
      .then(async (res) => {
        if (!res.ok) return
        const data = await res.json()
        setClientInfo(data)
      })
      .catch(() => {})
      .finally(() => {
        setLoading(false)
      })
  }, [clientId])

  // Submit the user's accept/deny decision. On success the server responds
  // with the redirect URI to navigate to; any failure surfaces as an error
  // message and re-enables the buttons.
  const handleConsent = useCallback(
    async (accept: boolean) => {
      if (!consentCode) {
        setError('The authorization request is missing a required consent code.')
        return
      }

      setSubmitting(true)
      try {
        const res = await fetch('/api/auth/oauth2/consent', {
          method: 'POST',
          headers: { 'Content-Type': 'application/json' },
          credentials: 'include',
          body: JSON.stringify({ accept, consent_code: consentCode }),
        })

        if (!res.ok) {
          // Prefer a server-provided message; the body may not be JSON.
          const body = await res.json().catch(() => null)
          setError(
            (body as Record<string, string> | null)?.message ??
              'The consent request could not be processed. Please try again.'
          )
          setSubmitting(false)
          return
        }

        const data = (await res.json()) as { redirectURI?: string }
        if (data.redirectURI) {
          // Full page navigation — leaves the SPA and returns to the client app.
          window.location.href = data.redirectURI
        } else {
          setError('The server did not return a redirect. Please try again.')
          setSubmitting(false)
        }
      } catch {
        setError('Something went wrong. Please try again.')
        setSubmitting(false)
      }
    },
    [consentCode]
  )

  // Re-fetch the original authorize parameters for this consent code, sign the
  // current user out, then restart the authorize flow so the user can pick a
  // different account.
  const handleSwitchAccount = useCallback(async () => {
    if (!consentCode) return

    const res = await fetch(`/api/auth/oauth2/authorize-params?consent_code=${consentCode}`, {
      credentials: 'include',
    })
    if (!res.ok) {
      setError('Unable to switch accounts. Please re-initiate the connection.')
      return
    }

    // Rebuild the /authorize URL, skipping params the server returned as null.
    const params = (await res.json()) as Record<string, string | null>
    const authorizeUrl = new URL('/api/auth/oauth2/authorize', window.location.origin)
    for (const [key, value] of Object.entries(params)) {
      if (value) authorizeUrl.searchParams.set(key, value)
    }

    await signOut({
      fetchOptions: {
        onSuccess: () => {
          window.location.href = authorizeUrl.toString()
        },
      },
    })
  }, [consentCode])

  // Placeholder while client metadata is being fetched.
  if (loading) {
    return (
      <div className='flex flex-col items-center justify-center'>
        <div className='space-y-1 text-center'>
          <h1 className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}>
            Authorize Application
          </h1>
          <p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
            Loading application details...
          </p>
        </div>
      </div>
    )
  }

  // Terminal error state: the flow cannot continue, offer a way back home.
  if (error) {
    return (
      <div className='flex flex-col items-center justify-center'>
        <div className='space-y-1 text-center'>
          <h1 className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}>
            Authorization Error
          </h1>
          <p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
            {error}
          </p>
        </div>
        <div className={`${inter.className} mt-8 w-full max-w-[410px] space-y-3`}>
          <BrandedButton onClick={() => router.push('/')}>Return to Home</BrandedButton>
        </div>
      </div>
    )
  }

  // Prefer the fetched display name; fall back to the raw client id.
  const clientName = clientInfo?.name ?? clientId

  return (
    <div className='flex flex-col items-center justify-center'>
      {/* Header: requesting app's icon <-> Sim logo */}
      <div className='mb-6 flex items-center gap-4'>
        {clientInfo?.icon ? (
          <img
            src={clientInfo.icon}
            alt={clientName ?? 'Application'}
            width={48}
            height={48}
            className='rounded-[10px]'
          />
        ) : (
          <div className='flex h-12 w-12 items-center justify-center rounded-[10px] bg-muted font-medium text-[18px] text-muted-foreground'>
            {(clientName ?? '?').charAt(0).toUpperCase()}
          </div>
        )}
        <ArrowLeftRight className='h-5 w-5 text-muted-foreground' />
        <Image
          src='/new/logo/colorized-bg.svg'
          alt='Sim'
          width={48}
          height={48}
          className='rounded-[10px]'
        />
      </div>

      <div className='space-y-1 text-center'>
        <h1 className={`${soehne.className} font-medium text-[32px] text-black tracking-tight`}>
          Authorize Application
        </h1>
        <p className={`${inter.className} font-[380] text-[16px] text-muted-foreground`}>
          <span className='font-medium text-foreground'>{clientName}</span> is requesting access to
          your account
        </p>
      </div>

      {/* Currently signed-in account, with an option to switch accounts */}
      {session?.user && (
        <div
          className={`${inter.className} mt-5 flex items-center gap-3 rounded-lg border px-4 py-3`}
        >
          {session.user.image ? (
            <Image
              src={session.user.image}
              alt={session.user.name ?? 'User'}
              width={32}
              height={32}
              className='rounded-full'
              unoptimized
            />
          ) : (
            <div className='flex h-8 w-8 items-center justify-center rounded-full bg-muted font-medium text-[13px] text-muted-foreground'>
              {(session.user.name ?? session.user.email ?? '?').charAt(0).toUpperCase()}
            </div>
          )}
          <div className='min-w-0'>
            {session.user.name && (
              <p className='truncate font-medium text-[14px]'>{session.user.name}</p>
            )}
            <p className='truncate text-[13px] text-muted-foreground'>{session.user.email}</p>
          </div>
          <button
            type='button'
            onClick={handleSwitchAccount}
            className='ml-auto text-[13px] text-muted-foreground underline-offset-2 transition-colors hover:text-foreground hover:underline'
          >
            Switch
          </button>
        </div>
      )}

      {/* Requested scopes mapped to human-readable descriptions */}
      {scopes.length > 0 && (
        <div className={`${inter.className} mt-5 w-full max-w-[410px]`}>
          <div className='rounded-lg border p-4'>
            <p className='mb-3 font-medium text-[14px]'>This will allow the application to:</p>
            <ul className='space-y-2'>
              {scopes.map((s) => (
                <li
                  key={s}
                  className='flex items-start gap-2 font-normal text-[13px] text-muted-foreground'
                >
                  <span className='mt-0.5 text-green-500'>✓</span>
                  <span>{SCOPE_DESCRIPTIONS[s] ?? s}</span>
                </li>
              ))}
            </ul>
          </div>
        </div>
      )}

      {/* Deny / Allow actions; disabled while a decision is being submitted */}
      <div className={`${inter.className} mt-6 flex w-full max-w-[410px] gap-3`}>
        <Button
          variant='outline'
          size='md'
          className='px-6 py-2'
          disabled={submitting}
          onClick={() => handleConsent(false)}
        >
          Deny
        </Button>
        <BrandedButton
          fullWidth
          showArrow={false}
          loading={submitting}
          loadingText='Authorizing'
          onClick={() => handleConsent(true)}
        >
          Allow
        </BrandedButton>
      </div>
    </div>
  )
}
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { NextRequest, NextResponse } from 'next/server'
|
||||
import type { NextResponse } from 'next/server'
|
||||
import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||
|
||||
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||
return createMcpAuthorizationServerMetadataResponse(request)
|
||||
export async function GET(): Promise<NextResponse> {
|
||||
return createMcpAuthorizationServerMetadataResponse()
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { NextRequest, NextResponse } from 'next/server'
|
||||
import type { NextResponse } from 'next/server'
|
||||
import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||
|
||||
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||
return createMcpAuthorizationServerMetadataResponse(request)
|
||||
export async function GET(): Promise<NextResponse> {
|
||||
return createMcpAuthorizationServerMetadataResponse()
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { NextRequest, NextResponse } from 'next/server'
|
||||
import type { NextResponse } from 'next/server'
|
||||
import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||
|
||||
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||
return createMcpAuthorizationServerMetadataResponse(request)
|
||||
export async function GET(): Promise<NextResponse> {
|
||||
return createMcpAuthorizationServerMetadataResponse()
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { NextRequest, NextResponse } from 'next/server'
|
||||
import type { NextResponse } from 'next/server'
|
||||
import { createMcpProtectedResourceMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||
|
||||
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||
return createMcpProtectedResourceMetadataResponse(request)
|
||||
export async function GET(): Promise<NextResponse> {
|
||||
return createMcpProtectedResourceMetadataResponse()
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { NextRequest, NextResponse } from 'next/server'
|
||||
import type { NextResponse } from 'next/server'
|
||||
import { createMcpProtectedResourceMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||
|
||||
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||
return createMcpProtectedResourceMetadataResponse(request)
|
||||
export async function GET(): Promise<NextResponse> {
|
||||
return createMcpProtectedResourceMetadataResponse()
|
||||
}
|
||||
|
||||
@@ -23,7 +23,8 @@ export function ThemeProvider({ children, ...props }: ThemeProviderProps) {
|
||||
pathname.startsWith('/chat') ||
|
||||
pathname.startsWith('/studio') ||
|
||||
pathname.startsWith('/resume') ||
|
||||
pathname.startsWith('/form')
|
||||
pathname.startsWith('/form') ||
|
||||
pathname.startsWith('/oauth')
|
||||
|
||||
return (
|
||||
<NextThemesProvider
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { createMockLogger, createMockRequest } from '@sim/testing'
|
||||
import { createMockLogger, createMockRequest, mockHybridAuth } from '@sim/testing'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
describe('OAuth Token API Routes', () => {
|
||||
@@ -12,7 +12,7 @@ describe('OAuth Token API Routes', () => {
|
||||
const mockRefreshTokenIfNeeded = vi.fn()
|
||||
const mockGetOAuthToken = vi.fn()
|
||||
const mockAuthorizeCredentialUse = vi.fn()
|
||||
const mockCheckSessionOrInternalAuth = vi.fn()
|
||||
let mockCheckSessionOrInternalAuth: ReturnType<typeof vi.fn>
|
||||
|
||||
const mockLogger = createMockLogger()
|
||||
|
||||
@@ -41,9 +41,7 @@ describe('OAuth Token API Routes', () => {
|
||||
authorizeCredentialUse: mockAuthorizeCredentialUse,
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: mockCheckSessionOrInternalAuth,
|
||||
}))
|
||||
;({ mockCheckSessionOrInternalAuth } = mockHybridAuth())
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
@@ -73,23 +71,18 @@ describe('OAuth Token API Routes', () => {
|
||||
refreshed: false,
|
||||
})
|
||||
|
||||
// Create mock request
|
||||
const req = createMockRequest('POST', {
|
||||
credentialId: 'credential-id',
|
||||
})
|
||||
|
||||
// Import handler after setting up mocks
|
||||
const { POST } = await import('@/app/api/auth/oauth/token/route')
|
||||
|
||||
// Call handler
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
// Verify request was handled correctly
|
||||
expect(response.status).toBe(200)
|
||||
expect(data).toHaveProperty('accessToken', 'fresh-token')
|
||||
|
||||
// Verify mocks were called correctly
|
||||
expect(mockAuthorizeCredentialUse).toHaveBeenCalled()
|
||||
expect(mockGetCredential).toHaveBeenCalled()
|
||||
expect(mockRefreshTokenIfNeeded).toHaveBeenCalled()
|
||||
|
||||
59
apps/sim/app/api/auth/oauth2/authorize-params/route.ts
Normal file
59
apps/sim/app/api/auth/oauth2/authorize-params/route.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import { db } from '@sim/db'
|
||||
import { verification } from '@sim/db/schema'
|
||||
import { and, eq, gt } from 'drizzle-orm'
|
||||
import type { NextRequest } from 'next/server'
|
||||
import { NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
|
||||
/**
|
||||
* Returns the original OAuth authorize parameters stored in the verification record
|
||||
* for a given consent code. Used by the consent page to reconstruct the authorize URL
|
||||
* when switching accounts.
|
||||
*/
|
||||
export async function GET(request: NextRequest) {
|
||||
const session = await getSession()
|
||||
if (!session?.user) {
|
||||
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const consentCode = request.nextUrl.searchParams.get('consent_code')
|
||||
if (!consentCode) {
|
||||
return NextResponse.json({ error: 'consent_code is required' }, { status: 400 })
|
||||
}
|
||||
|
||||
const [record] = await db
|
||||
.select({ value: verification.value })
|
||||
.from(verification)
|
||||
.where(and(eq(verification.identifier, consentCode), gt(verification.expiresAt, new Date())))
|
||||
.limit(1)
|
||||
|
||||
if (!record) {
|
||||
return NextResponse.json({ error: 'Invalid or expired consent code' }, { status: 404 })
|
||||
}
|
||||
|
||||
const data = JSON.parse(record.value) as {
|
||||
clientId: string
|
||||
redirectURI: string
|
||||
scope: string[]
|
||||
userId: string
|
||||
codeChallenge: string
|
||||
codeChallengeMethod: string
|
||||
state: string | null
|
||||
nonce: string | null
|
||||
}
|
||||
|
||||
if (data.userId !== session.user.id) {
|
||||
return NextResponse.json({ error: 'Forbidden' }, { status: 403 })
|
||||
}
|
||||
|
||||
return NextResponse.json({
|
||||
client_id: data.clientId,
|
||||
redirect_uri: data.redirectURI,
|
||||
scope: data.scope.join(' '),
|
||||
code_challenge: data.codeChallenge,
|
||||
code_challenge_method: data.codeChallengeMethod,
|
||||
state: data.state,
|
||||
nonce: data.nonce,
|
||||
response_type: 'code',
|
||||
})
|
||||
}
|
||||
@@ -3,7 +3,7 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { loggerMock } from '@sim/testing'
|
||||
import { loggerMock, requestUtilsMock } from '@sim/testing'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
/**
|
||||
@@ -94,9 +94,7 @@ vi.mock('@/lib/core/utils/sse', () => ({
|
||||
},
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/core/utils/request', () => ({
|
||||
generateRequestId: vi.fn().mockReturnValue('test-request-id'),
|
||||
}))
|
||||
vi.mock('@/lib/core/utils/request', () => requestUtilsMock)
|
||||
|
||||
vi.mock('@/lib/core/security/encryption', () => ({
|
||||
decryptSecret: vi.fn().mockResolvedValue({ decrypted: 'test-password' }),
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { databaseMock, loggerMock } from '@sim/testing'
|
||||
import { databaseMock, loggerMock, requestUtilsMock } from '@sim/testing'
|
||||
import type { NextResponse } from 'next/server'
|
||||
/**
|
||||
* Tests for chat API utils
|
||||
@@ -37,9 +37,7 @@ vi.mock('@/lib/core/security/encryption', () => ({
|
||||
decryptSecret: mockDecryptSecret,
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/core/utils/request', () => ({
|
||||
generateRequestId: vi.fn(),
|
||||
}))
|
||||
vi.mock('@/lib/core/utils/request', () => requestUtilsMock)
|
||||
|
||||
vi.mock('@/lib/core/config/feature-flags', () => ({
|
||||
isDev: true,
|
||||
|
||||
@@ -2,6 +2,7 @@ import {
|
||||
createMockRequest,
|
||||
mockAuth,
|
||||
mockCryptoUuid,
|
||||
mockHybridAuth,
|
||||
mockUuid,
|
||||
setupCommonApiMocks,
|
||||
} from '@sim/testing'
|
||||
@@ -28,13 +29,12 @@ function setupFileApiMocks(
|
||||
authMocks.setUnauthenticated()
|
||||
}
|
||||
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
|
||||
success: authenticated,
|
||||
userId: authenticated ? 'test-user-id' : undefined,
|
||||
error: authenticated ? undefined : 'Unauthorized',
|
||||
}),
|
||||
}))
|
||||
const { mockCheckSessionOrInternalAuth } = mockHybridAuth()
|
||||
mockCheckSessionOrInternalAuth.mockResolvedValue({
|
||||
success: authenticated,
|
||||
userId: authenticated ? 'test-user-id' : undefined,
|
||||
error: authenticated ? undefined : 'Unauthorized',
|
||||
})
|
||||
|
||||
vi.doMock('@/app/api/files/authorization', () => ({
|
||||
verifyFileAccess: vi.fn().mockResolvedValue(true),
|
||||
|
||||
@@ -8,6 +8,7 @@ import {
|
||||
createMockRequest,
|
||||
mockAuth,
|
||||
mockCryptoUuid,
|
||||
mockHybridAuth,
|
||||
mockUuid,
|
||||
setupCommonApiMocks,
|
||||
} from '@sim/testing'
|
||||
@@ -34,13 +35,12 @@ function setupFileApiMocks(
|
||||
authMocks.setUnauthenticated()
|
||||
}
|
||||
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkInternalAuth: vi.fn().mockResolvedValue({
|
||||
success: authenticated,
|
||||
userId: authenticated ? 'test-user-id' : undefined,
|
||||
error: authenticated ? undefined : 'Unauthorized',
|
||||
}),
|
||||
}))
|
||||
const { mockCheckInternalAuth } = mockHybridAuth()
|
||||
mockCheckInternalAuth.mockResolvedValue({
|
||||
success: authenticated,
|
||||
userId: authenticated ? 'test-user-id' : undefined,
|
||||
error: authenticated ? undefined : 'Unauthorized',
|
||||
})
|
||||
|
||||
vi.doMock('@/app/api/files/authorization', () => ({
|
||||
verifyFileAccess: vi.fn().mockResolvedValue(true),
|
||||
|
||||
@@ -1,4 +1,10 @@
|
||||
import { mockAuth, mockCryptoUuid, mockUuid, setupCommonApiMocks } from '@sim/testing'
|
||||
import {
|
||||
mockAuth,
|
||||
mockCryptoUuid,
|
||||
mockHybridAuth,
|
||||
mockUuid,
|
||||
setupCommonApiMocks,
|
||||
} from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
@@ -28,13 +34,12 @@ function setupFileApiMocks(
|
||||
authMocks.setUnauthenticated()
|
||||
}
|
||||
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkHybridAuth: vi.fn().mockResolvedValue({
|
||||
success: authenticated,
|
||||
userId: authenticated ? 'test-user-id' : undefined,
|
||||
error: authenticated ? undefined : 'Unauthorized',
|
||||
}),
|
||||
}))
|
||||
const { mockCheckHybridAuth } = mockHybridAuth()
|
||||
mockCheckHybridAuth.mockResolvedValue({
|
||||
success: authenticated,
|
||||
userId: authenticated ? 'test-user-id' : undefined,
|
||||
error: authenticated ? undefined : 'Unauthorized',
|
||||
})
|
||||
|
||||
vi.doMock('@/app/api/files/authorization', () => ({
|
||||
verifyFileAccess: vi.fn().mockResolvedValue(true),
|
||||
|
||||
@@ -7,6 +7,7 @@ import {
|
||||
defaultMockUser,
|
||||
mockAuth,
|
||||
mockCryptoUuid,
|
||||
mockHybridAuth,
|
||||
mockUuid,
|
||||
setupCommonApiMocks,
|
||||
} from '@sim/testing'
|
||||
@@ -54,12 +55,11 @@ describe('File Serve API Route', () => {
|
||||
withUploadUtils: true,
|
||||
})
|
||||
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'test-user-id',
|
||||
}),
|
||||
}))
|
||||
const { mockCheckSessionOrInternalAuth: serveAuthMock } = mockHybridAuth()
|
||||
serveAuthMock.mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'test-user-id',
|
||||
})
|
||||
|
||||
vi.doMock('@/app/api/files/authorization', () => ({
|
||||
verifyFileAccess: vi.fn().mockResolvedValue(true),
|
||||
@@ -164,12 +164,11 @@ describe('File Serve API Route', () => {
|
||||
findLocalFile: vi.fn().mockReturnValue('/test/uploads/nested/path/file.txt'),
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'test-user-id',
|
||||
}),
|
||||
}))
|
||||
const { mockCheckSessionOrInternalAuth: serveAuthMock } = mockHybridAuth()
|
||||
serveAuthMock.mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'test-user-id',
|
||||
})
|
||||
|
||||
vi.doMock('@/app/api/files/authorization', () => ({
|
||||
verifyFileAccess: vi.fn().mockResolvedValue(true),
|
||||
@@ -225,12 +224,11 @@ describe('File Serve API Route', () => {
|
||||
USE_BLOB_STORAGE: false,
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'test-user-id',
|
||||
}),
|
||||
}))
|
||||
const { mockCheckSessionOrInternalAuth: serveAuthMock } = mockHybridAuth()
|
||||
serveAuthMock.mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'test-user-id',
|
||||
})
|
||||
|
||||
vi.doMock('@/app/api/files/authorization', () => ({
|
||||
verifyFileAccess: vi.fn().mockResolvedValue(true),
|
||||
@@ -290,12 +288,11 @@ describe('File Serve API Route', () => {
|
||||
readFile: vi.fn().mockRejectedValue(new Error('ENOENT: no such file or directory')),
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'test-user-id',
|
||||
}),
|
||||
}))
|
||||
const { mockCheckSessionOrInternalAuth: serveAuthMock } = mockHybridAuth()
|
||||
serveAuthMock.mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'test-user-id',
|
||||
})
|
||||
|
||||
vi.doMock('@/app/api/files/authorization', () => ({
|
||||
verifyFileAccess: vi.fn().mockResolvedValue(false), // File not found = no access
|
||||
@@ -349,12 +346,11 @@ describe('File Serve API Route', () => {
|
||||
|
||||
for (const test of contentTypeTests) {
|
||||
it(`should serve ${test.ext} file with correct content type`, async () => {
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'test-user-id',
|
||||
}),
|
||||
}))
|
||||
const { mockCheckSessionOrInternalAuth: ctAuthMock } = mockHybridAuth()
|
||||
ctAuthMock.mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'test-user-id',
|
||||
})
|
||||
|
||||
vi.doMock('@/app/api/files/authorization', () => ({
|
||||
verifyFileAccess: vi.fn().mockResolvedValue(true),
|
||||
|
||||
@@ -3,7 +3,13 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { mockAuth, mockCryptoUuid, mockUuid, setupCommonApiMocks } from '@sim/testing'
|
||||
import {
|
||||
mockAuth,
|
||||
mockCryptoUuid,
|
||||
mockHybridAuth,
|
||||
mockUuid,
|
||||
setupCommonApiMocks,
|
||||
} from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
@@ -27,13 +33,12 @@ function setupFileApiMocks(
|
||||
authMocks.setUnauthenticated()
|
||||
}
|
||||
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkHybridAuth: vi.fn().mockResolvedValue({
|
||||
success: authenticated,
|
||||
userId: authenticated ? 'test-user-id' : undefined,
|
||||
error: authenticated ? undefined : 'Unauthorized',
|
||||
}),
|
||||
}))
|
||||
const { mockCheckHybridAuth } = mockHybridAuth()
|
||||
mockCheckHybridAuth.mockResolvedValue({
|
||||
success: authenticated,
|
||||
userId: authenticated ? 'test-user-id' : undefined,
|
||||
error: authenticated ? undefined : 'Unauthorized',
|
||||
})
|
||||
|
||||
vi.doMock('@/app/api/files/authorization', () => ({
|
||||
verifyFileAccess: vi.fn().mockResolvedValue(true),
|
||||
|
||||
@@ -10,6 +10,7 @@ import {
|
||||
createMockRequest,
|
||||
mockConsoleLogger,
|
||||
mockKnowledgeSchemas,
|
||||
requestUtilsMock,
|
||||
} from '@sim/testing'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
@@ -29,9 +30,7 @@ mockKnowledgeSchemas()
|
||||
|
||||
vi.mock('@/lib/core/config/env', () => createEnvMock({ OPENAI_API_KEY: 'test-api-key' }))
|
||||
|
||||
vi.mock('@/lib/core/utils/request', () => ({
|
||||
generateRequestId: vi.fn(() => 'test-request-id'),
|
||||
}))
|
||||
vi.mock('@/lib/core/utils/request', () => requestUtilsMock)
|
||||
|
||||
vi.mock('@/lib/documents/utils', () => ({
|
||||
retryWithExponentialBackoff: vi.fn().mockImplementation((fn) => fn()),
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { NextRequest, NextResponse } from 'next/server'
|
||||
import type { NextResponse } from 'next/server'
|
||||
import { createMcpAuthorizationServerMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||
|
||||
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||
return createMcpAuthorizationServerMetadataResponse(request)
|
||||
export async function GET(): Promise<NextResponse> {
|
||||
return createMcpAuthorizationServerMetadataResponse()
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import type { NextRequest, NextResponse } from 'next/server'
|
||||
import type { NextResponse } from 'next/server'
|
||||
import { createMcpProtectedResourceMetadataResponse } from '@/lib/mcp/oauth-discovery'
|
||||
|
||||
export async function GET(request: NextRequest): Promise<NextResponse> {
|
||||
return createMcpProtectedResourceMetadataResponse(request)
|
||||
export async function GET(): Promise<NextResponse> {
|
||||
return createMcpProtectedResourceMetadataResponse()
|
||||
}
|
||||
|
||||
@@ -16,6 +16,7 @@ import { userStats } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq, sql } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { validateOAuthAccessToken } from '@/lib/auth/oauth-token'
|
||||
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
|
||||
import {
|
||||
ORCHESTRATION_TIMEOUT_MS,
|
||||
@@ -31,6 +32,7 @@ import {
|
||||
import { DIRECT_TOOL_DEFS, SUBAGENT_TOOL_DEFS } from '@/lib/copilot/tools/mcp/definitions'
|
||||
import { env } from '@/lib/core/config/env'
|
||||
import { RateLimiter } from '@/lib/core/rate-limiter'
|
||||
import { getBaseUrl } from '@/lib/core/utils/urls'
|
||||
import {
|
||||
authorizeWorkflowByWorkspacePermission,
|
||||
resolveWorkflowIdForUser,
|
||||
@@ -384,12 +386,14 @@ function buildMcpServer(abortSignal?: AbortSignal): Server {
|
||||
name: tool.name,
|
||||
description: tool.description,
|
||||
inputSchema: tool.inputSchema,
|
||||
...(tool.annotations && { annotations: tool.annotations }),
|
||||
}))
|
||||
|
||||
const subagentTools = SUBAGENT_TOOL_DEFS.map((tool) => ({
|
||||
name: tool.name,
|
||||
description: tool.description,
|
||||
inputSchema: tool.inputSchema,
|
||||
...(tool.annotations && { annotations: tool.annotations }),
|
||||
}))
|
||||
|
||||
const result: ListToolsResult = {
|
||||
@@ -402,27 +406,51 @@ function buildMcpServer(abortSignal?: AbortSignal): Server {
|
||||
server.setRequestHandler(CallToolRequestSchema, async (request, extra) => {
|
||||
const headers = (extra.requestInfo?.headers || {}) as HeaderMap
|
||||
const apiKeyHeader = readHeader(headers, 'x-api-key')
|
||||
const authorizationHeader = readHeader(headers, 'authorization')
|
||||
|
||||
if (!apiKeyHeader) {
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text' as const,
|
||||
text: 'AUTHENTICATION ERROR: No Copilot API key provided. The user must set their Copilot API key in the x-api-key header. They can generate one in the Sim app under Settings → Copilot. Do NOT retry — this will fail until the key is configured.',
|
||||
},
|
||||
],
|
||||
isError: true,
|
||||
let authResult: CopilotKeyAuthResult = { success: false }
|
||||
|
||||
if (authorizationHeader?.startsWith('Bearer ')) {
|
||||
const token = authorizationHeader.slice(7)
|
||||
const oauthResult = await validateOAuthAccessToken(token)
|
||||
if (oauthResult.success && oauthResult.userId) {
|
||||
if (!oauthResult.scopes?.includes('mcp:tools')) {
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text' as const,
|
||||
text: 'AUTHENTICATION ERROR: OAuth token is missing the required "mcp:tools" scope. Re-authorize with the correct scopes.',
|
||||
},
|
||||
],
|
||||
isError: true,
|
||||
}
|
||||
}
|
||||
authResult = { success: true, userId: oauthResult.userId }
|
||||
} else {
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text' as const,
|
||||
text: `AUTHENTICATION ERROR: ${oauthResult.error ?? 'Invalid OAuth access token'} Do NOT retry — re-authorize via OAuth.`,
|
||||
},
|
||||
],
|
||||
isError: true,
|
||||
}
|
||||
}
|
||||
} else if (apiKeyHeader) {
|
||||
authResult = await authenticateCopilotApiKey(apiKeyHeader)
|
||||
}
|
||||
|
||||
const authResult = await authenticateCopilotApiKey(apiKeyHeader)
|
||||
if (!authResult.success || !authResult.userId) {
|
||||
logger.warn('MCP copilot key auth failed', { method: request.method })
|
||||
const errorMsg = apiKeyHeader
|
||||
? `AUTHENTICATION ERROR: ${authResult.error} Do NOT retry — this will fail until the user fixes their Copilot API key.`
|
||||
: 'AUTHENTICATION ERROR: No authentication provided. Provide a Bearer token (OAuth 2.1) or an x-api-key header. Generate a Copilot API key in Settings → Copilot.'
|
||||
logger.warn('MCP copilot auth failed', { method: request.method })
|
||||
return {
|
||||
content: [
|
||||
{
|
||||
type: 'text' as const,
|
||||
text: `AUTHENTICATION ERROR: ${authResult.error} Do NOT retry — this will fail until the user fixes their Copilot API key.`,
|
||||
text: errorMsg,
|
||||
},
|
||||
],
|
||||
isError: true,
|
||||
@@ -512,6 +540,20 @@ export async function GET() {
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const hasAuth = request.headers.has('authorization') || request.headers.has('x-api-key')
|
||||
|
||||
if (!hasAuth) {
|
||||
const origin = getBaseUrl().replace(/\/$/, '')
|
||||
const resourceMetadataUrl = `${origin}/.well-known/oauth-protected-resource/api/mcp/copilot`
|
||||
return new NextResponse(JSON.stringify({ error: 'unauthorized' }), {
|
||||
status: 401,
|
||||
headers: {
|
||||
'WWW-Authenticate': `Bearer resource_metadata="${resourceMetadataUrl}", scope="mcp:tools"`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
try {
|
||||
let parsedBody: unknown
|
||||
|
||||
@@ -532,6 +574,19 @@ export async function POST(request: NextRequest) {
|
||||
}
|
||||
}
|
||||
|
||||
export async function OPTIONS() {
|
||||
return new NextResponse(null, {
|
||||
status: 204,
|
||||
headers: {
|
||||
'Access-Control-Allow-Origin': '*',
|
||||
'Access-Control-Allow-Methods': 'GET, POST, OPTIONS, DELETE',
|
||||
'Access-Control-Allow-Headers':
|
||||
'Content-Type, Authorization, X-API-Key, X-Requested-With, Accept',
|
||||
'Access-Control-Max-Age': '86400',
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export async function DELETE(request: NextRequest) {
|
||||
void request
|
||||
return NextResponse.json(createError(0, -32000, 'Method not allowed.'), { status: 405 })
|
||||
|
||||
@@ -3,10 +3,11 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { mockHybridAuth } from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
const mockCheckHybridAuth = vi.fn()
|
||||
let mockCheckHybridAuth: ReturnType<typeof vi.fn>
|
||||
const mockGetUserEntityPermissions = vi.fn()
|
||||
const mockGenerateInternalToken = vi.fn()
|
||||
const mockDbSelect = vi.fn()
|
||||
@@ -61,9 +62,7 @@ describe('MCP Serve Route', () => {
|
||||
isDeployed: 'isDeployed',
|
||||
},
|
||||
}))
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkHybridAuth: mockCheckHybridAuth,
|
||||
}))
|
||||
;({ mockCheckHybridAuth } = mockHybridAuth())
|
||||
vi.doMock('@/lib/workspaces/permissions/utils', () => ({
|
||||
getUserEntityPermissions: mockGetUserEntityPermissions,
|
||||
}))
|
||||
|
||||
@@ -19,6 +19,7 @@ const configSchema = z.object({
|
||||
allowedModelProviders: z.array(z.string()).nullable().optional(),
|
||||
hideTraceSpans: z.boolean().optional(),
|
||||
hideKnowledgeBaseTab: z.boolean().optional(),
|
||||
hideTablesTab: z.boolean().optional(),
|
||||
hideCopilot: z.boolean().optional(),
|
||||
hideApiKeysTab: z.boolean().optional(),
|
||||
hideEnvironmentTab: z.boolean().optional(),
|
||||
|
||||
@@ -20,6 +20,7 @@ const configSchema = z.object({
|
||||
allowedModelProviders: z.array(z.string()).nullable().optional(),
|
||||
hideTraceSpans: z.boolean().optional(),
|
||||
hideKnowledgeBaseTab: z.boolean().optional(),
|
||||
hideTablesTab: z.boolean().optional(),
|
||||
hideCopilot: z.boolean().optional(),
|
||||
hideApiKeysTab: z.boolean().optional(),
|
||||
hideEnvironmentTab: z.boolean().optional(),
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { auditMock, databaseMock, loggerMock } from '@sim/testing'
|
||||
import { auditMock, databaseMock, loggerMock, requestUtilsMock } from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
@@ -31,9 +31,7 @@ vi.mock('drizzle-orm', () => ({
|
||||
eq: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/core/utils/request', () => ({
|
||||
generateRequestId: () => 'test-request-id',
|
||||
}))
|
||||
vi.mock('@/lib/core/utils/request', () => requestUtilsMock)
|
||||
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { databaseMock, loggerMock } from '@sim/testing'
|
||||
import { databaseMock, loggerMock, requestUtilsMock } from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
@@ -43,9 +43,7 @@ vi.mock('drizzle-orm', () => ({
|
||||
isNull: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/core/utils/request', () => ({
|
||||
generateRequestId: () => 'test-request-id',
|
||||
}))
|
||||
vi.mock('@/lib/core/utils/request', () => requestUtilsMock)
|
||||
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { createMockRequest, loggerMock } from '@sim/testing'
|
||||
import { createMockRequest, loggerMock, mockHybridAuth } from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
@@ -180,13 +180,12 @@ describe('Custom Tools API Routes', () => {
|
||||
getSession: vi.fn().mockResolvedValue(mockSession),
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'user-123',
|
||||
authType: 'session',
|
||||
}),
|
||||
}))
|
||||
const { mockCheckSessionOrInternalAuth: hybridAuthMock } = mockHybridAuth()
|
||||
hybridAuthMock.mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'user-123',
|
||||
authType: 'session',
|
||||
})
|
||||
|
||||
vi.doMock('@/lib/workspaces/permissions/utils', () => ({
|
||||
getUserEntityPermissions: vi.fn().mockResolvedValue('admin'),
|
||||
@@ -261,12 +260,11 @@ describe('Custom Tools API Routes', () => {
|
||||
'http://localhost:3000/api/tools/custom?workspaceId=workspace-123'
|
||||
)
|
||||
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
|
||||
success: false,
|
||||
error: 'Unauthorized',
|
||||
}),
|
||||
}))
|
||||
const { mockCheckSessionOrInternalAuth: unauthMock } = mockHybridAuth()
|
||||
unauthMock.mockResolvedValue({
|
||||
success: false,
|
||||
error: 'Unauthorized',
|
||||
})
|
||||
|
||||
const { GET } = await import('@/app/api/tools/custom/route')
|
||||
|
||||
@@ -297,12 +295,11 @@ describe('Custom Tools API Routes', () => {
|
||||
*/
|
||||
describe('POST /api/tools/custom', () => {
|
||||
it('should reject unauthorized requests', async () => {
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
|
||||
success: false,
|
||||
error: 'Unauthorized',
|
||||
}),
|
||||
}))
|
||||
const { mockCheckSessionOrInternalAuth: unauthMock } = mockHybridAuth()
|
||||
unauthMock.mockResolvedValue({
|
||||
success: false,
|
||||
error: 'Unauthorized',
|
||||
})
|
||||
|
||||
const req = createMockRequest('POST', { tools: [], workspaceId: 'workspace-123' })
|
||||
|
||||
@@ -384,13 +381,12 @@ describe('Custom Tools API Routes', () => {
|
||||
})
|
||||
|
||||
it('should prevent unauthorized deletion of user-scoped tool', async () => {
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'user-456',
|
||||
authType: 'session',
|
||||
}),
|
||||
}))
|
||||
const { mockCheckSessionOrInternalAuth: diffUserMock } = mockHybridAuth()
|
||||
diffUserMock.mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'user-456',
|
||||
authType: 'session',
|
||||
})
|
||||
|
||||
const userScopedTool = { ...sampleTools[0], workspaceId: null, userId: 'user-123' }
|
||||
const mockLimitUserScoped = vi.fn().mockResolvedValue([userScopedTool])
|
||||
@@ -408,12 +404,11 @@ describe('Custom Tools API Routes', () => {
|
||||
})
|
||||
|
||||
it('should reject unauthorized requests', async () => {
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: vi.fn().mockResolvedValue({
|
||||
success: false,
|
||||
error: 'Unauthorized',
|
||||
}),
|
||||
}))
|
||||
const { mockCheckSessionOrInternalAuth: unauthMock } = mockHybridAuth()
|
||||
unauthMock.mockResolvedValue({
|
||||
success: false,
|
||||
error: 'Unauthorized',
|
||||
})
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/tools/custom?id=tool-1')
|
||||
|
||||
|
||||
57
apps/sim/app/api/tools/redis/execute/route.ts
Normal file
57
apps/sim/app/api/tools/redis/execute/route.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import Redis from 'ioredis'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
|
||||
const logger = createLogger('RedisAPI')
|
||||
|
||||
const RequestSchema = z.object({
|
||||
url: z.string().min(1, 'Redis connection URL is required'),
|
||||
command: z.string().min(1, 'Redis command is required'),
|
||||
args: z.array(z.union([z.string(), z.number()])).default([]),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
let client: Redis | null = null
|
||||
|
||||
try {
|
||||
const auth = await checkInternalAuth(request)
|
||||
if (!auth.success || !auth.userId) {
|
||||
return NextResponse.json({ error: auth.error || 'Unauthorized' }, { status: 401 })
|
||||
}
|
||||
|
||||
const body = await request.json()
|
||||
const { url, command, args } = RequestSchema.parse(body)
|
||||
|
||||
client = new Redis(url, {
|
||||
connectTimeout: 10000,
|
||||
commandTimeout: 10000,
|
||||
maxRetriesPerRequest: 1,
|
||||
lazyConnect: true,
|
||||
})
|
||||
|
||||
await client.connect()
|
||||
|
||||
const cmd = command.toUpperCase()
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const result = await (client as any).call(cmd, ...args)
|
||||
|
||||
await client.quit()
|
||||
client = null
|
||||
|
||||
return NextResponse.json({ result })
|
||||
} catch (error) {
|
||||
logger.error('Redis command failed', { error })
|
||||
const errorMessage = error instanceof Error ? error.message : 'Redis command failed'
|
||||
return NextResponse.json({ error: errorMessage }, { status: 500 })
|
||||
} finally {
|
||||
if (client) {
|
||||
try {
|
||||
await client.quit()
|
||||
} catch {
|
||||
client.disconnect()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
96
apps/sim/app/api/tools/slack/send-ephemeral/route.ts
Normal file
96
apps/sim/app/api/tools/slack/send-ephemeral/route.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { checkInternalAuth } from '@/lib/auth/hybrid'
|
||||
import { generateRequestId } from '@/lib/core/utils/request'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('SlackSendEphemeralAPI')
|
||||
|
||||
const SlackSendEphemeralSchema = z.object({
|
||||
accessToken: z.string().min(1, 'Access token is required'),
|
||||
channel: z.string().min(1, 'Channel ID is required'),
|
||||
user: z.string().min(1, 'User ID is required'),
|
||||
text: z.string().min(1, 'Message text is required'),
|
||||
thread_ts: z.string().optional().nullable(),
|
||||
blocks: z.array(z.record(z.unknown())).optional().nullable(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
const requestId = generateRequestId()
|
||||
|
||||
try {
|
||||
const authResult = await checkInternalAuth(request, { requireWorkflowId: false })
|
||||
|
||||
if (!authResult.success) {
|
||||
logger.warn(`[${requestId}] Unauthorized Slack ephemeral send attempt: ${authResult.error}`)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: authResult.error || 'Authentication required',
|
||||
},
|
||||
{ status: 401 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(
|
||||
`[${requestId}] Authenticated Slack ephemeral send request via ${authResult.authType}`,
|
||||
{ userId: authResult.userId }
|
||||
)
|
||||
|
||||
const body = await request.json()
|
||||
const validatedData = SlackSendEphemeralSchema.parse(body)
|
||||
|
||||
logger.info(`[${requestId}] Sending ephemeral message`, {
|
||||
channel: validatedData.channel,
|
||||
user: validatedData.user,
|
||||
threadTs: validatedData.thread_ts ?? undefined,
|
||||
})
|
||||
|
||||
const response = await fetch('https://slack.com/api/chat.postEphemeral', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${validatedData.accessToken}`,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
channel: validatedData.channel,
|
||||
user: validatedData.user,
|
||||
text: validatedData.text,
|
||||
...(validatedData.thread_ts && { thread_ts: validatedData.thread_ts }),
|
||||
...(validatedData.blocks &&
|
||||
validatedData.blocks.length > 0 && { blocks: validatedData.blocks }),
|
||||
}),
|
||||
})
|
||||
|
||||
const data = await response.json()
|
||||
|
||||
if (!data.ok) {
|
||||
logger.error(`[${requestId}] Slack API error:`, data.error)
|
||||
return NextResponse.json(
|
||||
{ success: false, error: data.error || 'Failed to send ephemeral message' },
|
||||
{ status: 400 }
|
||||
)
|
||||
}
|
||||
|
||||
logger.info(`[${requestId}] Ephemeral message sent successfully`)
|
||||
|
||||
return NextResponse.json({
|
||||
success: true,
|
||||
output: {
|
||||
messageTs: data.message_ts,
|
||||
channel: validatedData.channel,
|
||||
},
|
||||
})
|
||||
} catch (error) {
|
||||
logger.error(`[${requestId}] Error sending ephemeral message:`, error)
|
||||
return NextResponse.json(
|
||||
{
|
||||
success: false,
|
||||
error: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||
},
|
||||
{ status: 500 }
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -17,6 +17,7 @@ const SlackSendMessageSchema = z
|
||||
userId: z.string().optional().nullable(),
|
||||
text: z.string().min(1, 'Message text is required'),
|
||||
thread_ts: z.string().optional().nullable(),
|
||||
blocks: z.array(z.record(z.unknown())).optional().nullable(),
|
||||
files: RawFileInputArraySchema.optional().nullable(),
|
||||
})
|
||||
.refine((data) => data.channel || data.userId, {
|
||||
@@ -63,6 +64,7 @@ export async function POST(request: NextRequest) {
|
||||
userId: validatedData.userId ?? undefined,
|
||||
text: validatedData.text,
|
||||
threadTs: validatedData.thread_ts ?? undefined,
|
||||
blocks: validatedData.blocks ?? undefined,
|
||||
files: validatedData.files ?? undefined,
|
||||
},
|
||||
requestId,
|
||||
|
||||
@@ -13,6 +13,7 @@ const SlackUpdateMessageSchema = z.object({
|
||||
channel: z.string().min(1, 'Channel is required'),
|
||||
timestamp: z.string().min(1, 'Message timestamp is required'),
|
||||
text: z.string().min(1, 'Message text is required'),
|
||||
blocks: z.array(z.record(z.unknown())).optional().nullable(),
|
||||
})
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
@@ -57,6 +58,8 @@ export async function POST(request: NextRequest) {
|
||||
channel: validatedData.channel,
|
||||
ts: validatedData.timestamp,
|
||||
text: validatedData.text,
|
||||
...(validatedData.blocks &&
|
||||
validatedData.blocks.length > 0 && { blocks: validatedData.blocks }),
|
||||
}),
|
||||
})
|
||||
|
||||
|
||||
@@ -11,7 +11,8 @@ export async function postSlackMessage(
|
||||
accessToken: string,
|
||||
channel: string,
|
||||
text: string,
|
||||
threadTs?: string | null
|
||||
threadTs?: string | null,
|
||||
blocks?: unknown[] | null
|
||||
): Promise<{ ok: boolean; ts?: string; channel?: string; message?: any; error?: string }> {
|
||||
const response = await fetch('https://slack.com/api/chat.postMessage', {
|
||||
method: 'POST',
|
||||
@@ -23,6 +24,7 @@ export async function postSlackMessage(
|
||||
channel,
|
||||
text,
|
||||
...(threadTs && { thread_ts: threadTs }),
|
||||
...(blocks && blocks.length > 0 && { blocks }),
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -220,6 +222,7 @@ export interface SlackMessageParams {
|
||||
userId?: string
|
||||
text: string
|
||||
threadTs?: string | null
|
||||
blocks?: unknown[] | null
|
||||
files?: any[] | null
|
||||
}
|
||||
|
||||
@@ -242,7 +245,7 @@ export async function sendSlackMessage(
|
||||
}
|
||||
error?: string
|
||||
}> {
|
||||
const { accessToken, text, threadTs, files } = params
|
||||
const { accessToken, text, threadTs, blocks, files } = params
|
||||
let { channel } = params
|
||||
|
||||
if (!channel && params.userId) {
|
||||
@@ -258,7 +261,7 @@ export async function sendSlackMessage(
|
||||
if (!files || files.length === 0) {
|
||||
logger.info(`[${requestId}] No files, using chat.postMessage`)
|
||||
|
||||
const data = await postSlackMessage(accessToken, channel, text, threadTs)
|
||||
const data = await postSlackMessage(accessToken, channel, text, threadTs, blocks)
|
||||
|
||||
if (!data.ok) {
|
||||
logger.error(`[${requestId}] Slack API error:`, data.error)
|
||||
@@ -282,7 +285,7 @@ export async function sendSlackMessage(
|
||||
if (fileIds.length === 0) {
|
||||
logger.warn(`[${requestId}] No valid files to upload, sending text-only message`)
|
||||
|
||||
const data = await postSlackMessage(accessToken, channel, text, threadTs)
|
||||
const data = await postSlackMessage(accessToken, channel, text, threadTs, blocks)
|
||||
|
||||
if (!data.ok) {
|
||||
return { success: false, error: data.error || 'Failed to send message' }
|
||||
|
||||
@@ -165,7 +165,7 @@ export async function POST(request: NextRequest) {
|
||||
}
|
||||
|
||||
const modelName =
|
||||
provider === 'anthropic' ? 'anthropic/claude-3-7-sonnet-latest' : 'openai/gpt-4.1'
|
||||
provider === 'anthropic' ? 'anthropic/claude-sonnet-4-5-20250929' : 'openai/gpt-5'
|
||||
|
||||
try {
|
||||
logger.info('Initializing Stagehand with Browserbase (v3)', { provider, modelName })
|
||||
|
||||
@@ -101,7 +101,7 @@ export async function POST(request: NextRequest) {
|
||||
|
||||
try {
|
||||
const modelName =
|
||||
provider === 'anthropic' ? 'anthropic/claude-3-7-sonnet-latest' : 'openai/gpt-4.1'
|
||||
provider === 'anthropic' ? 'anthropic/claude-sonnet-4-5-20250929' : 'openai/gpt-5'
|
||||
|
||||
logger.info('Initializing Stagehand with Browserbase (v3)', { provider, modelName })
|
||||
|
||||
|
||||
@@ -766,7 +766,7 @@ async function transcribeWithGemini(
|
||||
const error = await response.json()
|
||||
if (response.status === 404) {
|
||||
throw new Error(
|
||||
`Model not found: ${modelName}. Use gemini-3-pro-preview, gemini-2.5-pro, gemini-2.5-flash, gemini-2.5-flash-lite, or gemini-2.0-flash-exp`
|
||||
`Model not found: ${modelName}. Use gemini-3.1-pro-preview, gemini-3-pro-preview, gemini-2.5-pro, gemini-2.5-flash, gemini-2.5-flash-lite, or gemini-2.0-flash-exp`
|
||||
)
|
||||
}
|
||||
const errorMessage = error.error?.message || JSON.stringify(error)
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { createMockRequest, loggerMock } from '@sim/testing'
|
||||
import { createMockRequest, loggerMock, requestUtilsMock } from '@sim/testing'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
/** Mock execution dependencies for webhook tests */
|
||||
@@ -348,9 +348,7 @@ vi.mock('postgres', () => vi.fn().mockReturnValue({}))
|
||||
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
|
||||
vi.mock('@/lib/core/utils/request', () => ({
|
||||
generateRequestId: vi.fn().mockReturnValue('test-request-id'),
|
||||
}))
|
||||
vi.mock('@/lib/core/utils/request', () => requestUtilsMock)
|
||||
|
||||
process.env.DATABASE_URL = 'postgresql://test:test@localhost:5432/test'
|
||||
|
||||
|
||||
@@ -3,11 +3,11 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { loggerMock } from '@sim/testing'
|
||||
import { loggerMock, mockHybridAuth } from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
const mockCheckSessionOrInternalAuth = vi.fn()
|
||||
let mockCheckSessionOrInternalAuth: ReturnType<typeof vi.fn>
|
||||
const mockAuthorizeWorkflowByWorkspacePermission = vi.fn()
|
||||
const mockDbSelect = vi.fn()
|
||||
const mockDbFrom = vi.fn()
|
||||
@@ -48,9 +48,7 @@ describe('Workflow Chat Status Route', () => {
|
||||
workflowId: 'workflowId',
|
||||
},
|
||||
}))
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: mockCheckSessionOrInternalAuth,
|
||||
}))
|
||||
;({ mockCheckSessionOrInternalAuth } = mockHybridAuth())
|
||||
vi.doMock('@/lib/workflows/utils', () => ({
|
||||
authorizeWorkflowByWorkspacePermission: mockAuthorizeWorkflowByWorkspacePermission,
|
||||
}))
|
||||
|
||||
@@ -3,11 +3,11 @@
|
||||
*
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { loggerMock } from '@sim/testing'
|
||||
import { loggerMock, mockHybridAuth } from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
const mockCheckSessionOrInternalAuth = vi.fn()
|
||||
let mockCheckSessionOrInternalAuth: ReturnType<typeof vi.fn>
|
||||
const mockAuthorizeWorkflowByWorkspacePermission = vi.fn()
|
||||
const mockDbSelect = vi.fn()
|
||||
const mockDbFrom = vi.fn()
|
||||
@@ -43,9 +43,7 @@ describe('Workflow Form Status Route', () => {
|
||||
isActive: 'isActive',
|
||||
},
|
||||
}))
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: mockCheckSessionOrInternalAuth,
|
||||
}))
|
||||
;({ mockCheckSessionOrInternalAuth } = mockHybridAuth())
|
||||
vi.doMock('@/lib/workflows/utils', () => ({
|
||||
authorizeWorkflowByWorkspacePermission: mockAuthorizeWorkflowByWorkspacePermission,
|
||||
}))
|
||||
|
||||
@@ -5,11 +5,19 @@
|
||||
* @vitest-environment node
|
||||
*/
|
||||
|
||||
import { auditMock, loggerMock, setupGlobalFetchMock } from '@sim/testing'
|
||||
import {
|
||||
auditMock,
|
||||
envMock,
|
||||
loggerMock,
|
||||
requestUtilsMock,
|
||||
setupGlobalFetchMock,
|
||||
telemetryMock,
|
||||
} from '@sim/testing'
|
||||
import { NextRequest } from 'next/server'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
const mockGetSession = vi.fn()
|
||||
const mockCheckHybridAuth = vi.fn()
|
||||
const mockCheckSessionOrInternalAuth = vi.fn()
|
||||
const mockLoadWorkflowFromNormalizedTables = vi.fn()
|
||||
const mockGetWorkflowById = vi.fn()
|
||||
const mockAuthorizeWorkflowByWorkspacePermission = vi.fn()
|
||||
@@ -17,10 +25,34 @@ const mockDbDelete = vi.fn()
|
||||
const mockDbUpdate = vi.fn()
|
||||
const mockDbSelect = vi.fn()
|
||||
|
||||
/**
|
||||
* Helper to set mock auth state consistently across getSession and hybrid auth.
|
||||
*/
|
||||
function mockGetSession(session: { user: { id: string } } | null) {
|
||||
if (session) {
|
||||
mockCheckHybridAuth.mockResolvedValue({ success: true, userId: session.user.id })
|
||||
mockCheckSessionOrInternalAuth.mockResolvedValue({ success: true, userId: session.user.id })
|
||||
} else {
|
||||
mockCheckHybridAuth.mockResolvedValue({ success: false })
|
||||
mockCheckSessionOrInternalAuth.mockResolvedValue({ success: false })
|
||||
}
|
||||
}
|
||||
|
||||
vi.mock('@/lib/auth', () => ({
|
||||
getSession: () => mockGetSession(),
|
||||
getSession: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/auth/hybrid', () => ({
|
||||
checkHybridAuth: (...args: unknown[]) => mockCheckHybridAuth(...args),
|
||||
checkSessionOrInternalAuth: (...args: unknown[]) => mockCheckSessionOrInternalAuth(...args),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/core/config/env', () => envMock)
|
||||
|
||||
vi.mock('@/lib/core/telemetry', () => telemetryMock)
|
||||
|
||||
vi.mock('@/lib/core/utils/request', () => requestUtilsMock)
|
||||
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
|
||||
vi.mock('@/lib/audit/log', () => auditMock)
|
||||
@@ -30,20 +62,14 @@ vi.mock('@/lib/workflows/persistence/utils', () => ({
|
||||
mockLoadWorkflowFromNormalizedTables(workflowId),
|
||||
}))
|
||||
|
||||
vi.mock('@/lib/workflows/utils', async () => {
|
||||
const actual =
|
||||
await vi.importActual<typeof import('@/lib/workflows/utils')>('@/lib/workflows/utils')
|
||||
|
||||
return {
|
||||
...actual,
|
||||
getWorkflowById: (workflowId: string) => mockGetWorkflowById(workflowId),
|
||||
authorizeWorkflowByWorkspacePermission: (params: {
|
||||
workflowId: string
|
||||
userId: string
|
||||
action?: 'read' | 'write' | 'admin'
|
||||
}) => mockAuthorizeWorkflowByWorkspacePermission(params),
|
||||
}
|
||||
})
|
||||
vi.mock('@/lib/workflows/utils', () => ({
|
||||
getWorkflowById: (workflowId: string) => mockGetWorkflowById(workflowId),
|
||||
authorizeWorkflowByWorkspacePermission: (params: {
|
||||
workflowId: string
|
||||
userId: string
|
||||
action?: 'read' | 'write' | 'admin'
|
||||
}) => mockAuthorizeWorkflowByWorkspacePermission(params),
|
||||
}))
|
||||
|
||||
vi.mock('@sim/db', () => ({
|
||||
db: {
|
||||
@@ -73,7 +99,7 @@ describe('Workflow By ID API Route', () => {
|
||||
|
||||
describe('GET /api/workflows/[id]', () => {
|
||||
it('should return 401 when user is not authenticated', async () => {
|
||||
mockGetSession.mockResolvedValue(null)
|
||||
mockGetSession(null)
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123')
|
||||
const params = Promise.resolve({ id: 'workflow-123' })
|
||||
@@ -86,9 +112,7 @@ describe('Workflow By ID API Route', () => {
|
||||
})
|
||||
|
||||
it('should return 404 when workflow does not exist', async () => {
|
||||
mockGetSession.mockResolvedValue({
|
||||
user: { id: 'user-123' },
|
||||
})
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
|
||||
mockGetWorkflowById.mockResolvedValue(null)
|
||||
|
||||
@@ -118,9 +142,7 @@ describe('Workflow By ID API Route', () => {
|
||||
isFromNormalizedTables: true,
|
||||
}
|
||||
|
||||
mockGetSession.mockResolvedValue({
|
||||
user: { id: 'user-123' },
|
||||
})
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
|
||||
mockGetWorkflowById.mockResolvedValue(mockWorkflow)
|
||||
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
@@ -158,9 +180,7 @@ describe('Workflow By ID API Route', () => {
|
||||
isFromNormalizedTables: true,
|
||||
}
|
||||
|
||||
mockGetSession.mockResolvedValue({
|
||||
user: { id: 'user-123' },
|
||||
})
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
|
||||
mockGetWorkflowById.mockResolvedValue(mockWorkflow)
|
||||
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
@@ -190,9 +210,7 @@ describe('Workflow By ID API Route', () => {
|
||||
workspaceId: 'workspace-456',
|
||||
}
|
||||
|
||||
mockGetSession.mockResolvedValue({
|
||||
user: { id: 'user-123' },
|
||||
})
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
|
||||
mockGetWorkflowById.mockResolvedValue(mockWorkflow)
|
||||
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
@@ -229,9 +247,7 @@ describe('Workflow By ID API Route', () => {
|
||||
isFromNormalizedTables: true,
|
||||
}
|
||||
|
||||
mockGetSession.mockResolvedValue({
|
||||
user: { id: 'user-123' },
|
||||
})
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
|
||||
mockGetWorkflowById.mockResolvedValue(mockWorkflow)
|
||||
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
@@ -264,9 +280,7 @@ describe('Workflow By ID API Route', () => {
|
||||
workspaceId: 'workspace-456',
|
||||
}
|
||||
|
||||
mockGetSession.mockResolvedValue({
|
||||
user: { id: 'user-123' },
|
||||
})
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
|
||||
mockGetWorkflowById.mockResolvedValue(mockWorkflow)
|
||||
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
@@ -308,9 +322,7 @@ describe('Workflow By ID API Route', () => {
|
||||
workspaceId: 'workspace-456',
|
||||
}
|
||||
|
||||
mockGetSession.mockResolvedValue({
|
||||
user: { id: 'user-123' },
|
||||
})
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
|
||||
mockGetWorkflowById.mockResolvedValue(mockWorkflow)
|
||||
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
@@ -353,9 +365,7 @@ describe('Workflow By ID API Route', () => {
|
||||
workspaceId: 'workspace-456',
|
||||
}
|
||||
|
||||
mockGetSession.mockResolvedValue({
|
||||
user: { id: 'user-123' },
|
||||
})
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
|
||||
mockGetWorkflowById.mockResolvedValue(mockWorkflow)
|
||||
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
@@ -392,9 +402,7 @@ describe('Workflow By ID API Route', () => {
|
||||
workspaceId: 'workspace-456',
|
||||
}
|
||||
|
||||
mockGetSession.mockResolvedValue({
|
||||
user: { id: 'user-123' },
|
||||
})
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
|
||||
mockGetWorkflowById.mockResolvedValue(mockWorkflow)
|
||||
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
@@ -419,6 +427,16 @@ describe('Workflow By ID API Route', () => {
|
||||
})
|
||||
|
||||
describe('PUT /api/workflows/[id]', () => {
|
||||
function mockDuplicateCheck(results: Array<{ id: string }> = []) {
|
||||
mockDbSelect.mockReturnValue({
|
||||
from: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockReturnValue({
|
||||
limit: vi.fn().mockResolvedValue(results),
|
||||
}),
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
||||
it('should allow user with write permission to update workflow', async () => {
|
||||
const mockWorkflow = {
|
||||
id: 'workflow-123',
|
||||
@@ -430,9 +448,7 @@ describe('Workflow By ID API Route', () => {
|
||||
const updateData = { name: 'Updated Workflow' }
|
||||
const updatedWorkflow = { ...mockWorkflow, ...updateData, updatedAt: new Date() }
|
||||
|
||||
mockGetSession.mockResolvedValue({
|
||||
user: { id: 'user-123' },
|
||||
})
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
|
||||
mockGetWorkflowById.mockResolvedValue(mockWorkflow)
|
||||
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
@@ -442,6 +458,8 @@ describe('Workflow By ID API Route', () => {
|
||||
workspacePermission: 'write',
|
||||
})
|
||||
|
||||
mockDuplicateCheck([])
|
||||
|
||||
mockDbUpdate.mockReturnValue({
|
||||
set: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockReturnValue({
|
||||
@@ -474,9 +492,7 @@ describe('Workflow By ID API Route', () => {
|
||||
const updateData = { name: 'Updated Workflow' }
|
||||
const updatedWorkflow = { ...mockWorkflow, ...updateData, updatedAt: new Date() }
|
||||
|
||||
mockGetSession.mockResolvedValue({
|
||||
user: { id: 'user-123' },
|
||||
})
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
|
||||
mockGetWorkflowById.mockResolvedValue(mockWorkflow)
|
||||
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
@@ -486,6 +502,8 @@ describe('Workflow By ID API Route', () => {
|
||||
workspacePermission: 'write',
|
||||
})
|
||||
|
||||
mockDuplicateCheck([])
|
||||
|
||||
mockDbUpdate.mockReturnValue({
|
||||
set: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockReturnValue({
|
||||
@@ -517,9 +535,7 @@ describe('Workflow By ID API Route', () => {
|
||||
|
||||
const updateData = { name: 'Updated Workflow' }
|
||||
|
||||
mockGetSession.mockResolvedValue({
|
||||
user: { id: 'user-123' },
|
||||
})
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
|
||||
mockGetWorkflowById.mockResolvedValue(mockWorkflow)
|
||||
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
@@ -551,9 +567,7 @@ describe('Workflow By ID API Route', () => {
|
||||
workspaceId: 'workspace-456',
|
||||
}
|
||||
|
||||
mockGetSession.mockResolvedValue({
|
||||
user: { id: 'user-123' },
|
||||
})
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
|
||||
mockGetWorkflowById.mockResolvedValue(mockWorkflow)
|
||||
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
@@ -577,13 +591,238 @@ describe('Workflow By ID API Route', () => {
|
||||
const data = await response.json()
|
||||
expect(data.error).toBe('Invalid request data')
|
||||
})
|
||||
|
||||
it('should reject rename when duplicate name exists in same folder', async () => {
|
||||
const mockWorkflow = {
|
||||
id: 'workflow-123',
|
||||
userId: 'user-123',
|
||||
name: 'Original Name',
|
||||
folderId: 'folder-1',
|
||||
workspaceId: 'workspace-456',
|
||||
}
|
||||
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
mockGetWorkflowById.mockResolvedValue(mockWorkflow)
|
||||
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
allowed: true,
|
||||
status: 200,
|
||||
workflow: mockWorkflow,
|
||||
workspacePermission: 'write',
|
||||
})
|
||||
|
||||
mockDuplicateCheck([{ id: 'workflow-other' }])
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
|
||||
method: 'PUT',
|
||||
body: JSON.stringify({ name: 'Duplicate Name' }),
|
||||
})
|
||||
const params = Promise.resolve({ id: 'workflow-123' })
|
||||
|
||||
const response = await PUT(req, { params })
|
||||
|
||||
expect(response.status).toBe(409)
|
||||
const data = await response.json()
|
||||
expect(data.error).toBe('A workflow named "Duplicate Name" already exists in this folder')
|
||||
})
|
||||
|
||||
it('should reject rename when duplicate name exists at root level', async () => {
|
||||
const mockWorkflow = {
|
||||
id: 'workflow-123',
|
||||
userId: 'user-123',
|
||||
name: 'Original Name',
|
||||
folderId: null,
|
||||
workspaceId: 'workspace-456',
|
||||
}
|
||||
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
mockGetWorkflowById.mockResolvedValue(mockWorkflow)
|
||||
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
allowed: true,
|
||||
status: 200,
|
||||
workflow: mockWorkflow,
|
||||
workspacePermission: 'write',
|
||||
})
|
||||
|
||||
mockDuplicateCheck([{ id: 'workflow-other' }])
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
|
||||
method: 'PUT',
|
||||
body: JSON.stringify({ name: 'Duplicate Name' }),
|
||||
})
|
||||
const params = Promise.resolve({ id: 'workflow-123' })
|
||||
|
||||
const response = await PUT(req, { params })
|
||||
|
||||
expect(response.status).toBe(409)
|
||||
const data = await response.json()
|
||||
expect(data.error).toBe('A workflow named "Duplicate Name" already exists in this folder')
|
||||
})
|
||||
|
||||
it('should allow rename when no duplicate exists in same folder', async () => {
|
||||
const mockWorkflow = {
|
||||
id: 'workflow-123',
|
||||
userId: 'user-123',
|
||||
name: 'Original Name',
|
||||
folderId: 'folder-1',
|
||||
workspaceId: 'workspace-456',
|
||||
}
|
||||
|
||||
const updatedWorkflow = { ...mockWorkflow, name: 'Unique Name', updatedAt: new Date() }
|
||||
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
mockGetWorkflowById.mockResolvedValue(mockWorkflow)
|
||||
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
allowed: true,
|
||||
status: 200,
|
||||
workflow: mockWorkflow,
|
||||
workspacePermission: 'write',
|
||||
})
|
||||
|
||||
mockDuplicateCheck([])
|
||||
|
||||
mockDbUpdate.mockReturnValue({
|
||||
set: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockReturnValue({
|
||||
returning: vi.fn().mockResolvedValue([updatedWorkflow]),
|
||||
}),
|
||||
}),
|
||||
})
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
|
||||
method: 'PUT',
|
||||
body: JSON.stringify({ name: 'Unique Name' }),
|
||||
})
|
||||
const params = Promise.resolve({ id: 'workflow-123' })
|
||||
|
||||
const response = await PUT(req, { params })
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
const data = await response.json()
|
||||
expect(data.workflow.name).toBe('Unique Name')
|
||||
})
|
||||
|
||||
it('should allow same name in different folders', async () => {
|
||||
const mockWorkflow = {
|
||||
id: 'workflow-123',
|
||||
userId: 'user-123',
|
||||
name: 'My Workflow',
|
||||
folderId: 'folder-1',
|
||||
workspaceId: 'workspace-456',
|
||||
}
|
||||
|
||||
const updatedWorkflow = { ...mockWorkflow, folderId: 'folder-2', updatedAt: new Date() }
|
||||
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
mockGetWorkflowById.mockResolvedValue(mockWorkflow)
|
||||
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
allowed: true,
|
||||
status: 200,
|
||||
workflow: mockWorkflow,
|
||||
workspacePermission: 'write',
|
||||
})
|
||||
|
||||
// No duplicate in target folder
|
||||
mockDuplicateCheck([])
|
||||
|
||||
mockDbUpdate.mockReturnValue({
|
||||
set: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockReturnValue({
|
||||
returning: vi.fn().mockResolvedValue([updatedWorkflow]),
|
||||
}),
|
||||
}),
|
||||
})
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
|
||||
method: 'PUT',
|
||||
body: JSON.stringify({ folderId: 'folder-2' }),
|
||||
})
|
||||
const params = Promise.resolve({ id: 'workflow-123' })
|
||||
|
||||
const response = await PUT(req, { params })
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
const data = await response.json()
|
||||
expect(data.workflow.folderId).toBe('folder-2')
|
||||
})
|
||||
|
||||
it('should reject moving to a folder where same name already exists', async () => {
|
||||
const mockWorkflow = {
|
||||
id: 'workflow-123',
|
||||
userId: 'user-123',
|
||||
name: 'My Workflow',
|
||||
folderId: 'folder-1',
|
||||
workspaceId: 'workspace-456',
|
||||
}
|
||||
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
mockGetWorkflowById.mockResolvedValue(mockWorkflow)
|
||||
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
allowed: true,
|
||||
status: 200,
|
||||
workflow: mockWorkflow,
|
||||
workspacePermission: 'write',
|
||||
})
|
||||
|
||||
// Duplicate exists in target folder
|
||||
mockDuplicateCheck([{ id: 'workflow-other' }])
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
|
||||
method: 'PUT',
|
||||
body: JSON.stringify({ folderId: 'folder-2' }),
|
||||
})
|
||||
const params = Promise.resolve({ id: 'workflow-123' })
|
||||
|
||||
const response = await PUT(req, { params })
|
||||
|
||||
expect(response.status).toBe(409)
|
||||
const data = await response.json()
|
||||
expect(data.error).toBe('A workflow named "My Workflow" already exists in this folder')
|
||||
})
|
||||
|
||||
it('should skip duplicate check when only updating non-name/non-folder fields', async () => {
|
||||
const mockWorkflow = {
|
||||
id: 'workflow-123',
|
||||
userId: 'user-123',
|
||||
name: 'Test Workflow',
|
||||
workspaceId: 'workspace-456',
|
||||
}
|
||||
|
||||
const updatedWorkflow = { ...mockWorkflow, color: '#FF0000', updatedAt: new Date() }
|
||||
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
mockGetWorkflowById.mockResolvedValue(mockWorkflow)
|
||||
mockAuthorizeWorkflowByWorkspacePermission.mockResolvedValue({
|
||||
allowed: true,
|
||||
status: 200,
|
||||
workflow: mockWorkflow,
|
||||
workspacePermission: 'write',
|
||||
})
|
||||
|
||||
mockDbUpdate.mockReturnValue({
|
||||
set: vi.fn().mockReturnValue({
|
||||
where: vi.fn().mockReturnValue({
|
||||
returning: vi.fn().mockResolvedValue([updatedWorkflow]),
|
||||
}),
|
||||
}),
|
||||
})
|
||||
|
||||
const req = new NextRequest('http://localhost:3000/api/workflows/workflow-123', {
|
||||
method: 'PUT',
|
||||
body: JSON.stringify({ color: '#FF0000' }),
|
||||
})
|
||||
const params = Promise.resolve({ id: 'workflow-123' })
|
||||
|
||||
const response = await PUT(req, { params })
|
||||
|
||||
expect(response.status).toBe(200)
|
||||
// db.select should NOT have been called since no name/folder change
|
||||
expect(mockDbSelect).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Error handling', () => {
|
||||
it.concurrent('should handle database errors gracefully', async () => {
|
||||
mockGetSession.mockResolvedValue({
|
||||
user: { id: 'user-123' },
|
||||
})
|
||||
mockGetSession({ user: { id: 'user-123' } })
|
||||
|
||||
mockGetWorkflowById.mockRejectedValue(new Error('Database connection timeout'))
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { db } from '@sim/db'
|
||||
import { templates, webhook, workflow } from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { and, eq, isNull, ne } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { AuditAction, AuditResourceType, recordAudit } from '@/lib/audit/log'
|
||||
@@ -411,6 +411,45 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
|
||||
if (updates.folderId !== undefined) updateData.folderId = updates.folderId
|
||||
if (updates.sortOrder !== undefined) updateData.sortOrder = updates.sortOrder
|
||||
|
||||
if (updates.name !== undefined || updates.folderId !== undefined) {
|
||||
const targetName = updates.name ?? workflowData.name
|
||||
const targetFolderId =
|
||||
updates.folderId !== undefined ? updates.folderId : workflowData.folderId
|
||||
|
||||
if (!workflowData.workspaceId) {
|
||||
logger.error(`[${requestId}] Workflow ${workflowId} has no workspaceId`)
|
||||
return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
|
||||
}
|
||||
|
||||
const conditions = [
|
||||
eq(workflow.workspaceId, workflowData.workspaceId),
|
||||
eq(workflow.name, targetName),
|
||||
ne(workflow.id, workflowId),
|
||||
]
|
||||
|
||||
if (targetFolderId) {
|
||||
conditions.push(eq(workflow.folderId, targetFolderId))
|
||||
} else {
|
||||
conditions.push(isNull(workflow.folderId))
|
||||
}
|
||||
|
||||
const [duplicate] = await db
|
||||
.select({ id: workflow.id })
|
||||
.from(workflow)
|
||||
.where(and(...conditions))
|
||||
.limit(1)
|
||||
|
||||
if (duplicate) {
|
||||
logger.warn(
|
||||
`[${requestId}] Duplicate workflow name "${targetName}" in folder ${targetFolderId ?? 'root'}`
|
||||
)
|
||||
return NextResponse.json(
|
||||
{ error: `A workflow named "${targetName}" already exists in this folder` },
|
||||
{ status: 409 }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Update the workflow
|
||||
const [updatedWorkflow] = await db
|
||||
.update(workflow)
|
||||
|
||||
@@ -1,11 +1,16 @@
|
||||
/**
|
||||
* @vitest-environment node
|
||||
*/
|
||||
import { auditMock, createMockRequest, mockConsoleLogger, setupCommonApiMocks } from '@sim/testing'
|
||||
import {
|
||||
auditMock,
|
||||
createMockRequest,
|
||||
mockConsoleLogger,
|
||||
mockHybridAuth,
|
||||
setupCommonApiMocks,
|
||||
} from '@sim/testing'
|
||||
import { drizzleOrmMock } from '@sim/testing/mocks'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
const mockCheckSessionOrInternalAuth = vi.fn()
|
||||
const mockGetUserEntityPermissions = vi.fn()
|
||||
const mockDbSelect = vi.fn()
|
||||
const mockDbInsert = vi.fn()
|
||||
@@ -30,6 +35,7 @@ describe('Workflows API Route - POST ordering', () => {
|
||||
randomUUID: vi.fn().mockReturnValue('workflow-new-id'),
|
||||
})
|
||||
|
||||
const { mockCheckSessionOrInternalAuth } = mockHybridAuth()
|
||||
mockCheckSessionOrInternalAuth.mockResolvedValue({
|
||||
success: true,
|
||||
userId: 'user-123',
|
||||
@@ -45,10 +51,6 @@ describe('Workflows API Route - POST ordering', () => {
|
||||
},
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/auth/hybrid', () => ({
|
||||
checkSessionOrInternalAuth: (...args: unknown[]) => mockCheckSessionOrInternalAuth(...args),
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/workspaces/permissions/utils', () => ({
|
||||
getUserEntityPermissions: (...args: unknown[]) => mockGetUserEntityPermissions(...args),
|
||||
workspaceExists: vi.fn(),
|
||||
|
||||
@@ -208,9 +208,10 @@ export default function Logs() {
|
||||
|
||||
const selectedLog = useMemo(() => {
|
||||
if (!selectedLogFromList) return null
|
||||
if (!activeLogQuery.data || isPreviewOpen) return selectedLogFromList
|
||||
if (!activeLogQuery.data || isPreviewOpen || activeLogQuery.isPlaceholderData)
|
||||
return selectedLogFromList
|
||||
return { ...selectedLogFromList, ...activeLogQuery.data }
|
||||
}, [selectedLogFromList, activeLogQuery.data, isPreviewOpen])
|
||||
}, [selectedLogFromList, activeLogQuery.data, activeLogQuery.isPlaceholderData, isPreviewOpen])
|
||||
|
||||
const handleLogHover = useCallback(
|
||||
(log: WorkflowLog) => {
|
||||
@@ -650,7 +651,7 @@ export default function Logs() {
|
||||
hasActiveFilters={filtersActive}
|
||||
/>
|
||||
|
||||
{isPreviewOpen && activeLogQuery.data?.executionId && (
|
||||
{isPreviewOpen && !activeLogQuery.isPlaceholderData && activeLogQuery.data?.executionId && (
|
||||
<ExecutionSnapshot
|
||||
executionId={activeLogQuery.data.executionId}
|
||||
traceSpans={activeLogQuery.data.executionData?.traceSpans}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { redirect } from 'next/navigation'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { verifyWorkspaceMembership } from '@/app/api/workflows/utils'
|
||||
import { getUserPermissionConfig } from '@/ee/access-control/utils/permission-check'
|
||||
import { TablesView } from './components'
|
||||
|
||||
interface TablesPageProps {
|
||||
@@ -22,5 +23,10 @@ export default async function TablesPage({ params }: TablesPageProps) {
|
||||
redirect('/')
|
||||
}
|
||||
|
||||
const permissionConfig = await getUserPermissionConfig(session.user.id)
|
||||
if (permissionConfig?.hideTablesTab) {
|
||||
redirect(`/workspace/${workspaceId}`)
|
||||
}
|
||||
|
||||
return <TablesView />
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { useMemo } from 'react'
|
||||
import { hasWorkflowChanged } from '@/lib/workflows/comparison'
|
||||
import { mergeSubblockStateWithValues } from '@/lib/workflows/subblocks'
|
||||
import { useVariablesStore } from '@/stores/panel/variables/store'
|
||||
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
@@ -42,44 +43,10 @@ export function useChangeDetection({
|
||||
const currentState = useMemo((): WorkflowState | null => {
|
||||
if (!workflowId) return null
|
||||
|
||||
const blocksWithSubBlocks: WorkflowState['blocks'] = {}
|
||||
for (const [blockId, block] of Object.entries(blocks)) {
|
||||
const blockSubValues = subBlockValues?.[blockId] || {}
|
||||
const subBlocks: Record<string, any> = {}
|
||||
|
||||
if (block.subBlocks) {
|
||||
for (const [subId, subBlock] of Object.entries(block.subBlocks)) {
|
||||
const storedValue = blockSubValues[subId]
|
||||
subBlocks[subId] = {
|
||||
...subBlock,
|
||||
value: storedValue !== undefined ? storedValue : subBlock.value,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (block.triggerMode) {
|
||||
const triggerConfigValue = blockSubValues?.triggerConfig
|
||||
if (
|
||||
triggerConfigValue &&
|
||||
typeof triggerConfigValue === 'object' &&
|
||||
!subBlocks.triggerConfig
|
||||
) {
|
||||
subBlocks.triggerConfig = {
|
||||
id: 'triggerConfig',
|
||||
type: 'short-input',
|
||||
value: triggerConfigValue,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
blocksWithSubBlocks[blockId] = {
|
||||
...block,
|
||||
subBlocks,
|
||||
}
|
||||
}
|
||||
const mergedBlocks = mergeSubblockStateWithValues(blocks, subBlockValues ?? {})
|
||||
|
||||
return {
|
||||
blocks: blocksWithSubBlocks,
|
||||
blocks: mergedBlocks,
|
||||
edges,
|
||||
loops,
|
||||
parallels,
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
import { Plus } from 'lucide-react'
|
||||
import { Button } from '@/components/emcn'
|
||||
|
||||
interface EmptyStateProps {
|
||||
onAdd: () => void
|
||||
disabled: boolean
|
||||
label: string
|
||||
}
|
||||
|
||||
export function EmptyState({ onAdd, disabled, label }: EmptyStateProps) {
|
||||
return (
|
||||
<div className='flex items-center justify-center rounded-[4px] border border-[var(--border-1)] border-dashed py-[16px]'>
|
||||
<Button variant='ghost' size='sm' onClick={onAdd} disabled={disabled}>
|
||||
<Plus className='mr-[4px] h-[12px] w-[12px]' />
|
||||
{label}
|
||||
</Button>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -1,9 +1,19 @@
|
||||
import { X } from 'lucide-react'
|
||||
import { Button, Combobox, type ComboboxOption, Input } from '@/components/emcn'
|
||||
import { useRef } from 'react'
|
||||
import { Plus } from 'lucide-react'
|
||||
import {
|
||||
Badge,
|
||||
Button,
|
||||
Combobox,
|
||||
type ComboboxOption,
|
||||
Input,
|
||||
Label,
|
||||
Trash,
|
||||
} from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import type { FilterRule } from '@/lib/table/query-builder/constants'
|
||||
import { formatDisplayText } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/formatted-text'
|
||||
import { SubBlockInputController } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/sub-block-input-controller'
|
||||
import { TagDropdown } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/components/tag-dropdown/tag-dropdown'
|
||||
import type { useSubBlockInput } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-input'
|
||||
import { useAccessibleReferencePrefixes } from '@/app/workspace/[workspaceId]/w/[workflowId]/hooks/use-accessible-reference-prefixes'
|
||||
|
||||
interface FilterRuleRowProps {
|
||||
@@ -17,121 +27,196 @@ interface FilterRuleRowProps {
|
||||
isReadOnly: boolean
|
||||
isPreview: boolean
|
||||
disabled: boolean
|
||||
onAdd: () => void
|
||||
onRemove: (id: string) => void
|
||||
onUpdate: (id: string, field: keyof FilterRule, value: string) => void
|
||||
onToggleCollapse: (id: string) => void
|
||||
inputController: ReturnType<typeof useSubBlockInput>
|
||||
}
|
||||
|
||||
export function FilterRuleRow({
|
||||
blockId,
|
||||
subBlockId,
|
||||
rule,
|
||||
index,
|
||||
columns,
|
||||
comparisonOptions,
|
||||
logicalOptions,
|
||||
isReadOnly,
|
||||
isPreview,
|
||||
disabled,
|
||||
onAdd,
|
||||
onRemove,
|
||||
onUpdate,
|
||||
onToggleCollapse,
|
||||
inputController,
|
||||
}: FilterRuleRowProps) {
|
||||
const accessiblePrefixes = useAccessibleReferencePrefixes(blockId)
|
||||
const valueInputRef = useRef<HTMLInputElement>(null)
|
||||
const overlayRef = useRef<HTMLDivElement>(null)
|
||||
|
||||
return (
|
||||
<div className='flex items-center gap-[6px]'>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={() => onRemove(rule.id)}
|
||||
const syncOverlayScroll = (scrollLeft: number) => {
|
||||
if (overlayRef.current) overlayRef.current.scrollLeft = scrollLeft
|
||||
}
|
||||
|
||||
const cellKey = `filter-${rule.id}-value`
|
||||
const fieldState = inputController.fieldHelpers.getFieldState(cellKey)
|
||||
const handlers = inputController.fieldHelpers.createFieldHandlers(
|
||||
cellKey,
|
||||
rule.value,
|
||||
(newValue) => onUpdate(rule.id, 'value', newValue)
|
||||
)
|
||||
const tagSelectHandler = inputController.fieldHelpers.createTagSelectHandler(
|
||||
cellKey,
|
||||
rule.value,
|
||||
(newValue) => onUpdate(rule.id, 'value', newValue)
|
||||
)
|
||||
|
||||
const getOperatorLabel = (value: string) => {
|
||||
const option = comparisonOptions.find((op) => op.value === value)
|
||||
return option?.label || value
|
||||
}
|
||||
|
||||
const getColumnLabel = (value: string) => {
|
||||
const option = columns.find((col) => col.value === value)
|
||||
return option?.label || value
|
||||
}
|
||||
|
||||
const renderHeader = () => (
|
||||
<div
|
||||
className='flex cursor-pointer items-center justify-between rounded-t-[4px] bg-[var(--surface-4)] px-[10px] py-[5px]'
|
||||
onClick={() => onToggleCollapse(rule.id)}
|
||||
>
|
||||
<div className='flex min-w-0 flex-1 items-center gap-[8px]'>
|
||||
<span className='block truncate font-medium text-[14px] text-[var(--text-tertiary)]'>
|
||||
{rule.collapsed && rule.column ? getColumnLabel(rule.column) : `Condition ${index + 1}`}
|
||||
</span>
|
||||
{rule.collapsed && rule.column && (
|
||||
<Badge variant='type' size='sm'>
|
||||
{getOperatorLabel(rule.operator)}
|
||||
</Badge>
|
||||
)}
|
||||
</div>
|
||||
<div className='flex items-center gap-[8px] pl-[8px]' onClick={(e) => e.stopPropagation()}>
|
||||
<Button variant='ghost' onClick={onAdd} disabled={isReadOnly} className='h-auto p-0'>
|
||||
<Plus className='h-[14px] w-[14px]' />
|
||||
<span className='sr-only'>Add Condition</span>
|
||||
</Button>
|
||||
<Button
|
||||
variant='ghost'
|
||||
onClick={() => onRemove(rule.id)}
|
||||
disabled={isReadOnly}
|
||||
className='h-auto p-0 text-[var(--text-error)] hover:text-[var(--text-error)]'
|
||||
>
|
||||
<Trash className='h-[14px] w-[14px]' />
|
||||
<span className='sr-only'>Delete Condition</span>
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
|
||||
const renderValueInput = () => (
|
||||
<div className='relative'>
|
||||
<Input
|
||||
ref={valueInputRef}
|
||||
value={rule.value}
|
||||
onChange={handlers.onChange}
|
||||
onKeyDown={handlers.onKeyDown}
|
||||
onDrop={handlers.onDrop}
|
||||
onDragOver={handlers.onDragOver}
|
||||
onFocus={handlers.onFocus}
|
||||
onScroll={(e) => syncOverlayScroll(e.currentTarget.scrollLeft)}
|
||||
onPaste={() =>
|
||||
setTimeout(() => {
|
||||
if (valueInputRef.current) {
|
||||
syncOverlayScroll(valueInputRef.current.scrollLeft)
|
||||
}
|
||||
}, 0)
|
||||
}
|
||||
disabled={isReadOnly}
|
||||
className='h-[24px] w-[24px] shrink-0 p-0 text-[var(--text-tertiary)] hover:text-[var(--text-primary)]'
|
||||
autoComplete='off'
|
||||
placeholder='Enter value'
|
||||
className='allow-scroll w-full overflow-auto text-transparent caret-foreground'
|
||||
/>
|
||||
<div
|
||||
ref={overlayRef}
|
||||
className={cn(
|
||||
'absolute inset-0 flex items-center overflow-x-auto bg-transparent px-[8px] py-[6px] font-medium font-sans text-sm',
|
||||
!isReadOnly && 'pointer-events-none'
|
||||
)}
|
||||
>
|
||||
<X className='h-[12px] w-[12px]' />
|
||||
</Button>
|
||||
<div className='w-full whitespace-pre' style={{ minWidth: 'fit-content' }}>
|
||||
{formatDisplayText(
|
||||
rule.value,
|
||||
accessiblePrefixes ? { accessiblePrefixes } : { highlightAll: true }
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
{fieldState.showTags && (
|
||||
<TagDropdown
|
||||
visible={fieldState.showTags}
|
||||
onSelect={tagSelectHandler}
|
||||
blockId={blockId}
|
||||
activeSourceBlockId={fieldState.activeSourceBlockId}
|
||||
inputValue={rule.value}
|
||||
cursorPosition={fieldState.cursorPosition}
|
||||
onClose={() => inputController.fieldHelpers.hideFieldDropdowns(cellKey)}
|
||||
inputRef={valueInputRef.current ? { current: valueInputRef.current } : undefined}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
|
||||
<div className='w-[80px] shrink-0'>
|
||||
{index === 0 ? (
|
||||
const renderContent = () => (
|
||||
<div className='flex flex-col gap-[8px] border-[var(--border-1)] border-t px-[10px] pt-[6px] pb-[10px]'>
|
||||
{index > 0 && (
|
||||
<div className='flex flex-col gap-[6px]'>
|
||||
<Label className='text-[13px]'>Logic</Label>
|
||||
<Combobox
|
||||
size='sm'
|
||||
options={[{ value: 'where', label: 'where' }]}
|
||||
value='where'
|
||||
disabled
|
||||
/>
|
||||
) : (
|
||||
<Combobox
|
||||
size='sm'
|
||||
options={logicalOptions}
|
||||
value={rule.logicalOperator}
|
||||
onChange={(v) => onUpdate(rule.id, 'logicalOperator', v as 'and' | 'or')}
|
||||
disabled={isReadOnly}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className='w-[100px] shrink-0'>
|
||||
<div className='flex flex-col gap-[6px]'>
|
||||
<Label className='text-[13px]'>Column</Label>
|
||||
<Combobox
|
||||
size='sm'
|
||||
options={columns}
|
||||
value={rule.column}
|
||||
onChange={(v) => onUpdate(rule.id, 'column', v)}
|
||||
placeholder='Column'
|
||||
disabled={isReadOnly}
|
||||
placeholder='Select column'
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className='w-[110px] shrink-0'>
|
||||
<div className='flex flex-col gap-[6px]'>
|
||||
<Label className='text-[13px]'>Operator</Label>
|
||||
<Combobox
|
||||
size='sm'
|
||||
options={comparisonOptions}
|
||||
value={rule.operator}
|
||||
onChange={(v) => onUpdate(rule.id, 'operator', v)}
|
||||
disabled={isReadOnly}
|
||||
placeholder='Select operator'
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className='relative min-w-[80px] flex-1'>
|
||||
<SubBlockInputController
|
||||
blockId={blockId}
|
||||
subBlockId={`${subBlockId}_filter_${rule.id}`}
|
||||
config={{ id: `filter_value_${rule.id}`, type: 'short-input' }}
|
||||
value={rule.value}
|
||||
onChange={(newValue) => onUpdate(rule.id, 'value', newValue)}
|
||||
isPreview={isPreview}
|
||||
disabled={disabled}
|
||||
>
|
||||
{({ ref, value: ctrlValue, onChange, onKeyDown, onDrop, onDragOver }) => {
|
||||
const formattedText = formatDisplayText(ctrlValue, {
|
||||
accessiblePrefixes,
|
||||
highlightAll: !accessiblePrefixes,
|
||||
})
|
||||
|
||||
return (
|
||||
<div className='relative'>
|
||||
<Input
|
||||
ref={ref as React.RefObject<HTMLInputElement>}
|
||||
className='h-[28px] w-full overflow-auto text-[12px] text-transparent caret-foreground [-ms-overflow-style:none] [scrollbar-width:none] placeholder:text-muted-foreground/50 [&::-webkit-scrollbar]:hidden'
|
||||
value={ctrlValue}
|
||||
onChange={onChange as (e: React.ChangeEvent<HTMLInputElement>) => void}
|
||||
onKeyDown={onKeyDown as (e: React.KeyboardEvent<HTMLInputElement>) => void}
|
||||
onDrop={onDrop as (e: React.DragEvent<HTMLInputElement>) => void}
|
||||
onDragOver={onDragOver as (e: React.DragEvent<HTMLInputElement>) => void}
|
||||
placeholder='Value'
|
||||
disabled={isReadOnly}
|
||||
autoComplete='off'
|
||||
/>
|
||||
<div
|
||||
className={cn(
|
||||
'pointer-events-none absolute inset-0 flex items-center overflow-x-auto bg-transparent px-[8px] py-[6px] font-medium font-sans text-[12px] text-foreground [-ms-overflow-style:none] [scrollbar-width:none] [&::-webkit-scrollbar]:hidden',
|
||||
(isPreview || disabled) && 'opacity-50'
|
||||
)}
|
||||
>
|
||||
<div className='min-w-fit whitespace-pre'>{formattedText}</div>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}}
|
||||
</SubBlockInputController>
|
||||
<div className='flex flex-col gap-[6px]'>
|
||||
<Label className='text-[13px]'>Value</Label>
|
||||
{renderValueInput()}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
|
||||
return (
|
||||
<div
|
||||
data-filter-id={rule.id}
|
||||
className={cn(
|
||||
'rounded-[4px] border border-[var(--border-1)]',
|
||||
rule.collapsed ? 'overflow-hidden' : 'overflow-visible'
|
||||
)}
|
||||
>
|
||||
{renderHeader()}
|
||||
{!rule.collapsed && renderContent()}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
'use client'
|
||||
|
||||
import { useMemo } from 'react'
|
||||
import { Plus } from 'lucide-react'
|
||||
import { Button } from '@/components/emcn'
|
||||
import { useCallback, useMemo } from 'react'
|
||||
import type { ComboboxOption } from '@/components/emcn'
|
||||
import { useTableColumns } from '@/lib/table/hooks'
|
||||
import type { FilterRule } from '@/lib/table/query-builder/constants'
|
||||
import { useFilterBuilder } from '@/lib/table/query-builder/use-query-builder'
|
||||
import { useSubBlockInput } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-input'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { EmptyState } from './components/empty-state'
|
||||
import { FilterRuleRow } from './components/filter-rule-row'
|
||||
|
||||
interface FilterBuilderProps {
|
||||
@@ -20,6 +19,15 @@ interface FilterBuilderProps {
|
||||
tableIdSubBlockId?: string
|
||||
}
|
||||
|
||||
const createDefaultRule = (columns: ComboboxOption[]): FilterRule => ({
|
||||
id: crypto.randomUUID(),
|
||||
logicalOperator: 'and',
|
||||
column: columns[0]?.value || '',
|
||||
operator: 'eq',
|
||||
value: '',
|
||||
collapsed: false,
|
||||
})
|
||||
|
||||
/** Visual builder for table filter rules in workflow blocks. */
|
||||
export function FilterBuilder({
|
||||
blockId,
|
||||
@@ -40,7 +48,8 @@ export function FilterBuilder({
|
||||
}, [propColumns, dynamicColumns])
|
||||
|
||||
const value = isPreview ? previewValue : storeValue
|
||||
const rules: FilterRule[] = Array.isArray(value) && value.length > 0 ? value : []
|
||||
const rules: FilterRule[] =
|
||||
Array.isArray(value) && value.length > 0 ? value : [createDefaultRule(columns)]
|
||||
const isReadOnly = isPreview || disabled
|
||||
|
||||
const { comparisonOptions, logicalOptions, addRule, removeRule, updateRule } = useFilterBuilder({
|
||||
@@ -50,41 +59,60 @@ export function FilterBuilder({
|
||||
isReadOnly,
|
||||
})
|
||||
|
||||
const inputController = useSubBlockInput({
|
||||
blockId,
|
||||
subBlockId,
|
||||
config: {
|
||||
id: subBlockId,
|
||||
type: 'filter-builder',
|
||||
connectionDroppable: true,
|
||||
},
|
||||
isPreview,
|
||||
disabled,
|
||||
})
|
||||
|
||||
const toggleCollapse = useCallback(
|
||||
(id: string) => {
|
||||
if (isReadOnly) return
|
||||
setStoreValue(rules.map((r) => (r.id === id ? { ...r, collapsed: !r.collapsed } : r)))
|
||||
},
|
||||
[isReadOnly, rules, setStoreValue]
|
||||
)
|
||||
|
||||
const handleRemoveRule = useCallback(
|
||||
(id: string) => {
|
||||
if (isReadOnly) return
|
||||
if (rules.length === 1) {
|
||||
setStoreValue([createDefaultRule(columns)])
|
||||
} else {
|
||||
removeRule(id)
|
||||
}
|
||||
},
|
||||
[isReadOnly, rules, columns, setStoreValue, removeRule]
|
||||
)
|
||||
|
||||
return (
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
{rules.length === 0 ? (
|
||||
<EmptyState onAdd={addRule} disabled={isReadOnly} label='Add filter rule' />
|
||||
) : (
|
||||
<>
|
||||
{rules.map((rule, index) => (
|
||||
<FilterRuleRow
|
||||
key={rule.id}
|
||||
blockId={blockId}
|
||||
subBlockId={subBlockId}
|
||||
rule={rule}
|
||||
index={index}
|
||||
columns={columns}
|
||||
comparisonOptions={comparisonOptions}
|
||||
logicalOptions={logicalOptions}
|
||||
isReadOnly={isReadOnly}
|
||||
isPreview={isPreview}
|
||||
disabled={disabled}
|
||||
onRemove={removeRule}
|
||||
onUpdate={updateRule}
|
||||
/>
|
||||
))}
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={addRule}
|
||||
disabled={isReadOnly}
|
||||
className='self-start'
|
||||
>
|
||||
<Plus className='mr-[4px] h-[12px] w-[12px]' />
|
||||
Add rule
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
<div className='space-y-[8px]'>
|
||||
{rules.map((rule, index) => (
|
||||
<FilterRuleRow
|
||||
key={rule.id}
|
||||
blockId={blockId}
|
||||
subBlockId={subBlockId}
|
||||
rule={rule}
|
||||
index={index}
|
||||
columns={columns}
|
||||
comparisonOptions={comparisonOptions}
|
||||
logicalOptions={logicalOptions}
|
||||
isReadOnly={isReadOnly}
|
||||
isPreview={isPreview}
|
||||
disabled={disabled}
|
||||
onAdd={addRule}
|
||||
onRemove={handleRemoveRule}
|
||||
onUpdate={updateRule}
|
||||
onToggleCollapse={toggleCollapse}
|
||||
inputController={inputController}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
import { Plus } from 'lucide-react'
|
||||
import { Button } from '@/components/emcn'
|
||||
|
||||
interface EmptyStateProps {
|
||||
onAdd: () => void
|
||||
disabled: boolean
|
||||
label: string
|
||||
}
|
||||
|
||||
export function EmptyState({ onAdd, disabled, label }: EmptyStateProps) {
|
||||
return (
|
||||
<div className='flex items-center justify-center rounded-[4px] border border-[var(--border-1)] border-dashed py-[16px]'>
|
||||
<Button variant='ghost' size='sm' onClick={onAdd} disabled={disabled}>
|
||||
<Plus className='mr-[4px] h-[12px] w-[12px]' />
|
||||
{label}
|
||||
</Button>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
@@ -1,5 +1,6 @@
|
||||
import { X } from 'lucide-react'
|
||||
import { Button, Combobox, type ComboboxOption } from '@/components/emcn'
|
||||
import { Plus } from 'lucide-react'
|
||||
import { Badge, Button, Combobox, type ComboboxOption, Label, Trash } from '@/components/emcn'
|
||||
import { cn } from '@/lib/core/utils/cn'
|
||||
import type { SortRule } from '@/lib/table/query-builder/constants'
|
||||
|
||||
interface SortRuleRowProps {
|
||||
@@ -8,8 +9,10 @@ interface SortRuleRowProps {
|
||||
columns: ComboboxOption[]
|
||||
directionOptions: ComboboxOption[]
|
||||
isReadOnly: boolean
|
||||
onAdd: () => void
|
||||
onRemove: (id: string) => void
|
||||
onUpdate: (id: string, field: keyof SortRule, value: string) => void
|
||||
onToggleCollapse: (id: string) => void
|
||||
}
|
||||
|
||||
export function SortRuleRow({
|
||||
@@ -18,50 +21,90 @@ export function SortRuleRow({
|
||||
columns,
|
||||
directionOptions,
|
||||
isReadOnly,
|
||||
onAdd,
|
||||
onRemove,
|
||||
onUpdate,
|
||||
onToggleCollapse,
|
||||
}: SortRuleRowProps) {
|
||||
return (
|
||||
<div className='flex items-center gap-[6px]'>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={() => onRemove(rule.id)}
|
||||
disabled={isReadOnly}
|
||||
className='h-[24px] w-[24px] shrink-0 p-0 text-[var(--text-tertiary)] hover:text-[var(--text-primary)]'
|
||||
>
|
||||
<X className='h-[12px] w-[12px]' />
|
||||
</Button>
|
||||
const getDirectionLabel = (value: string) => {
|
||||
const option = directionOptions.find((dir) => dir.value === value)
|
||||
return option?.label || value
|
||||
}
|
||||
|
||||
<div className='w-[90px] shrink-0'>
|
||||
<Combobox
|
||||
size='sm'
|
||||
options={[{ value: String(index + 1), label: index === 0 ? 'order by' : 'then by' }]}
|
||||
value={String(index + 1)}
|
||||
disabled
|
||||
/>
|
||||
const getColumnLabel = (value: string) => {
|
||||
const option = columns.find((col) => col.value === value)
|
||||
return option?.label || value
|
||||
}
|
||||
|
||||
const renderHeader = () => (
|
||||
<div
|
||||
className='flex cursor-pointer items-center justify-between rounded-t-[4px] bg-[var(--surface-4)] px-[10px] py-[5px]'
|
||||
onClick={() => onToggleCollapse(rule.id)}
|
||||
>
|
||||
<div className='flex min-w-0 flex-1 items-center gap-[8px]'>
|
||||
<span className='block truncate font-medium text-[14px] text-[var(--text-tertiary)]'>
|
||||
{rule.collapsed && rule.column ? getColumnLabel(rule.column) : `Sort ${index + 1}`}
|
||||
</span>
|
||||
{rule.collapsed && rule.column && (
|
||||
<Badge variant='type' size='sm'>
|
||||
{getDirectionLabel(rule.direction)}
|
||||
</Badge>
|
||||
)}
|
||||
</div>
|
||||
<div className='flex items-center gap-[8px] pl-[8px]' onClick={(e) => e.stopPropagation()}>
|
||||
<Button variant='ghost' onClick={onAdd} disabled={isReadOnly} className='h-auto p-0'>
|
||||
<Plus className='h-[14px] w-[14px]' />
|
||||
<span className='sr-only'>Add Sort</span>
|
||||
</Button>
|
||||
<Button
|
||||
variant='ghost'
|
||||
onClick={() => onRemove(rule.id)}
|
||||
disabled={isReadOnly}
|
||||
className='h-auto p-0 text-[var(--text-error)] hover:text-[var(--text-error)]'
|
||||
>
|
||||
<Trash className='h-[14px] w-[14px]' />
|
||||
<span className='sr-only'>Delete Sort</span>
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
|
||||
<div className='min-w-[120px] flex-1'>
|
||||
const renderContent = () => (
|
||||
<div className='flex flex-col gap-[8px] border-[var(--border-1)] border-t px-[10px] pt-[6px] pb-[10px]'>
|
||||
<div className='flex flex-col gap-[6px]'>
|
||||
<Label className='text-[13px]'>Column</Label>
|
||||
<Combobox
|
||||
size='sm'
|
||||
options={columns}
|
||||
value={rule.column}
|
||||
onChange={(v) => onUpdate(rule.id, 'column', v)}
|
||||
placeholder='Column'
|
||||
disabled={isReadOnly}
|
||||
placeholder='Select column'
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className='w-[110px] shrink-0'>
|
||||
<div className='flex flex-col gap-[6px]'>
|
||||
<Label className='text-[13px]'>Direction</Label>
|
||||
<Combobox
|
||||
size='sm'
|
||||
options={directionOptions}
|
||||
value={rule.direction}
|
||||
onChange={(v) => onUpdate(rule.id, 'direction', v as 'asc' | 'desc')}
|
||||
disabled={isReadOnly}
|
||||
placeholder='Select direction'
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
|
||||
return (
|
||||
<div
|
||||
data-sort-id={rule.id}
|
||||
className={cn(
|
||||
'rounded-[4px] border border-[var(--border-1)]',
|
||||
rule.collapsed ? 'overflow-hidden' : 'overflow-visible'
|
||||
)}
|
||||
>
|
||||
{renderHeader()}
|
||||
{!rule.collapsed && renderContent()}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,13 +1,10 @@
|
||||
'use client'
|
||||
|
||||
import { useCallback, useMemo } from 'react'
|
||||
import { Plus } from 'lucide-react'
|
||||
import { nanoid } from 'nanoid'
|
||||
import { Button, type ComboboxOption } from '@/components/emcn'
|
||||
import type { ComboboxOption } from '@/components/emcn'
|
||||
import { useTableColumns } from '@/lib/table/hooks'
|
||||
import { SORT_DIRECTIONS, type SortRule } from '@/lib/table/query-builder/constants'
|
||||
import { useSubBlockValue } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/components/editor/components/sub-block/hooks/use-sub-block-value'
|
||||
import { EmptyState } from './components/empty-state'
|
||||
import { SortRuleRow } from './components/sort-rule-row'
|
||||
|
||||
interface SortBuilderProps {
|
||||
@@ -21,9 +18,10 @@ interface SortBuilderProps {
|
||||
}
|
||||
|
||||
const createDefaultRule = (columns: ComboboxOption[]): SortRule => ({
|
||||
id: nanoid(),
|
||||
id: crypto.randomUUID(),
|
||||
column: columns[0]?.value || '',
|
||||
direction: 'asc',
|
||||
collapsed: false,
|
||||
})
|
||||
|
||||
/** Visual builder for table sort rules in workflow blocks. */
|
||||
@@ -51,7 +49,8 @@ export function SortBuilder({
|
||||
)
|
||||
|
||||
const value = isPreview ? previewValue : storeValue
|
||||
const rules: SortRule[] = Array.isArray(value) && value.length > 0 ? value : []
|
||||
const rules: SortRule[] =
|
||||
Array.isArray(value) && value.length > 0 ? value : [createDefaultRule(columns)]
|
||||
const isReadOnly = isPreview || disabled
|
||||
|
||||
const addRule = useCallback(() => {
|
||||
@@ -62,9 +61,13 @@ export function SortBuilder({
|
||||
const removeRule = useCallback(
|
||||
(id: string) => {
|
||||
if (isReadOnly) return
|
||||
setStoreValue(rules.filter((r) => r.id !== id))
|
||||
if (rules.length === 1) {
|
||||
setStoreValue([createDefaultRule(columns)])
|
||||
} else {
|
||||
setStoreValue(rules.filter((r) => r.id !== id))
|
||||
}
|
||||
},
|
||||
[isReadOnly, rules, setStoreValue]
|
||||
[isReadOnly, rules, columns, setStoreValue]
|
||||
)
|
||||
|
||||
const updateRule = useCallback(
|
||||
@@ -75,36 +78,30 @@ export function SortBuilder({
|
||||
[isReadOnly, rules, setStoreValue]
|
||||
)
|
||||
|
||||
const toggleCollapse = useCallback(
|
||||
(id: string) => {
|
||||
if (isReadOnly) return
|
||||
setStoreValue(rules.map((r) => (r.id === id ? { ...r, collapsed: !r.collapsed } : r)))
|
||||
},
|
||||
[isReadOnly, rules, setStoreValue]
|
||||
)
|
||||
|
||||
return (
|
||||
<div className='flex flex-col gap-[8px]'>
|
||||
{rules.length === 0 ? (
|
||||
<EmptyState onAdd={addRule} disabled={isReadOnly} label='Add sort rule' />
|
||||
) : (
|
||||
<>
|
||||
{rules.map((rule, index) => (
|
||||
<SortRuleRow
|
||||
key={rule.id}
|
||||
rule={rule}
|
||||
index={index}
|
||||
columns={columns}
|
||||
directionOptions={directionOptions}
|
||||
isReadOnly={isReadOnly}
|
||||
onRemove={removeRule}
|
||||
onUpdate={updateRule}
|
||||
/>
|
||||
))}
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='sm'
|
||||
onClick={addRule}
|
||||
disabled={isReadOnly}
|
||||
className='self-start'
|
||||
>
|
||||
<Plus className='mr-[4px] h-[12px] w-[12px]' />
|
||||
Add sort
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
<div className='space-y-[8px]'>
|
||||
{rules.map((rule, index) => (
|
||||
<SortRuleRow
|
||||
key={rule.id}
|
||||
rule={rule}
|
||||
index={index}
|
||||
columns={columns}
|
||||
directionOptions={directionOptions}
|
||||
isReadOnly={isReadOnly}
|
||||
onAdd={addRule}
|
||||
onRemove={removeRule}
|
||||
onUpdate={updateRule}
|
||||
onToggleCollapse={toggleCollapse}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -336,6 +336,23 @@ const renderLabel = (
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
{showExternalLink && (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<button
|
||||
type='button'
|
||||
className='flex h-[12px] w-[12px] flex-shrink-0 items-center justify-center bg-transparent p-0'
|
||||
onClick={externalLink?.onClick}
|
||||
aria-label={externalLink?.tooltip}
|
||||
>
|
||||
<ExternalLink className='!h-[12px] !w-[12px] text-[var(--text-secondary)]' />
|
||||
</button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content side='top'>
|
||||
<p>{externalLink?.tooltip}</p>
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
{showCanonicalToggle && (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
@@ -369,23 +386,6 @@ const renderLabel = (
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
{showExternalLink && (
|
||||
<Tooltip.Root>
|
||||
<Tooltip.Trigger asChild>
|
||||
<button
|
||||
type='button'
|
||||
className='flex h-[12px] w-[12px] flex-shrink-0 items-center justify-center bg-transparent p-0'
|
||||
onClick={externalLink?.onClick}
|
||||
aria-label={externalLink?.tooltip}
|
||||
>
|
||||
<ExternalLink className='!h-[12px] !w-[12px] text-[var(--text-secondary)]' />
|
||||
</button>
|
||||
</Tooltip.Trigger>
|
||||
<Tooltip.Content side='top'>
|
||||
<p>{externalLink?.tooltip}</p>
|
||||
</Tooltip.Content>
|
||||
</Tooltip.Root>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
@@ -495,23 +495,47 @@ function SubBlockComponent({
|
||||
: null
|
||||
const hasSelectedTable = tableId && !tableId.startsWith('<')
|
||||
|
||||
const knowledgeBaseId =
|
||||
config.type === 'knowledge-base-selector' && subBlockValues
|
||||
? (subBlockValues[config.id]?.value as string | null)
|
||||
: null
|
||||
const hasSelectedKnowledgeBase = knowledgeBaseId && !knowledgeBaseId.startsWith('<')
|
||||
|
||||
const handleNavigateToTable = useCallback(() => {
|
||||
if (tableId && workspaceId) {
|
||||
window.open(`/workspace/${workspaceId}/tables/${tableId}`, '_blank')
|
||||
}
|
||||
}, [workspaceId, tableId])
|
||||
|
||||
const externalLink = useMemo(
|
||||
() =>
|
||||
config.type === 'table-selector' && hasSelectedTable
|
||||
? {
|
||||
show: true,
|
||||
onClick: handleNavigateToTable,
|
||||
tooltip: 'View table',
|
||||
}
|
||||
: undefined,
|
||||
[config.type, hasSelectedTable, handleNavigateToTable]
|
||||
)
|
||||
const handleNavigateToKnowledgeBase = useCallback(() => {
|
||||
if (knowledgeBaseId && workspaceId) {
|
||||
window.open(`/workspace/${workspaceId}/knowledge/${knowledgeBaseId}`, '_blank')
|
||||
}
|
||||
}, [workspaceId, knowledgeBaseId])
|
||||
|
||||
const externalLink = useMemo(() => {
|
||||
if (config.type === 'table-selector' && hasSelectedTable) {
|
||||
return {
|
||||
show: true,
|
||||
onClick: handleNavigateToTable,
|
||||
tooltip: 'View table',
|
||||
}
|
||||
}
|
||||
if (config.type === 'knowledge-base-selector' && hasSelectedKnowledgeBase) {
|
||||
return {
|
||||
show: true,
|
||||
onClick: handleNavigateToKnowledgeBase,
|
||||
tooltip: 'View knowledge base',
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
}, [
|
||||
config.type,
|
||||
hasSelectedTable,
|
||||
handleNavigateToTable,
|
||||
hasSelectedKnowledgeBase,
|
||||
handleNavigateToKnowledgeBase,
|
||||
])
|
||||
|
||||
/**
|
||||
* Handles wand icon click to activate inline prompt mode.
|
||||
|
||||
@@ -119,6 +119,14 @@ export function SearchModal({
|
||||
href: `/workspace/${workspaceId}/knowledge`,
|
||||
hidden: permissionConfig.hideKnowledgeBaseTab,
|
||||
},
|
||||
// TODO: Uncomment when working on tables
|
||||
// {
|
||||
// id: 'tables',
|
||||
// name: 'Tables',
|
||||
// icon: Table,
|
||||
// href: `/workspace/${workspaceId}/tables`,
|
||||
// hidden: permissionConfig.hideTablesTab,
|
||||
// },
|
||||
{
|
||||
id: 'help',
|
||||
name: 'Help',
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import { memo, useCallback, useEffect, useMemo, useRef, useState } from 'react'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { Database, HelpCircle, Layout, Plus, Search, Settings, Table } from 'lucide-react'
|
||||
import { Database, HelpCircle, Layout, Plus, Search, Settings } from 'lucide-react'
|
||||
import Link from 'next/link'
|
||||
import { useParams, usePathname, useRouter } from 'next/navigation'
|
||||
import { Button, Download, FolderPlus, Library, Loader, Tooltip } from '@/components/emcn'
|
||||
@@ -268,12 +268,14 @@ export const Sidebar = memo(function Sidebar() {
|
||||
href: `/workspace/${workspaceId}/knowledge`,
|
||||
hidden: permissionConfig.hideKnowledgeBaseTab,
|
||||
},
|
||||
{
|
||||
id: 'tables',
|
||||
label: 'Tables',
|
||||
icon: Table,
|
||||
href: `/workspace/${workspaceId}/tables`,
|
||||
},
|
||||
// TODO: Uncomment when working on tables
|
||||
// {
|
||||
// id: 'tables',
|
||||
// label: 'Tables',
|
||||
// icon: Table,
|
||||
// href: `/workspace/${workspaceId}/tables`,
|
||||
// hidden: permissionConfig.hideTablesTab,
|
||||
// },
|
||||
{
|
||||
id: 'help',
|
||||
label: 'Help',
|
||||
|
||||
@@ -485,14 +485,6 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
// Convert numeric string inputs to numbers
|
||||
if (params.limit) {
|
||||
params.limit = Number(params.limit)
|
||||
}
|
||||
if (params.offset) {
|
||||
params.offset = Number(params.offset)
|
||||
}
|
||||
|
||||
switch (params.operation) {
|
||||
case 'ahrefs_domain_rating':
|
||||
return 'ahrefs_domain_rating'
|
||||
@@ -514,6 +506,12 @@ Return ONLY the date string in YYYY-MM-DD format - no explanations, no quotes, n
|
||||
return 'ahrefs_domain_rating'
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const result: Record<string, unknown> = {}
|
||||
if (params.limit) result.limit = Number(params.limit)
|
||||
if (params.offset) result.offset = Number(params.offset)
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
|
||||
646
apps/sim/blocks/blocks/algolia.ts
Normal file
646
apps/sim/blocks/blocks/algolia.ts
Normal file
@@ -0,0 +1,646 @@
|
||||
import { AlgoliaIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode } from '@/blocks/types'
|
||||
|
||||
export const AlgoliaBlock: BlockConfig = {
|
||||
type: 'algolia',
|
||||
name: 'Algolia',
|
||||
description: 'Search and manage Algolia indices',
|
||||
longDescription:
|
||||
'Integrate Algolia into your workflow. Search indices, manage records (add, update, delete, browse), configure index settings, and perform batch operations.',
|
||||
docsLink: 'https://docs.sim.ai/tools/algolia',
|
||||
category: 'tools',
|
||||
bgColor: '#003DFF',
|
||||
icon: AlgoliaIcon,
|
||||
authMode: AuthMode.ApiKey,
|
||||
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Search', id: 'search' },
|
||||
{ label: 'Add Record', id: 'add_record' },
|
||||
{ label: 'Get Record', id: 'get_record' },
|
||||
{ label: 'Get Records', id: 'get_records' },
|
||||
{ label: 'Partial Update Record', id: 'partial_update_record' },
|
||||
{ label: 'Delete Record', id: 'delete_record' },
|
||||
{ label: 'Browse Records', id: 'browse_records' },
|
||||
{ label: 'Batch Operations', id: 'batch_operations' },
|
||||
{ label: 'List Indices', id: 'list_indices' },
|
||||
{ label: 'Get Settings', id: 'get_settings' },
|
||||
{ label: 'Update Settings', id: 'update_settings' },
|
||||
{ label: 'Delete Index', id: 'delete_index' },
|
||||
{ label: 'Copy/Move Index', id: 'copy_move_index' },
|
||||
{ label: 'Clear Records', id: 'clear_records' },
|
||||
{ label: 'Delete By Filter', id: 'delete_by_filter' },
|
||||
],
|
||||
value: () => 'search',
|
||||
},
|
||||
// Index name - needed for all except list_indices
|
||||
{
|
||||
id: 'indexName',
|
||||
title: 'Index Name',
|
||||
type: 'short-input',
|
||||
placeholder: 'my_index',
|
||||
condition: { field: 'operation', value: 'list_indices', not: true },
|
||||
required: { field: 'operation', value: 'list_indices', not: true },
|
||||
},
|
||||
// Search fields
|
||||
{
|
||||
id: 'query',
|
||||
title: 'Search Query',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter search query',
|
||||
condition: { field: 'operation', value: ['search', 'browse_records'] },
|
||||
required: { field: 'operation', value: 'search' },
|
||||
},
|
||||
{
|
||||
id: 'hitsPerPage',
|
||||
title: 'Hits Per Page',
|
||||
type: 'short-input',
|
||||
placeholder: '20',
|
||||
condition: { field: 'operation', value: ['search', 'browse_records'] },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'page',
|
||||
title: 'Page',
|
||||
type: 'short-input',
|
||||
placeholder: '0',
|
||||
condition: { field: 'operation', value: 'search' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'filters',
|
||||
title: 'Filters',
|
||||
type: 'short-input',
|
||||
placeholder: 'category:electronics AND price < 100',
|
||||
condition: { field: 'operation', value: ['search', 'browse_records'] },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate an Algolia filter expression based on the user's description.
|
||||
|
||||
Available operators: AND, OR, NOT
|
||||
Comparison: =, !=, <, >, <=, >=
|
||||
Facet filters: attribute:value
|
||||
Numeric filters: attribute operator value
|
||||
Boolean filters: attribute:true / attribute:false
|
||||
Tag filters: _tags:value
|
||||
|
||||
Examples:
|
||||
- "category:electronics AND price < 100"
|
||||
- "brand:Apple OR brand:Samsung"
|
||||
- "inStock:true AND NOT category:deprecated"
|
||||
- "(category:electronics OR category:books) AND price >= 10"
|
||||
|
||||
Return ONLY the filter string, no quotes or explanation.`,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'attributesToRetrieve',
|
||||
title: 'Attributes to Retrieve',
|
||||
type: 'short-input',
|
||||
placeholder: 'name,description,price',
|
||||
condition: { field: 'operation', value: ['search', 'get_record', 'browse_records'] },
|
||||
mode: 'advanced',
|
||||
},
|
||||
// Browse cursor
|
||||
{
|
||||
id: 'cursor',
|
||||
title: 'Cursor',
|
||||
type: 'short-input',
|
||||
placeholder: 'Cursor from previous browse response',
|
||||
condition: { field: 'operation', value: 'browse_records' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
// Add record fields
|
||||
{
|
||||
id: 'record',
|
||||
title: 'Record',
|
||||
type: 'long-input',
|
||||
placeholder: '{"name": "Product", "price": 29.99}',
|
||||
condition: { field: 'operation', value: 'add_record' },
|
||||
required: { field: 'operation', value: 'add_record' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a JSON object for an Algolia record based on the user's description.
|
||||
|
||||
### CONTEXT
|
||||
{context}
|
||||
|
||||
### GUIDELINES
|
||||
- Return ONLY a valid JSON object starting with { and ending with }
|
||||
- Include relevant attributes as key-value pairs
|
||||
- Do NOT include objectID unless the user explicitly specifies one
|
||||
- Use appropriate types: strings, numbers, booleans, arrays
|
||||
|
||||
### EXAMPLE
|
||||
User: "A product with name, price, and categories"
|
||||
Output:
|
||||
{"name": "Example Product", "price": 29.99, "categories": ["electronics", "gadgets"]}
|
||||
|
||||
Return ONLY the JSON object.`,
|
||||
placeholder: 'Describe the record to add...',
|
||||
generationType: 'json-object',
|
||||
},
|
||||
},
|
||||
// Partial update fields
|
||||
{
|
||||
id: 'attributes',
|
||||
title: 'Attributes to Update',
|
||||
type: 'long-input',
|
||||
placeholder: '{"price": 24.99, "stock": {"_operation": "Decrement", "value": 1}}',
|
||||
condition: { field: 'operation', value: 'partial_update_record' },
|
||||
required: { field: 'operation', value: 'partial_update_record' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a JSON object for an Algolia partial update based on the user's description.
|
||||
|
||||
### CONTEXT
|
||||
{context}
|
||||
|
||||
### GUIDELINES
|
||||
- Return ONLY a valid JSON object starting with { and ending with }
|
||||
- For simple updates, use key-value pairs: {"price": 24.99}
|
||||
- For built-in operations, use the _operation syntax:
|
||||
- Increment: {"count": {"_operation": "Increment", "value": 1}}
|
||||
- Decrement: {"stock": {"_operation": "Decrement", "value": 1}}
|
||||
- Add to array: {"tags": {"_operation": "Add", "value": "new-tag"}}
|
||||
- Remove from array: {"tags": {"_operation": "Remove", "value": "old-tag"}}
|
||||
- AddUnique: {"tags": {"_operation": "AddUnique", "value": "unique-tag"}}
|
||||
- IncrementFrom: {"version": {"_operation": "IncrementFrom", "value": 0}}
|
||||
- IncrementSet: {"views": {"_operation": "IncrementSet", "value": 1}}
|
||||
|
||||
### EXAMPLE
|
||||
User: "Decrease stock by 2 and add a sale tag"
|
||||
Output:
|
||||
{"stock": {"_operation": "Decrement", "value": 2}, "tags": {"_operation": "Add", "value": "sale"}}
|
||||
|
||||
Return ONLY the JSON object.`,
|
||||
placeholder: 'Describe the attributes to update...',
|
||||
generationType: 'json-object',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'createIfNotExists',
|
||||
title: 'Create If Not Exists',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Yes', id: 'true' },
|
||||
{ label: 'No', id: 'false' },
|
||||
],
|
||||
condition: { field: 'operation', value: 'partial_update_record' },
|
||||
value: () => 'true',
|
||||
mode: 'advanced',
|
||||
},
|
||||
// Batch operations field
|
||||
{
|
||||
id: 'requests',
|
||||
title: 'Batch Requests',
|
||||
type: 'long-input',
|
||||
placeholder:
|
||||
'[{"action": "addObject", "body": {"name": "Item"}}, {"action": "deleteObject", "body": {"objectID": "123"}}]',
|
||||
condition: { field: 'operation', value: 'batch_operations' },
|
||||
required: { field: 'operation', value: 'batch_operations' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a JSON array of Algolia batch operations based on the user's description.
|
||||
|
||||
### CONTEXT
|
||||
{context}
|
||||
|
||||
### GUIDELINES
|
||||
- Return ONLY a valid JSON array starting with [ and ending with ]
|
||||
- Each item must have "action" and "body" properties
|
||||
- Valid actions: addObject, updateObject, partialUpdateObject, partialUpdateObjectNoCreate, deleteObject, delete, clear
|
||||
- For deleteObject, body must include objectID
|
||||
- For updateObject, body must include objectID
|
||||
- For addObject, objectID is optional (auto-generated if omitted)
|
||||
|
||||
### EXAMPLE
|
||||
User: "Add two products and delete one with ID old-123"
|
||||
Output:
|
||||
[
|
||||
{"action": "addObject", "body": {"name": "Product A", "price": 19.99}},
|
||||
{"action": "addObject", "body": {"name": "Product B", "price": 29.99}},
|
||||
{"action": "deleteObject", "body": {"objectID": "old-123"}}
|
||||
]
|
||||
|
||||
Return ONLY the JSON array.`,
|
||||
placeholder: 'Describe the batch operations to perform...',
|
||||
generationType: 'json-object',
|
||||
},
|
||||
},
|
||||
// Update settings fields
|
||||
{
|
||||
id: 'settings',
|
||||
title: 'Settings',
|
||||
type: 'long-input',
|
||||
placeholder:
|
||||
'{"searchableAttributes": ["name", "description"], "customRanking": ["desc(popularity)"]}',
|
||||
condition: { field: 'operation', value: 'update_settings' },
|
||||
required: { field: 'operation', value: 'update_settings' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a valid Algolia index settings JSON object based on the user's description.
|
||||
|
||||
### CONTEXT
|
||||
{context}
|
||||
|
||||
### GUIDELINES
|
||||
- Return ONLY a valid JSON object starting with { and ending with }
|
||||
- Common settings include:
|
||||
- searchableAttributes: array of attribute names (ordered by priority)
|
||||
- attributesForFaceting: array of attributes for filtering/faceting (prefix with "filterOnly(" or "searchable(" as needed)
|
||||
- customRanking: array of "asc(attr)" or "desc(attr)" expressions
|
||||
- ranking: array of ranking criteria (e.g., "typo", "geo", "words", "filters", "proximity", "attribute", "exact", "custom")
|
||||
- replicas: array of replica index names
|
||||
- hitsPerPage: number of results per page
|
||||
- paginationLimitedTo: max pagination depth
|
||||
- highlightPreTag / highlightPostTag: HTML tags for highlighting
|
||||
|
||||
### EXAMPLE
|
||||
User: "Make name and description searchable, add category faceting, rank by popularity"
|
||||
Output:
|
||||
{"searchableAttributes": ["name", "description"], "attributesForFaceting": ["category"], "customRanking": ["desc(popularity)"]}
|
||||
|
||||
Return ONLY the JSON object.`,
|
||||
placeholder: 'Describe the settings to apply...',
|
||||
generationType: 'json-object',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'forwardToReplicas',
|
||||
title: 'Forward to Replicas',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'No', id: 'false' },
|
||||
{ label: 'Yes', id: 'true' },
|
||||
],
|
||||
condition: { field: 'operation', value: 'update_settings' },
|
||||
value: () => 'false',
|
||||
mode: 'advanced',
|
||||
},
|
||||
// Copy/Move index fields
|
||||
{
|
||||
id: 'copyMoveOperation',
|
||||
title: 'Copy or Move',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Copy', id: 'copy' },
|
||||
{ label: 'Move', id: 'move' },
|
||||
],
|
||||
condition: { field: 'operation', value: 'copy_move_index' },
|
||||
value: () => 'copy',
|
||||
},
|
||||
{
|
||||
id: 'destination',
|
||||
title: 'Destination Index',
|
||||
type: 'short-input',
|
||||
placeholder: 'my_index_backup',
|
||||
condition: { field: 'operation', value: 'copy_move_index' },
|
||||
required: { field: 'operation', value: 'copy_move_index' },
|
||||
},
|
||||
{
|
||||
id: 'scope',
|
||||
title: 'Scope (Copy Only)',
|
||||
type: 'short-input',
|
||||
placeholder: '["settings", "synonyms", "rules"]',
|
||||
condition: { field: 'operation', value: 'copy_move_index' },
|
||||
mode: 'advanced',
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a JSON array of Algolia copy scopes based on the user's description.
|
||||
|
||||
### CONTEXT
|
||||
{context}
|
||||
|
||||
### GUIDELINES
|
||||
- Return ONLY a valid JSON array
|
||||
- Valid scope values: "settings", "synonyms", "rules"
|
||||
- Omitting scope copies everything including records
|
||||
- Only applies to copy operations, not move
|
||||
|
||||
### EXAMPLE
|
||||
User: "Copy only settings and synonyms"
|
||||
Output:
|
||||
["settings", "synonyms"]
|
||||
|
||||
Return ONLY the JSON array.`,
|
||||
placeholder: 'Describe what to copy...',
|
||||
generationType: 'json-object',
|
||||
},
|
||||
},
|
||||
// Delete by filter fields
|
||||
{
|
||||
id: 'deleteFilters',
|
||||
title: 'Filter Expression',
|
||||
type: 'short-input',
|
||||
placeholder: 'category:outdated AND price < 10',
|
||||
condition: { field: 'operation', value: 'delete_by_filter' },
|
||||
required: { field: 'operation', value: 'delete_by_filter' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate an Algolia filter expression for deleting records based on the user's description.
|
||||
|
||||
Available operators: AND, OR, NOT
|
||||
Comparison: =, !=, <, >, <=, >=
|
||||
Facet filters: attribute:value
|
||||
Numeric filters: attribute operator value
|
||||
|
||||
Examples:
|
||||
- "category:outdated AND price < 10"
|
||||
- "status:archived OR lastUpdated < 1609459200"
|
||||
- "NOT category:active"
|
||||
|
||||
Return ONLY the filter string, no quotes or explanation.`,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'facetFilters',
|
||||
title: 'Facet Filters',
|
||||
type: 'short-input',
|
||||
placeholder: '["brand:Acme"]',
|
||||
condition: { field: 'operation', value: 'delete_by_filter' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'numericFilters',
|
||||
title: 'Numeric Filters',
|
||||
type: 'short-input',
|
||||
placeholder: '["price > 100"]',
|
||||
condition: { field: 'operation', value: 'delete_by_filter' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'tagFilters',
|
||||
title: 'Tag Filters',
|
||||
type: 'short-input',
|
||||
placeholder: '["published", "archived"]',
|
||||
condition: { field: 'operation', value: 'delete_by_filter' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'aroundLatLng',
|
||||
title: 'Around Lat/Lng',
|
||||
type: 'short-input',
|
||||
placeholder: '40.71,-74.01',
|
||||
condition: { field: 'operation', value: 'delete_by_filter' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'aroundRadius',
|
||||
title: 'Around Radius (m)',
|
||||
type: 'short-input',
|
||||
placeholder: '1000 or "all"',
|
||||
condition: { field: 'operation', value: 'delete_by_filter' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'insideBoundingBox',
|
||||
title: 'Inside Bounding Box',
|
||||
type: 'short-input',
|
||||
placeholder: '[[47.3165,0.757,47.3424,0.8012]]',
|
||||
condition: { field: 'operation', value: 'delete_by_filter' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'insidePolygon',
|
||||
title: 'Inside Polygon',
|
||||
type: 'short-input',
|
||||
placeholder: '[[47.3165,0.757,47.3424,0.8012,47.33,0.78]]',
|
||||
condition: { field: 'operation', value: 'delete_by_filter' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
// Get records (batch) field
|
||||
{
|
||||
id: 'getRecordsRequests',
|
||||
title: 'Record Requests',
|
||||
type: 'long-input',
|
||||
placeholder: '[{"objectID": "id1"}, {"objectID": "id2", "attributesToRetrieve": ["name"]}]',
|
||||
condition: { field: 'operation', value: 'get_records' },
|
||||
required: { field: 'operation', value: 'get_records' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a JSON array of Algolia get-records requests based on the user's description.
|
||||
|
||||
### CONTEXT
|
||||
{context}
|
||||
|
||||
### GUIDELINES
|
||||
- Return ONLY a valid JSON array starting with [ and ending with ]
|
||||
- Each item must have "objectID" (required)
|
||||
- Optionally include "indexName" to fetch from a different index
|
||||
- Optionally include "attributesToRetrieve" as an array of attribute names
|
||||
|
||||
### EXAMPLE
|
||||
User: "Get products with IDs abc and xyz, only returning name and price"
|
||||
Output:
|
||||
[{"objectID": "abc", "attributesToRetrieve": ["name", "price"]}, {"objectID": "xyz", "attributesToRetrieve": ["name", "price"]}]
|
||||
|
||||
Return ONLY the JSON array.`,
|
||||
placeholder: 'Describe the records to retrieve...',
|
||||
generationType: 'json-object',
|
||||
},
|
||||
},
|
||||
// List indices pagination
|
||||
{
|
||||
id: 'listPage',
|
||||
title: 'Page',
|
||||
type: 'short-input',
|
||||
placeholder: '0',
|
||||
condition: { field: 'operation', value: 'list_indices' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'listHitsPerPage',
|
||||
title: 'Indices Per Page',
|
||||
type: 'short-input',
|
||||
placeholder: '100',
|
||||
condition: { field: 'operation', value: 'list_indices' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
// Object ID - for add (optional), get, partial update, delete
|
||||
{
|
||||
id: 'objectID',
|
||||
title: 'Object ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'my-record-123',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['add_record', 'get_record', 'partial_update_record', 'delete_record'],
|
||||
},
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: ['get_record', 'partial_update_record', 'delete_record'],
|
||||
},
|
||||
},
|
||||
// Common credentials
|
||||
{
|
||||
id: 'applicationId',
|
||||
title: 'Application ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Your Algolia Application ID',
|
||||
password: true,
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'API Key',
|
||||
type: 'short-input',
|
||||
placeholder: 'Your Algolia API Key',
|
||||
password: true,
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
|
||||
tools: {
|
||||
access: [
|
||||
'algolia_search',
|
||||
'algolia_add_record',
|
||||
'algolia_get_record',
|
||||
'algolia_get_records',
|
||||
'algolia_partial_update_record',
|
||||
'algolia_delete_record',
|
||||
'algolia_browse_records',
|
||||
'algolia_batch_operations',
|
||||
'algolia_list_indices',
|
||||
'algolia_get_settings',
|
||||
'algolia_update_settings',
|
||||
'algolia_delete_index',
|
||||
'algolia_copy_move_index',
|
||||
'algolia_clear_records',
|
||||
'algolia_delete_by_filter',
|
||||
],
|
||||
config: {
|
||||
tool: (params: Record<string, unknown>) => {
|
||||
const op = params.operation as string
|
||||
if (op === 'partial_update_record') {
|
||||
params.createIfNotExists = params.createIfNotExists !== 'false'
|
||||
}
|
||||
if (op === 'update_settings' && params.forwardToReplicas === 'true') {
|
||||
params.forwardToReplicas = true
|
||||
} else if (op === 'update_settings') {
|
||||
params.forwardToReplicas = false
|
||||
}
|
||||
if (op === 'copy_move_index') {
|
||||
params.operation = params.copyMoveOperation
|
||||
}
|
||||
if (op === 'delete_by_filter') {
|
||||
params.filters = params.deleteFilters
|
||||
}
|
||||
if (op === 'get_records') {
|
||||
params.requests = params.getRecordsRequests
|
||||
}
|
||||
if (op === 'list_indices') {
|
||||
if (params.listPage !== undefined) params.page = params.listPage
|
||||
if (params.listHitsPerPage !== undefined) params.hitsPerPage = params.listHitsPerPage
|
||||
}
|
||||
return `algolia_${op}`
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'Operation to perform' },
|
||||
indexName: { type: 'string', description: 'Algolia index name' },
|
||||
query: { type: 'string', description: 'Search query' },
|
||||
hitsPerPage: { type: 'string', description: 'Number of hits per page' },
|
||||
page: { type: 'string', description: 'Page number' },
|
||||
filters: { type: 'string', description: 'Algolia filter string' },
|
||||
attributesToRetrieve: { type: 'string', description: 'Attributes to retrieve' },
|
||||
cursor: { type: 'string', description: 'Browse cursor for pagination' },
|
||||
record: { type: 'json', description: 'Record data to add' },
|
||||
attributes: { type: 'json', description: 'Attributes to partially update' },
|
||||
createIfNotExists: { type: 'string', description: 'Create record if not exists' },
|
||||
requests: { type: 'json', description: 'Batch operation requests' },
|
||||
settings: { type: 'json', description: 'Index settings to update' },
|
||||
forwardToReplicas: { type: 'string', description: 'Forward settings to replicas' },
|
||||
objectID: { type: 'string', description: 'Object ID' },
|
||||
copyMoveOperation: { type: 'string', description: 'Copy or move operation' },
|
||||
destination: { type: 'string', description: 'Destination index name' },
|
||||
scope: { type: 'json', description: 'Scopes to copy (settings, synonyms, rules)' },
|
||||
deleteFilters: { type: 'string', description: 'Filter expression for delete by filter' },
|
||||
facetFilters: { type: 'json', description: 'Facet filters for delete by filter' },
|
||||
numericFilters: { type: 'json', description: 'Numeric filters for delete by filter' },
|
||||
tagFilters: {
|
||||
type: 'json',
|
||||
description: 'Tag filters using the _tags attribute for delete by filter',
|
||||
},
|
||||
aroundLatLng: { type: 'string', description: 'Geo-search coordinates (lat,lng)' },
|
||||
aroundRadius: { type: 'string', description: 'Geo-search radius in meters or "all"' },
|
||||
insideBoundingBox: { type: 'json', description: 'Bounding box coordinates for geo-search' },
|
||||
insidePolygon: { type: 'json', description: 'Polygon coordinates for geo-search' },
|
||||
getRecordsRequests: {
|
||||
type: 'json',
|
||||
description: 'Array of objects with objectID to retrieve multiple records',
|
||||
},
|
||||
listPage: { type: 'string', description: 'Page number for list indices pagination' },
|
||||
listHitsPerPage: { type: 'string', description: 'Indices per page for list indices' },
|
||||
applicationId: { type: 'string', description: 'Algolia Application ID' },
|
||||
apiKey: { type: 'string', description: 'Algolia API Key' },
|
||||
},
|
||||
|
||||
outputs: {
|
||||
hits: { type: 'array', description: 'Search result hits or browsed records' },
|
||||
nbHits: { type: 'number', description: 'Total number of hits' },
|
||||
page: { type: 'number', description: 'Current page number (zero-based)' },
|
||||
nbPages: { type: 'number', description: 'Total number of pages available' },
|
||||
hitsPerPage: { type: 'number', description: 'Number of hits per page' },
|
||||
processingTimeMS: {
|
||||
type: 'number',
|
||||
description: 'Server-side processing time in milliseconds',
|
||||
},
|
||||
query: { type: 'string', description: 'Search query that was executed' },
|
||||
parsedQuery: { type: 'string', description: 'Query after normalization and stop word removal' },
|
||||
facets: { type: 'json', description: 'Facet counts by facet name' },
|
||||
facets_stats: {
|
||||
type: 'json',
|
||||
description: 'Statistics (min, max, avg, sum) for numeric facets',
|
||||
},
|
||||
exhaustive: { type: 'json', description: 'Exhaustiveness flags for the search results' },
|
||||
taskID: { type: 'number', description: 'Algolia task ID for tracking async operations' },
|
||||
objectID: { type: 'string', description: 'Object ID of the affected record' },
|
||||
objectIDs: { type: 'array', description: 'Object IDs affected by batch operations' },
|
||||
createdAt: { type: 'string', description: 'ISO 8601 timestamp when the record was created' },
|
||||
updatedAt: {
|
||||
type: 'string',
|
||||
description: 'ISO 8601 timestamp when the record or settings were updated',
|
||||
},
|
||||
deletedAt: {
|
||||
type: 'string',
|
||||
description: 'ISO 8601 timestamp when the record or index was deleted',
|
||||
},
|
||||
record: { type: 'json', description: 'Retrieved record data (user-defined attributes)' },
|
||||
results: { type: 'array', description: 'Array of retrieved records from get_records' },
|
||||
cursor: {
|
||||
type: 'string',
|
||||
description:
|
||||
'Opaque cursor string for retrieving the next page of browse results. Absent when no more results exist.',
|
||||
},
|
||||
indices: { type: 'array', description: 'List of indices in the application' },
|
||||
searchableAttributes: { type: 'array', description: 'List of searchable attributes' },
|
||||
attributesForFaceting: { type: 'array', description: 'Attributes configured for faceting' },
|
||||
ranking: { type: 'array', description: 'Ranking criteria for the index' },
|
||||
customRanking: { type: 'array', description: 'Custom ranking criteria' },
|
||||
replicas: { type: 'array', description: 'List of replica index names' },
|
||||
maxValuesPerFacet: {
|
||||
type: 'number',
|
||||
description: 'Maximum number of facet values returned (default 100)',
|
||||
},
|
||||
highlightPreTag: {
|
||||
type: 'string',
|
||||
description: 'HTML tag inserted before highlighted parts (default "<em>")',
|
||||
},
|
||||
highlightPostTag: {
|
||||
type: 'string',
|
||||
description: 'HTML tag inserted after highlighted parts (default "</em>")',
|
||||
},
|
||||
paginationLimitedTo: {
|
||||
type: 'number',
|
||||
description: 'Maximum number of hits accessible via pagination (default 1000)',
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -110,11 +110,6 @@ export const ArxivBlock: BlockConfig<ArxivResponse> = {
|
||||
access: ['arxiv_search', 'arxiv_get_paper', 'arxiv_get_author_papers'],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
// Convert maxResults to a number for operations that use it
|
||||
if (params.maxResults) {
|
||||
params.maxResults = Number(params.maxResults)
|
||||
}
|
||||
|
||||
switch (params.operation) {
|
||||
case 'arxiv_search':
|
||||
return 'arxiv_search'
|
||||
@@ -126,6 +121,11 @@ export const ArxivBlock: BlockConfig<ArxivResponse> = {
|
||||
return 'arxiv_search'
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const result: Record<string, unknown> = {}
|
||||
if (params.maxResults) result.maxResults = Number(params.maxResults)
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
|
||||
@@ -33,6 +33,7 @@ export const BrowserUseBlock: BlockConfig<BrowserUseResponse> = {
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Browser Use LLM', id: 'browser-use-llm' },
|
||||
{ label: 'Browser Use 2.0', id: 'browser-use-2.0' },
|
||||
{ label: 'GPT-4o', id: 'gpt-4o' },
|
||||
{ label: 'GPT-4o Mini', id: 'gpt-4o-mini' },
|
||||
{ label: 'GPT-4.1', id: 'gpt-4.1' },
|
||||
@@ -42,6 +43,7 @@ export const BrowserUseBlock: BlockConfig<BrowserUseResponse> = {
|
||||
{ label: 'Gemini 2.5 Flash', id: 'gemini-2.5-flash' },
|
||||
{ label: 'Gemini 2.5 Pro', id: 'gemini-2.5-pro' },
|
||||
{ label: 'Gemini 3 Pro Preview', id: 'gemini-3-pro-preview' },
|
||||
{ label: 'Gemini 3 Flash Preview', id: 'gemini-3-flash-preview' },
|
||||
{ label: 'Gemini Flash Latest', id: 'gemini-flash-latest' },
|
||||
{ label: 'Gemini Flash Lite Latest', id: 'gemini-flash-lite-latest' },
|
||||
{ label: 'Claude 3.7 Sonnet', id: 'claude-3-7-sonnet-20250219' },
|
||||
|
||||
@@ -309,20 +309,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
// Convert numeric params
|
||||
if (params.limit) {
|
||||
params.limit = Number(params.limit)
|
||||
}
|
||||
if (params.maxResults) {
|
||||
params.maxResults = Number(params.maxResults)
|
||||
}
|
||||
|
||||
// Normalize file input for upload operation - use canonical 'file' param
|
||||
const normalizedFile = normalizeFileInput(params.file, { single: true })
|
||||
if (normalizedFile) {
|
||||
params.file = normalizedFile
|
||||
}
|
||||
|
||||
switch (params.operation) {
|
||||
case 'dropbox_upload':
|
||||
return 'dropbox_upload'
|
||||
@@ -348,6 +334,16 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
return 'dropbox_upload'
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const result: Record<string, unknown> = {}
|
||||
if (params.limit) result.limit = Number(params.limit)
|
||||
if (params.maxResults) result.maxResults = Number(params.maxResults)
|
||||
const normalizedFile = normalizeFileInput(params.file, { single: true })
|
||||
if (normalizedFile) {
|
||||
result.file = normalizedFile
|
||||
}
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
|
||||
@@ -457,24 +457,19 @@ Return ONLY valid JSON - no explanations, no markdown code blocks.`,
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
// Convert numeric strings to numbers
|
||||
if (params.size) {
|
||||
params.size = Number(params.size)
|
||||
}
|
||||
if (params.from) {
|
||||
params.from = Number(params.from)
|
||||
}
|
||||
if (params.retryOnConflict) {
|
||||
params.retryOnConflict = Number(params.retryOnConflict)
|
||||
}
|
||||
// Append 's' to timeout for Elasticsearch time format
|
||||
if (params.timeout && !params.timeout.endsWith('s')) {
|
||||
params.timeout = `${params.timeout}s`
|
||||
}
|
||||
|
||||
// Return the operation as the tool ID
|
||||
return params.operation || 'elasticsearch_search'
|
||||
},
|
||||
params: (params) => {
|
||||
const result: Record<string, unknown> = {}
|
||||
if (params.size) result.size = Number(params.size)
|
||||
if (params.from) result.from = Number(params.from)
|
||||
if (params.retryOnConflict) result.retryOnConflict = Number(params.retryOnConflict)
|
||||
if (params.timeout && typeof params.timeout === 'string') {
|
||||
result.timeout = params.timeout.endsWith('s') ? params.timeout : `${params.timeout}s`
|
||||
}
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
@@ -49,6 +49,7 @@ export const ExaBlock: BlockConfig<ExaResponse> = {
|
||||
title: 'Use Autoprompt',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'exa_search' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'type',
|
||||
@@ -62,6 +63,7 @@ export const ExaBlock: BlockConfig<ExaResponse> = {
|
||||
],
|
||||
value: () => 'auto',
|
||||
condition: { field: 'operation', value: 'exa_search' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'includeDomains',
|
||||
@@ -69,6 +71,7 @@ export const ExaBlock: BlockConfig<ExaResponse> = {
|
||||
type: 'long-input',
|
||||
placeholder: 'example.com, another.com (comma-separated)',
|
||||
condition: { field: 'operation', value: 'exa_search' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'excludeDomains',
|
||||
@@ -76,6 +79,7 @@ export const ExaBlock: BlockConfig<ExaResponse> = {
|
||||
type: 'long-input',
|
||||
placeholder: 'exclude.com, another.com (comma-separated)',
|
||||
condition: { field: 'operation', value: 'exa_search' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'category',
|
||||
@@ -95,6 +99,7 @@ export const ExaBlock: BlockConfig<ExaResponse> = {
|
||||
],
|
||||
value: () => '',
|
||||
condition: { field: 'operation', value: 'exa_search' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'text',
|
||||
@@ -107,12 +112,14 @@ export const ExaBlock: BlockConfig<ExaResponse> = {
|
||||
title: 'Include Highlights',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'exa_search' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'summary',
|
||||
title: 'Include Summary',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'exa_search' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'livecrawl',
|
||||
@@ -125,6 +132,7 @@ export const ExaBlock: BlockConfig<ExaResponse> = {
|
||||
],
|
||||
value: () => 'never',
|
||||
condition: { field: 'operation', value: 'exa_search' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
// Get Contents operation inputs
|
||||
{
|
||||
@@ -147,6 +155,7 @@ export const ExaBlock: BlockConfig<ExaResponse> = {
|
||||
type: 'long-input',
|
||||
placeholder: 'Enter a query to guide the summary generation...',
|
||||
condition: { field: 'operation', value: 'exa_get_contents' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'subpages',
|
||||
@@ -154,6 +163,7 @@ export const ExaBlock: BlockConfig<ExaResponse> = {
|
||||
type: 'short-input',
|
||||
placeholder: '5',
|
||||
condition: { field: 'operation', value: 'exa_get_contents' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'subpageTarget',
|
||||
@@ -161,12 +171,14 @@ export const ExaBlock: BlockConfig<ExaResponse> = {
|
||||
type: 'long-input',
|
||||
placeholder: 'docs, tutorial, about (comma-separated)',
|
||||
condition: { field: 'operation', value: 'exa_get_contents' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'highlights',
|
||||
title: 'Include Highlights',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'exa_get_contents' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
// Find Similar Links operation inputs
|
||||
{
|
||||
@@ -196,6 +208,7 @@ export const ExaBlock: BlockConfig<ExaResponse> = {
|
||||
type: 'long-input',
|
||||
placeholder: 'example.com, another.com (comma-separated)',
|
||||
condition: { field: 'operation', value: 'exa_find_similar_links' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'excludeDomains',
|
||||
@@ -203,12 +216,14 @@ export const ExaBlock: BlockConfig<ExaResponse> = {
|
||||
type: 'long-input',
|
||||
placeholder: 'exclude.com, another.com (comma-separated)',
|
||||
condition: { field: 'operation', value: 'exa_find_similar_links' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'excludeSourceDomain',
|
||||
title: 'Exclude Source Domain',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'exa_find_similar_links' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'category',
|
||||
@@ -228,18 +243,21 @@ export const ExaBlock: BlockConfig<ExaResponse> = {
|
||||
],
|
||||
value: () => '',
|
||||
condition: { field: 'operation', value: 'exa_find_similar_links' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'highlights',
|
||||
title: 'Include Highlights',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'exa_find_similar_links' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'summary',
|
||||
title: 'Include Summary',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'exa_find_similar_links' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'livecrawl',
|
||||
@@ -252,6 +270,7 @@ export const ExaBlock: BlockConfig<ExaResponse> = {
|
||||
],
|
||||
value: () => 'never',
|
||||
condition: { field: 'operation', value: 'exa_find_similar_links' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
// Answer operation inputs
|
||||
{
|
||||
@@ -267,6 +286,7 @@ export const ExaBlock: BlockConfig<ExaResponse> = {
|
||||
title: 'Include Text',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'exa_answer' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
// Research operation inputs
|
||||
{
|
||||
@@ -309,16 +329,6 @@ export const ExaBlock: BlockConfig<ExaResponse> = {
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
// Convert numResults to a number for operations that use it
|
||||
if (params.numResults) {
|
||||
params.numResults = Number(params.numResults)
|
||||
}
|
||||
|
||||
// Convert subpages to a number if provided
|
||||
if (params.subpages) {
|
||||
params.subpages = Number(params.subpages)
|
||||
}
|
||||
|
||||
switch (params.operation) {
|
||||
case 'exa_search':
|
||||
return 'exa_search'
|
||||
@@ -334,6 +344,16 @@ export const ExaBlock: BlockConfig<ExaResponse> = {
|
||||
return 'exa_search'
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const result: Record<string, unknown> = {}
|
||||
if (params.numResults) {
|
||||
result.numResults = Number(params.numResults)
|
||||
}
|
||||
if (params.subpages) {
|
||||
result.subpages = Number(params.subpages)
|
||||
}
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
|
||||
@@ -606,45 +606,23 @@ Return ONLY the folder title - no explanations, no quotes, no extra text.`,
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
// Convert numeric string fields to numbers
|
||||
if (params.panelId) {
|
||||
params.panelId = Number(params.panelId)
|
||||
}
|
||||
if (params.annotationId) {
|
||||
params.annotationId = Number(params.annotationId)
|
||||
}
|
||||
if (params.time) {
|
||||
params.time = Number(params.time)
|
||||
}
|
||||
if (params.timeEnd) {
|
||||
params.timeEnd = Number(params.timeEnd)
|
||||
}
|
||||
if (params.from) {
|
||||
params.from = Number(params.from)
|
||||
}
|
||||
if (params.to) {
|
||||
params.to = Number(params.to)
|
||||
}
|
||||
|
||||
// Map subblock fields to tool parameter names
|
||||
if (params.alertTitle) {
|
||||
params.title = params.alertTitle
|
||||
}
|
||||
if (params.folderTitle) {
|
||||
params.title = params.folderTitle
|
||||
}
|
||||
if (params.folderUidNew) {
|
||||
params.uid = params.folderUidNew
|
||||
}
|
||||
if (params.annotationTags) {
|
||||
params.tags = params.annotationTags
|
||||
}
|
||||
if (params.annotationDashboardUid) {
|
||||
params.dashboardUid = params.annotationDashboardUid
|
||||
}
|
||||
|
||||
if (params.alertTitle) params.title = params.alertTitle
|
||||
if (params.folderTitle) params.title = params.folderTitle
|
||||
if (params.folderUidNew) params.uid = params.folderUidNew
|
||||
if (params.annotationTags) params.tags = params.annotationTags
|
||||
if (params.annotationDashboardUid) params.dashboardUid = params.annotationDashboardUid
|
||||
return params.operation
|
||||
},
|
||||
params: (params) => {
|
||||
const result: Record<string, unknown> = {}
|
||||
if (params.panelId) result.panelId = Number(params.panelId)
|
||||
if (params.annotationId) result.annotationId = Number(params.annotationId)
|
||||
if (params.time) result.time = Number(params.time)
|
||||
if (params.timeEnd) result.timeEnd = Number(params.timeEnd)
|
||||
if (params.from) result.from = Number(params.from)
|
||||
if (params.to) result.to = Number(params.to)
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
|
||||
446
apps/sim/blocks/blocks/hex.ts
Normal file
446
apps/sim/blocks/blocks/hex.ts
Normal file
@@ -0,0 +1,446 @@
|
||||
import { HexIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode } from '@/blocks/types'
|
||||
import type { HexResponse } from '@/tools/hex/types'
|
||||
|
||||
export const HexBlock: BlockConfig<HexResponse> = {
|
||||
type: 'hex',
|
||||
name: 'Hex',
|
||||
description: 'Run and manage Hex projects',
|
||||
longDescription:
|
||||
'Integrate Hex into your workflow. Run projects, check run status, manage collections and groups, list users, and view data connections. Requires a Hex API token.',
|
||||
docsLink: 'https://docs.sim.ai/tools/hex',
|
||||
category: 'tools',
|
||||
bgColor: '#F5E6FF',
|
||||
icon: HexIcon,
|
||||
authMode: AuthMode.ApiKey,
|
||||
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Run Project', id: 'run_project' },
|
||||
{ label: 'Get Run Status', id: 'get_run_status' },
|
||||
{ label: 'Get Project Runs', id: 'get_project_runs' },
|
||||
{ label: 'Cancel Run', id: 'cancel_run' },
|
||||
{ label: 'List Projects', id: 'list_projects' },
|
||||
{ label: 'Get Project', id: 'get_project' },
|
||||
{ label: 'Update Project', id: 'update_project' },
|
||||
{ label: 'Get Queried Tables', id: 'get_queried_tables' },
|
||||
{ label: 'List Users', id: 'list_users' },
|
||||
{ label: 'List Groups', id: 'list_groups' },
|
||||
{ label: 'Get Group', id: 'get_group' },
|
||||
{ label: 'List Collections', id: 'list_collections' },
|
||||
{ label: 'Get Collection', id: 'get_collection' },
|
||||
{ label: 'Create Collection', id: 'create_collection' },
|
||||
{ label: 'List Data Connections', id: 'list_data_connections' },
|
||||
{ label: 'Get Data Connection', id: 'get_data_connection' },
|
||||
],
|
||||
value: () => 'run_project',
|
||||
},
|
||||
{
|
||||
id: 'projectId',
|
||||
title: 'Project ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter project UUID',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'run_project',
|
||||
'get_run_status',
|
||||
'get_project_runs',
|
||||
'cancel_run',
|
||||
'get_project',
|
||||
'update_project',
|
||||
'get_queried_tables',
|
||||
],
|
||||
},
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'run_project',
|
||||
'get_run_status',
|
||||
'get_project_runs',
|
||||
'cancel_run',
|
||||
'get_project',
|
||||
'update_project',
|
||||
'get_queried_tables',
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'runId',
|
||||
title: 'Run ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter run UUID',
|
||||
condition: { field: 'operation', value: ['get_run_status', 'cancel_run'] },
|
||||
required: { field: 'operation', value: ['get_run_status', 'cancel_run'] },
|
||||
},
|
||||
{
|
||||
id: 'inputParams',
|
||||
title: 'Input Parameters',
|
||||
type: 'code',
|
||||
placeholder: '{"param_name": "value"}',
|
||||
condition: { field: 'operation', value: 'run_project' },
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
maintainHistory: true,
|
||||
prompt: `You are an expert at creating Hex project input parameters.
|
||||
Generate ONLY the raw JSON object based on the user's request.
|
||||
The output MUST be a single, valid JSON object, starting with { and ending with }.
|
||||
|
||||
Current parameters: {context}
|
||||
|
||||
Do not include any explanations, markdown formatting, or other text outside the JSON object.
|
||||
The keys should match the input parameter names defined in the Hex project.
|
||||
|
||||
Example:
|
||||
{
|
||||
"date_range": "2024-01-01",
|
||||
"department": "engineering",
|
||||
"include_inactive": false
|
||||
}`,
|
||||
placeholder: 'Describe the input parameters you need...',
|
||||
generationType: 'json-object',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'projectStatus',
|
||||
title: 'Status',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter status name (e.g., custom workspace status label)',
|
||||
condition: { field: 'operation', value: 'update_project' },
|
||||
required: { field: 'operation', value: 'update_project' },
|
||||
},
|
||||
{
|
||||
id: 'runStatusFilter',
|
||||
title: 'Status Filter',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'All', id: '' },
|
||||
{ label: 'Pending', id: 'PENDING' },
|
||||
{ label: 'Running', id: 'RUNNING' },
|
||||
{ label: 'Completed', id: 'COMPLETED' },
|
||||
{ label: 'Errored', id: 'ERRORED' },
|
||||
{ label: 'Killed', id: 'KILLED' },
|
||||
],
|
||||
value: () => '',
|
||||
condition: { field: 'operation', value: 'get_project_runs' },
|
||||
},
|
||||
{
|
||||
id: 'groupIdInput',
|
||||
title: 'Group ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter group UUID',
|
||||
condition: { field: 'operation', value: 'get_group' },
|
||||
required: { field: 'operation', value: 'get_group' },
|
||||
},
|
||||
{
|
||||
id: 'collectionId',
|
||||
title: 'Collection ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter collection UUID',
|
||||
condition: { field: 'operation', value: 'get_collection' },
|
||||
required: { field: 'operation', value: 'get_collection' },
|
||||
},
|
||||
{
|
||||
id: 'collectionName',
|
||||
title: 'Collection Name',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter collection name',
|
||||
condition: { field: 'operation', value: 'create_collection' },
|
||||
required: { field: 'operation', value: 'create_collection' },
|
||||
},
|
||||
{
|
||||
id: 'collectionDescription',
|
||||
title: 'Description',
|
||||
type: 'long-input',
|
||||
placeholder: 'Optional description for the collection',
|
||||
condition: { field: 'operation', value: 'create_collection' },
|
||||
},
|
||||
{
|
||||
id: 'dataConnectionId',
|
||||
title: 'Data Connection ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter data connection UUID',
|
||||
condition: { field: 'operation', value: 'get_data_connection' },
|
||||
required: { field: 'operation', value: 'get_data_connection' },
|
||||
},
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'API Key',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter your Hex API token',
|
||||
password: true,
|
||||
required: true,
|
||||
},
|
||||
// Advanced fields
|
||||
{
|
||||
id: 'dryRun',
|
||||
title: 'Dry Run',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'run_project' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'updateCache',
|
||||
title: 'Update Cache',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'run_project' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'updatePublishedResults',
|
||||
title: 'Update Published Results',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'run_project' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'useCachedSqlResults',
|
||||
title: 'Use Cached SQL Results',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'run_project' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'limit',
|
||||
title: 'Limit',
|
||||
type: 'short-input',
|
||||
placeholder: '25',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'list_projects',
|
||||
'get_project_runs',
|
||||
'get_queried_tables',
|
||||
'list_users',
|
||||
'list_groups',
|
||||
'list_collections',
|
||||
'list_data_connections',
|
||||
],
|
||||
},
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'offset',
|
||||
title: 'Offset',
|
||||
type: 'short-input',
|
||||
placeholder: '0',
|
||||
condition: { field: 'operation', value: 'get_project_runs' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'includeArchived',
|
||||
title: 'Include Archived',
|
||||
type: 'switch',
|
||||
condition: { field: 'operation', value: 'list_projects' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'statusFilter',
|
||||
title: 'Status Filter',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'All', id: '' },
|
||||
{ label: 'Published', id: 'PUBLISHED' },
|
||||
{ label: 'Draft', id: 'DRAFT' },
|
||||
],
|
||||
value: () => '',
|
||||
condition: { field: 'operation', value: 'list_projects' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'groupId',
|
||||
title: 'Filter by Group',
|
||||
type: 'short-input',
|
||||
placeholder: 'Group UUID (optional)',
|
||||
condition: { field: 'operation', value: 'list_users' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
],
|
||||
|
||||
tools: {
|
||||
access: [
|
||||
'hex_cancel_run',
|
||||
'hex_create_collection',
|
||||
'hex_get_collection',
|
||||
'hex_get_data_connection',
|
||||
'hex_get_group',
|
||||
'hex_get_project',
|
||||
'hex_get_project_runs',
|
||||
'hex_get_queried_tables',
|
||||
'hex_get_run_status',
|
||||
'hex_list_collections',
|
||||
'hex_list_data_connections',
|
||||
'hex_list_groups',
|
||||
'hex_list_projects',
|
||||
'hex_list_users',
|
||||
'hex_run_project',
|
||||
'hex_update_project',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
switch (params.operation) {
|
||||
case 'run_project':
|
||||
return 'hex_run_project'
|
||||
case 'get_run_status':
|
||||
return 'hex_get_run_status'
|
||||
case 'get_project_runs':
|
||||
return 'hex_get_project_runs'
|
||||
case 'cancel_run':
|
||||
return 'hex_cancel_run'
|
||||
case 'list_projects':
|
||||
return 'hex_list_projects'
|
||||
case 'get_project':
|
||||
return 'hex_get_project'
|
||||
case 'update_project':
|
||||
return 'hex_update_project'
|
||||
case 'get_queried_tables':
|
||||
return 'hex_get_queried_tables'
|
||||
case 'list_users':
|
||||
return 'hex_list_users'
|
||||
case 'list_groups':
|
||||
return 'hex_list_groups'
|
||||
case 'get_group':
|
||||
return 'hex_get_group'
|
||||
case 'list_collections':
|
||||
return 'hex_list_collections'
|
||||
case 'get_collection':
|
||||
return 'hex_get_collection'
|
||||
case 'create_collection':
|
||||
return 'hex_create_collection'
|
||||
case 'list_data_connections':
|
||||
return 'hex_list_data_connections'
|
||||
case 'get_data_connection':
|
||||
return 'hex_get_data_connection'
|
||||
default:
|
||||
return 'hex_run_project'
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const result: Record<string, unknown> = {}
|
||||
const op = params.operation
|
||||
|
||||
if (params.limit) result.limit = Number(params.limit)
|
||||
if (op === 'get_project_runs' && params.offset) result.offset = Number(params.offset)
|
||||
if (op === 'update_project' && params.projectStatus) result.status = params.projectStatus
|
||||
if (op === 'get_project_runs' && params.runStatusFilter)
|
||||
result.statusFilter = params.runStatusFilter
|
||||
if (op === 'get_group' && params.groupIdInput) result.groupId = params.groupIdInput
|
||||
if (op === 'list_users' && params.groupId) result.groupId = params.groupId
|
||||
if (op === 'create_collection' && params.collectionName) result.name = params.collectionName
|
||||
if (op === 'create_collection' && params.collectionDescription)
|
||||
result.description = params.collectionDescription
|
||||
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'Operation to perform' },
|
||||
apiKey: { type: 'string', description: 'Hex API token' },
|
||||
projectId: { type: 'string', description: 'Project UUID' },
|
||||
runId: { type: 'string', description: 'Run UUID' },
|
||||
inputParams: { type: 'json', description: 'Input parameters for project run' },
|
||||
dryRun: { type: 'boolean', description: 'Perform a dry run without executing the project' },
|
||||
updateCache: {
|
||||
type: 'boolean',
|
||||
description: '(Deprecated) Update cached results after execution',
|
||||
},
|
||||
updatePublishedResults: {
|
||||
type: 'boolean',
|
||||
description: 'Update published app results after execution',
|
||||
},
|
||||
useCachedSqlResults: {
|
||||
type: 'boolean',
|
||||
description: 'Use cached SQL results instead of re-running queries',
|
||||
},
|
||||
projectStatus: {
|
||||
type: 'string',
|
||||
description: 'New project status name (custom workspace status label)',
|
||||
},
|
||||
limit: { type: 'number', description: 'Max number of results to return' },
|
||||
offset: { type: 'number', description: 'Offset for paginated results' },
|
||||
includeArchived: { type: 'boolean', description: 'Include archived projects' },
|
||||
statusFilter: { type: 'string', description: 'Filter projects by status' },
|
||||
runStatusFilter: { type: 'string', description: 'Filter runs by status' },
|
||||
groupId: { type: 'string', description: 'Filter users by group UUID' },
|
||||
groupIdInput: { type: 'string', description: 'Group UUID for get group' },
|
||||
collectionId: { type: 'string', description: 'Collection UUID' },
|
||||
collectionName: { type: 'string', description: 'Collection name' },
|
||||
collectionDescription: { type: 'string', description: 'Collection description' },
|
||||
dataConnectionId: { type: 'string', description: 'Data connection UUID' },
|
||||
},
|
||||
|
||||
outputs: {
|
||||
// Run creation outputs
|
||||
projectId: { type: 'string', description: 'Project UUID' },
|
||||
runId: { type: 'string', description: 'Run UUID' },
|
||||
runUrl: { type: 'string', description: 'URL to view the run' },
|
||||
runStatusUrl: { type: 'string', description: 'URL to check run status' },
|
||||
projectVersion: { type: 'number', description: 'Project version number' },
|
||||
// Run status outputs
|
||||
status: {
|
||||
type: 'json',
|
||||
description: 'Project status object ({ name }) or run status string',
|
||||
},
|
||||
startTime: { type: 'string', description: 'Run start time' },
|
||||
endTime: { type: 'string', description: 'Run end time' },
|
||||
elapsedTime: { type: 'number', description: 'Elapsed time in seconds' },
|
||||
traceId: { type: 'string', description: 'Trace ID for debugging' },
|
||||
// Project outputs
|
||||
id: { type: 'string', description: 'Resource ID' },
|
||||
title: { type: 'string', description: 'Project title' },
|
||||
name: { type: 'string', description: 'Resource name' },
|
||||
description: { type: 'string', description: 'Resource description' },
|
||||
type: { type: 'string', description: 'Project type (PROJECT or COMPONENT)' },
|
||||
createdAt: { type: 'string', description: 'Creation timestamp' },
|
||||
updatedAt: { type: 'string', description: 'Last update timestamp' },
|
||||
lastEditedAt: { type: 'string', description: 'Last edited timestamp' },
|
||||
lastPublishedAt: { type: 'string', description: 'Last published timestamp' },
|
||||
archivedAt: { type: 'string', description: 'Archived timestamp' },
|
||||
trashedAt: { type: 'string', description: 'Trashed timestamp' },
|
||||
// List outputs
|
||||
projects: {
|
||||
type: 'json',
|
||||
description: 'List of projects with id, title, status, type, creator, owner, createdAt',
|
||||
},
|
||||
runs: {
|
||||
type: 'json',
|
||||
description:
|
||||
'List of runs with runId, status, runUrl, startTime, endTime, elapsedTime, projectVersion',
|
||||
},
|
||||
users: { type: 'json', description: 'List of users with id, name, email, role' },
|
||||
groups: { type: 'json', description: 'List of groups with id, name, createdAt' },
|
||||
collections: {
|
||||
type: 'json',
|
||||
description: 'List of collections with id, name, description, creator',
|
||||
},
|
||||
connections: {
|
||||
type: 'json',
|
||||
description:
|
||||
'List of data connections with id, name, type, description, connectViaSsh, includeMagic, allowWritebackCells',
|
||||
},
|
||||
tables: {
|
||||
type: 'json',
|
||||
description: 'List of queried tables with dataConnectionId, dataConnectionName, tableName',
|
||||
},
|
||||
categories: {
|
||||
type: 'json',
|
||||
description: 'Project categories with name and description',
|
||||
},
|
||||
creator: { type: 'json', description: 'Creator details ({ email, id })' },
|
||||
owner: { type: 'json', description: 'Owner details ({ email })' },
|
||||
total: { type: 'number', description: 'Total results returned' },
|
||||
// Cancel output
|
||||
success: { type: 'boolean', description: 'Whether the operation succeeded' },
|
||||
// Data connection flags
|
||||
connectViaSsh: { type: 'boolean', description: 'SSH tunneling enabled' },
|
||||
includeMagic: { type: 'boolean', description: 'Magic AI features enabled' },
|
||||
allowWritebackCells: { type: 'boolean', description: 'Writeback cells allowed' },
|
||||
},
|
||||
}
|
||||
@@ -204,11 +204,6 @@ Return ONLY the search query text - no explanations.`,
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
// Convert numeric parameters
|
||||
if (params.limit) {
|
||||
params.limit = Number(params.limit)
|
||||
}
|
||||
|
||||
switch (params.operation) {
|
||||
case 'hunter_discover':
|
||||
return 'hunter_discover'
|
||||
@@ -226,6 +221,11 @@ Return ONLY the search query text - no explanations.`,
|
||||
return 'hunter_domain_search'
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const result: Record<string, unknown> = {}
|
||||
if (params.limit) result.limit = Number(params.limit)
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
|
||||
@@ -826,16 +826,6 @@ Return ONLY the JSON array - no explanations or markdown formatting.`,
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
// Convert page_size to a number if provided
|
||||
if (params.page_size) {
|
||||
params.page_size = Number(params.page_size)
|
||||
}
|
||||
|
||||
// Convert notify_incident_channel from string to boolean
|
||||
if (params.notify_incident_channel !== undefined) {
|
||||
params.notify_incident_channel = params.notify_incident_channel === 'true'
|
||||
}
|
||||
|
||||
switch (params.operation) {
|
||||
case 'incidentio_incidents_list':
|
||||
return 'incidentio_incidents_list'
|
||||
@@ -929,6 +919,14 @@ Return ONLY the JSON array - no explanations or markdown formatting.`,
|
||||
return 'incidentio_incidents_list'
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const result: Record<string, unknown> = {}
|
||||
if (params.page_size) result.page_size = Number(params.page_size)
|
||||
if (params.notify_incident_channel !== undefined) {
|
||||
result.notify_incident_channel = params.notify_incident_channel === 'true'
|
||||
}
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
|
||||
@@ -100,6 +100,19 @@ export const JiraServiceManagementBlock: BlockConfig<JsmResponse> = {
|
||||
title: 'Service Desk ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter service desk ID',
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'get_request_types',
|
||||
'create_request',
|
||||
'get_customers',
|
||||
'add_customer',
|
||||
'get_organizations',
|
||||
'add_organization',
|
||||
'get_queues',
|
||||
'get_request_type_fields',
|
||||
],
|
||||
},
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
@@ -207,9 +220,10 @@ Return ONLY the description text - no explanations.`,
|
||||
},
|
||||
{
|
||||
id: 'requestFieldValues',
|
||||
title: 'Custom Field Values',
|
||||
title: 'Request Field Values',
|
||||
type: 'long-input',
|
||||
placeholder: 'JSON object of custom field values (e.g., {"customfield_10010": "value"})',
|
||||
placeholder:
|
||||
'JSON object of field values (e.g., {"summary": "Title", "customfield_10010": "value"})',
|
||||
condition: { field: 'operation', value: 'create_request' },
|
||||
},
|
||||
{
|
||||
@@ -775,7 +789,7 @@ Return ONLY the comment text - no explanations.`,
|
||||
description: 'Comma-separated account IDs for request participants',
|
||||
},
|
||||
channel: { type: 'string', description: 'Channel (e.g., portal, email)' },
|
||||
requestFieldValues: { type: 'string', description: 'JSON object of custom field values' },
|
||||
requestFieldValues: { type: 'string', description: 'JSON object of request field values' },
|
||||
searchQuery: { type: 'string', description: 'Filter request types by name' },
|
||||
groupId: { type: 'string', description: 'Filter by request type group ID' },
|
||||
expand: { type: 'string', description: 'Comma-separated fields to expand' },
|
||||
|
||||
@@ -26,15 +26,29 @@ export const KnowledgeBlock: BlockConfig = {
|
||||
],
|
||||
value: () => 'search',
|
||||
},
|
||||
// Knowledge base selector - basic mode
|
||||
{
|
||||
id: 'knowledgeBaseId',
|
||||
id: 'knowledgeBaseSelector',
|
||||
title: 'Knowledge Base',
|
||||
type: 'knowledge-base-selector',
|
||||
canonicalParamId: 'knowledgeBaseId',
|
||||
mode: 'basic',
|
||||
placeholder: 'Select knowledge base',
|
||||
multiSelect: false,
|
||||
required: true,
|
||||
condition: { field: 'operation', value: ['search', 'upload_chunk', 'create_document'] },
|
||||
},
|
||||
// Knowledge base ID manual input - advanced mode
|
||||
{
|
||||
id: 'manualKnowledgeBaseId',
|
||||
title: 'Knowledge Base ID',
|
||||
type: 'short-input',
|
||||
canonicalParamId: 'knowledgeBaseId',
|
||||
mode: 'advanced',
|
||||
placeholder: 'Enter knowledge base ID',
|
||||
required: true,
|
||||
condition: { field: 'operation', value: ['search', 'upload_chunk', 'create_document'] },
|
||||
},
|
||||
{
|
||||
id: 'query',
|
||||
title: 'Search Query',
|
||||
|
||||
@@ -169,17 +169,7 @@ export const LemlistBlock: BlockConfig<LemlistResponse> = {
|
||||
access: ['lemlist_get_activities', 'lemlist_get_lead', 'lemlist_send_email'],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
if (params.limit) {
|
||||
params.limit = Number(params.limit)
|
||||
}
|
||||
if (params.offset) {
|
||||
params.offset = Number(params.offset)
|
||||
}
|
||||
// Map filterLeadId to leadId for get_activities tool
|
||||
if (params.filterLeadId) {
|
||||
params.leadId = params.filterLeadId
|
||||
}
|
||||
|
||||
if (params.filterLeadId) params.leadId = params.filterLeadId
|
||||
switch (params.operation) {
|
||||
case 'get_activities':
|
||||
return 'lemlist_get_activities'
|
||||
@@ -191,6 +181,12 @@ export const LemlistBlock: BlockConfig<LemlistResponse> = {
|
||||
return 'lemlist_get_activities'
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const result: Record<string, unknown> = {}
|
||||
if (params.limit) result.limit = Number(params.limit)
|
||||
if (params.offset) result.offset = Number(params.offset)
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
|
||||
@@ -149,66 +149,48 @@ export const ParallelBlock: BlockConfig<ToolResponse> = {
|
||||
access: ['parallel_search', 'parallel_extract', 'parallel_deep_research'],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
if (params.extract_objective) params.objective = params.extract_objective
|
||||
if (params.research_input) params.input = params.research_input
|
||||
switch (params.operation) {
|
||||
case 'search':
|
||||
// Convert search_queries from comma-separated string to array (if provided)
|
||||
if (params.search_queries && typeof params.search_queries === 'string') {
|
||||
const queries = params.search_queries
|
||||
.split(',')
|
||||
.map((query: string) => query.trim())
|
||||
.filter((query: string) => query.length > 0)
|
||||
// Only set if we have actual queries
|
||||
if (queries.length > 0) {
|
||||
params.search_queries = queries
|
||||
} else {
|
||||
params.search_queries = undefined
|
||||
}
|
||||
}
|
||||
|
||||
// Convert numeric parameters
|
||||
if (params.max_results) {
|
||||
params.max_results = Number(params.max_results)
|
||||
}
|
||||
if (params.max_chars_per_result) {
|
||||
params.max_chars_per_result = Number(params.max_chars_per_result)
|
||||
}
|
||||
|
||||
return 'parallel_search'
|
||||
|
||||
case 'extract':
|
||||
// Map extract_objective to objective for the tool
|
||||
params.objective = params.extract_objective
|
||||
|
||||
// Convert boolean strings to actual booleans with defaults
|
||||
if (params.excerpts === 'true' || params.excerpts === true) {
|
||||
params.excerpts = true
|
||||
} else if (params.excerpts === 'false' || params.excerpts === false) {
|
||||
params.excerpts = false
|
||||
} else {
|
||||
// Default to true if not provided
|
||||
params.excerpts = true
|
||||
}
|
||||
|
||||
if (params.full_content === 'true' || params.full_content === true) {
|
||||
params.full_content = true
|
||||
} else if (params.full_content === 'false' || params.full_content === false) {
|
||||
params.full_content = false
|
||||
} else {
|
||||
// Default to false if not provided
|
||||
params.full_content = false
|
||||
}
|
||||
|
||||
return 'parallel_extract'
|
||||
|
||||
case 'deep_research':
|
||||
// Map research_input to input for the tool
|
||||
params.input = params.research_input
|
||||
return 'parallel_deep_research'
|
||||
|
||||
default:
|
||||
return 'parallel_search'
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const result: Record<string, unknown> = {}
|
||||
const operation = params.operation
|
||||
|
||||
if (operation === 'search') {
|
||||
if (params.search_queries && typeof params.search_queries === 'string') {
|
||||
const queries = params.search_queries
|
||||
.split(',')
|
||||
.map((query: string) => query.trim())
|
||||
.filter((query: string) => query.length > 0)
|
||||
if (queries.length > 0) {
|
||||
result.search_queries = queries
|
||||
} else {
|
||||
result.search_queries = undefined
|
||||
}
|
||||
}
|
||||
if (params.max_results) result.max_results = Number(params.max_results)
|
||||
if (params.max_chars_per_result) {
|
||||
result.max_chars_per_result = Number(params.max_chars_per_result)
|
||||
}
|
||||
}
|
||||
|
||||
if (operation === 'extract') {
|
||||
result.excerpts = !(params.excerpts === 'false' || params.excerpts === false)
|
||||
result.full_content = params.full_content === 'true' || params.full_content === true
|
||||
}
|
||||
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
|
||||
@@ -1185,22 +1185,15 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
// Convert numeric parameters
|
||||
if (params.limit) params.limit = Number(params.limit)
|
||||
if (params.offset) params.offset = Number(params.offset)
|
||||
if (params.rolloutPercentage) params.rolloutPercentage = Number(params.rolloutPercentage)
|
||||
|
||||
// Map projectIdParam to projectId for get_project operation
|
||||
// Field renames in tool() are safe (they copy values, not coerce types)
|
||||
// and are needed for serialization-time validation of required fields
|
||||
if (params.operation === 'posthog_get_project' && params.projectIdParam) {
|
||||
params.projectId = params.projectIdParam
|
||||
}
|
||||
|
||||
// Map personalApiKey to apiKey for all private endpoint tools
|
||||
if (params.personalApiKey) {
|
||||
params.apiKey = params.personalApiKey
|
||||
}
|
||||
|
||||
// Map featureFlagId to flagId for feature flag operations
|
||||
const flagOps = [
|
||||
'posthog_get_feature_flag',
|
||||
'posthog_update_feature_flag',
|
||||
@@ -1210,7 +1203,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
params.flagId = params.featureFlagId
|
||||
}
|
||||
|
||||
// Map surveyType to type for survey operations
|
||||
if (
|
||||
(params.operation === 'posthog_create_survey' ||
|
||||
params.operation === 'posthog_update_survey') &&
|
||||
@@ -1219,37 +1211,30 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
params.type = params.surveyType
|
||||
}
|
||||
|
||||
// Map isStatic for cohorts
|
||||
if (params.operation === 'posthog_create_cohort' && params.isStatic !== undefined) {
|
||||
params.is_static = params.isStatic
|
||||
}
|
||||
|
||||
// Map dateMarker to date_marker for annotations
|
||||
if (params.operation === 'posthog_create_annotation' && params.dateMarker) {
|
||||
params.date_marker = params.dateMarker
|
||||
}
|
||||
|
||||
// Map propertyType to property_type
|
||||
if (params.operation === 'posthog_update_property_definition' && params.propertyType) {
|
||||
params.property_type = params.propertyType
|
||||
}
|
||||
|
||||
// Map insightQuery to query for insights
|
||||
if (params.operation === 'posthog_create_insight' && params.insightQuery) {
|
||||
params.query = params.insightQuery
|
||||
}
|
||||
|
||||
// Map insightTags to tags for insights
|
||||
if (params.operation === 'posthog_create_insight' && params.insightTags) {
|
||||
params.tags = params.insightTags
|
||||
}
|
||||
|
||||
// Map distinctIdFilter to distinctId for list_persons
|
||||
if (params.operation === 'posthog_list_persons' && params.distinctIdFilter) {
|
||||
params.distinctId = params.distinctIdFilter
|
||||
}
|
||||
|
||||
// Map experiment date fields
|
||||
if (params.operation === 'posthog_create_experiment') {
|
||||
if (params.experimentStartDate) {
|
||||
params.startDate = params.experimentStartDate
|
||||
@@ -1259,7 +1244,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
}
|
||||
}
|
||||
|
||||
// Map survey date fields
|
||||
if (
|
||||
params.operation === 'posthog_create_survey' ||
|
||||
params.operation === 'posthog_update_survey'
|
||||
@@ -1272,13 +1256,17 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
}
|
||||
}
|
||||
|
||||
// Convert responsesLimit to number
|
||||
if (params.responsesLimit) {
|
||||
params.responsesLimit = Number(params.responsesLimit)
|
||||
}
|
||||
|
||||
return params.operation as string
|
||||
},
|
||||
params: (params) => {
|
||||
const result: Record<string, unknown> = {}
|
||||
if (params.limit) result.limit = Number(params.limit)
|
||||
if (params.offset) result.offset = Number(params.offset)
|
||||
if (params.rolloutPercentage) result.rolloutPercentage = Number(params.rolloutPercentage)
|
||||
if (params.responsesLimit) result.responsesLimit = Number(params.responsesLimit)
|
||||
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
|
||||
320
apps/sim/blocks/blocks/redis.ts
Normal file
320
apps/sim/blocks/blocks/redis.ts
Normal file
@@ -0,0 +1,320 @@
|
||||
import { RedisIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode } from '@/blocks/types'
|
||||
import type {
|
||||
RedisCommandResponse,
|
||||
RedisDeleteResponse,
|
||||
RedisExistsResponse,
|
||||
RedisExpireResponse,
|
||||
RedisGetResponse,
|
||||
RedisHDelResponse,
|
||||
RedisHGetAllResponse,
|
||||
RedisHGetResponse,
|
||||
RedisHSetResponse,
|
||||
RedisIncrbyResponse,
|
||||
RedisIncrResponse,
|
||||
RedisKeysResponse,
|
||||
RedisLLenResponse,
|
||||
RedisLPopResponse,
|
||||
RedisLPushResponse,
|
||||
RedisLRangeResponse,
|
||||
RedisPersistResponse,
|
||||
RedisRPopResponse,
|
||||
RedisRPushResponse,
|
||||
RedisSetnxResponse,
|
||||
RedisSetResponse,
|
||||
RedisTtlResponse,
|
||||
} from '@/tools/redis/types'
|
||||
|
||||
type RedisResponse =
|
||||
| RedisGetResponse
|
||||
| RedisSetResponse
|
||||
| RedisDeleteResponse
|
||||
| RedisKeysResponse
|
||||
| RedisCommandResponse
|
||||
| RedisHSetResponse
|
||||
| RedisHGetResponse
|
||||
| RedisHGetAllResponse
|
||||
| RedisHDelResponse
|
||||
| RedisIncrResponse
|
||||
| RedisIncrbyResponse
|
||||
| RedisExpireResponse
|
||||
| RedisTtlResponse
|
||||
| RedisPersistResponse
|
||||
| RedisLPushResponse
|
||||
| RedisRPushResponse
|
||||
| RedisLPopResponse
|
||||
| RedisRPopResponse
|
||||
| RedisLLenResponse
|
||||
| RedisLRangeResponse
|
||||
| RedisExistsResponse
|
||||
| RedisSetnxResponse
|
||||
|
||||
const KEY_OPERATIONS = [
|
||||
'get',
|
||||
'set',
|
||||
'delete',
|
||||
'hset',
|
||||
'hget',
|
||||
'hgetall',
|
||||
'hdel',
|
||||
'incr',
|
||||
'incrby',
|
||||
'exists',
|
||||
'setnx',
|
||||
'lpush',
|
||||
'rpush',
|
||||
'lpop',
|
||||
'rpop',
|
||||
'llen',
|
||||
'lrange',
|
||||
'expire',
|
||||
'persist',
|
||||
'ttl',
|
||||
] as const
|
||||
|
||||
export const RedisBlock: BlockConfig<RedisResponse> = {
|
||||
type: 'redis',
|
||||
name: 'Redis',
|
||||
description: 'Key-value operations with Redis',
|
||||
longDescription:
|
||||
'Connect to any Redis instance to perform key-value, hash, list, and utility operations via a direct connection.',
|
||||
docsLink: 'https://docs.sim.ai/tools/redis',
|
||||
category: 'tools',
|
||||
bgColor: '#FF4438',
|
||||
authMode: AuthMode.ApiKey,
|
||||
icon: RedisIcon,
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Get', id: 'get' },
|
||||
{ label: 'Set', id: 'set' },
|
||||
{ label: 'Delete', id: 'delete' },
|
||||
{ label: 'List Keys', id: 'keys' },
|
||||
{ label: 'HSET', id: 'hset' },
|
||||
{ label: 'HGET', id: 'hget' },
|
||||
{ label: 'HGETALL', id: 'hgetall' },
|
||||
{ label: 'HDEL', id: 'hdel' },
|
||||
{ label: 'INCR', id: 'incr' },
|
||||
{ label: 'INCRBY', id: 'incrby' },
|
||||
{ label: 'EXISTS', id: 'exists' },
|
||||
{ label: 'SETNX', id: 'setnx' },
|
||||
{ label: 'LPUSH', id: 'lpush' },
|
||||
{ label: 'RPUSH', id: 'rpush' },
|
||||
{ label: 'LPOP', id: 'lpop' },
|
||||
{ label: 'RPOP', id: 'rpop' },
|
||||
{ label: 'LLEN', id: 'llen' },
|
||||
{ label: 'LRANGE', id: 'lrange' },
|
||||
{ label: 'EXPIRE', id: 'expire' },
|
||||
{ label: 'PERSIST', id: 'persist' },
|
||||
{ label: 'TTL', id: 'ttl' },
|
||||
{ label: 'Command', id: 'command' },
|
||||
],
|
||||
value: () => 'get',
|
||||
},
|
||||
{
|
||||
id: 'url',
|
||||
title: 'Connection URL',
|
||||
type: 'short-input',
|
||||
placeholder: 'redis://user:password@host:port',
|
||||
password: true,
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'key',
|
||||
title: 'Key',
|
||||
type: 'short-input',
|
||||
placeholder: 'my-key',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: [...KEY_OPERATIONS],
|
||||
},
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: [...KEY_OPERATIONS],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'value',
|
||||
title: 'Value',
|
||||
type: 'long-input',
|
||||
placeholder: 'Value to store',
|
||||
condition: { field: 'operation', value: ['set', 'setnx', 'hset', 'lpush', 'rpush'] },
|
||||
required: { field: 'operation', value: ['set', 'setnx', 'hset', 'lpush', 'rpush'] },
|
||||
},
|
||||
{
|
||||
id: 'ex',
|
||||
title: 'Expiration (seconds)',
|
||||
type: 'short-input',
|
||||
placeholder: 'Optional TTL in seconds',
|
||||
condition: { field: 'operation', value: 'set' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'field',
|
||||
title: 'Field',
|
||||
type: 'short-input',
|
||||
placeholder: 'Hash field name',
|
||||
condition: { field: 'operation', value: ['hset', 'hget', 'hdel'] },
|
||||
required: { field: 'operation', value: ['hset', 'hget', 'hdel'] },
|
||||
},
|
||||
{
|
||||
id: 'pattern',
|
||||
title: 'Pattern',
|
||||
type: 'short-input',
|
||||
placeholder: '* (all keys) or user:* (prefix match)',
|
||||
condition: { field: 'operation', value: 'keys' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'seconds',
|
||||
title: 'Seconds',
|
||||
type: 'short-input',
|
||||
placeholder: 'Timeout in seconds',
|
||||
condition: { field: 'operation', value: 'expire' },
|
||||
required: { field: 'operation', value: 'expire' },
|
||||
},
|
||||
{
|
||||
id: 'increment',
|
||||
title: 'Increment',
|
||||
type: 'short-input',
|
||||
placeholder: 'Amount to increment by (negative to decrement)',
|
||||
condition: { field: 'operation', value: 'incrby' },
|
||||
required: { field: 'operation', value: 'incrby' },
|
||||
},
|
||||
{
|
||||
id: 'start',
|
||||
title: 'Start Index',
|
||||
type: 'short-input',
|
||||
placeholder: '0',
|
||||
condition: { field: 'operation', value: 'lrange' },
|
||||
required: { field: 'operation', value: 'lrange' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'stop',
|
||||
title: 'Stop Index',
|
||||
type: 'short-input',
|
||||
placeholder: '-1 (all elements)',
|
||||
condition: { field: 'operation', value: 'lrange' },
|
||||
required: { field: 'operation', value: 'lrange' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'command',
|
||||
title: 'Command',
|
||||
type: 'code',
|
||||
placeholder: '["HSET", "myhash", "field1", "value1"]',
|
||||
condition: { field: 'operation', value: 'command' },
|
||||
required: { field: 'operation', value: 'command' },
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
access: [
|
||||
'redis_get',
|
||||
'redis_set',
|
||||
'redis_delete',
|
||||
'redis_keys',
|
||||
'redis_command',
|
||||
'redis_hset',
|
||||
'redis_hget',
|
||||
'redis_hgetall',
|
||||
'redis_hdel',
|
||||
'redis_incr',
|
||||
'redis_incrby',
|
||||
'redis_expire',
|
||||
'redis_ttl',
|
||||
'redis_persist',
|
||||
'redis_lpush',
|
||||
'redis_rpush',
|
||||
'redis_lpop',
|
||||
'redis_rpop',
|
||||
'redis_llen',
|
||||
'redis_lrange',
|
||||
'redis_exists',
|
||||
'redis_setnx',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
if (params.ex) {
|
||||
params.ex = Number(params.ex)
|
||||
}
|
||||
if (params.seconds !== undefined) {
|
||||
params.seconds = Number(params.seconds)
|
||||
}
|
||||
if (params.start !== undefined) {
|
||||
params.start = Number(params.start)
|
||||
}
|
||||
if (params.stop !== undefined) {
|
||||
params.stop = Number(params.stop)
|
||||
}
|
||||
if (params.increment !== undefined) {
|
||||
params.increment = Number(params.increment)
|
||||
}
|
||||
return `redis_${params.operation}`
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'Redis operation to perform' },
|
||||
url: { type: 'string', description: 'Redis connection URL' },
|
||||
key: { type: 'string', description: 'Redis key' },
|
||||
value: { type: 'string', description: 'Value to store' },
|
||||
ex: { type: 'number', description: 'Expiration time in seconds (SET)' },
|
||||
field: { type: 'string', description: 'Hash field name (HSET/HGET/HDEL)' },
|
||||
pattern: { type: 'string', description: 'Pattern to match keys (KEYS)' },
|
||||
seconds: { type: 'number', description: 'Timeout in seconds (EXPIRE)' },
|
||||
start: { type: 'number', description: 'Start index (LRANGE)' },
|
||||
stop: { type: 'number', description: 'Stop index (LRANGE)' },
|
||||
command: { type: 'string', description: 'Redis command as JSON array (Command)' },
|
||||
increment: { type: 'number', description: 'Amount to increment by (INCRBY)' },
|
||||
},
|
||||
outputs: {
|
||||
value: {
|
||||
type: 'json',
|
||||
description:
|
||||
'Retrieved value (Get, HGET, LPOP, RPOP: string or null) or new value after increment (INCR, INCRBY: number)',
|
||||
},
|
||||
result: {
|
||||
type: 'json',
|
||||
description: 'Operation result (Set, HSET, EXPIRE, PERSIST, Command operations)',
|
||||
},
|
||||
deletedCount: { type: 'number', description: 'Number of keys deleted (Delete operation)' },
|
||||
deleted: { type: 'number', description: 'Number of fields deleted (HDEL operation)' },
|
||||
keys: { type: 'array', description: 'List of keys matching the pattern (Keys operation)' },
|
||||
count: { type: 'number', description: 'Number of items found (Keys, LRANGE operations)' },
|
||||
key: { type: 'string', description: 'The key operated on' },
|
||||
fields: {
|
||||
type: 'json',
|
||||
description: 'Hash field-value pairs keyed by field name (HGETALL operation)',
|
||||
},
|
||||
fieldCount: { type: 'number', description: 'Number of fields in the hash (HGETALL operation)' },
|
||||
field: { type: 'string', description: 'Hash field name (HSET, HGET, HDEL operations)' },
|
||||
ttl: {
|
||||
type: 'number',
|
||||
description:
|
||||
'Remaining TTL in seconds. Positive integer if TTL set, -1 if no expiration, -2 if key does not exist.',
|
||||
},
|
||||
length: {
|
||||
type: 'number',
|
||||
description: 'List length (LPUSH, RPUSH, LLEN operations)',
|
||||
},
|
||||
values: {
|
||||
type: 'array',
|
||||
description: 'List elements in the specified range (LRANGE operation)',
|
||||
},
|
||||
command: { type: 'string', description: 'The command that was executed (Command operation)' },
|
||||
pattern: { type: 'string', description: 'The pattern used to match keys (Keys operation)' },
|
||||
exists: {
|
||||
type: 'boolean',
|
||||
description: 'Whether the key exists (true) or not (false) (EXISTS operation)',
|
||||
},
|
||||
wasSet: {
|
||||
type: 'boolean',
|
||||
description: 'Whether the key was set (true) or already existed (false) (SETNX operation)',
|
||||
},
|
||||
},
|
||||
}
|
||||
327
apps/sim/blocks/blocks/revenuecat.ts
Normal file
327
apps/sim/blocks/blocks/revenuecat.ts
Normal file
@@ -0,0 +1,327 @@
|
||||
import { RevenueCatIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode } from '@/blocks/types'
|
||||
import type { RevenueCatResponse } from '@/tools/revenuecat/types'
|
||||
|
||||
export const RevenueCatBlock: BlockConfig<RevenueCatResponse> = {
|
||||
type: 'revenuecat',
|
||||
name: 'RevenueCat',
|
||||
description: 'Manage in-app subscriptions and entitlements',
|
||||
authMode: AuthMode.ApiKey,
|
||||
longDescription:
|
||||
'Integrate RevenueCat into the workflow. Manage subscribers, entitlements, offerings, and Google Play subscriptions. Retrieve customer subscription status, grant or revoke promotional entitlements, record purchases, update subscriber attributes, and manage Google Play subscription billing.',
|
||||
docsLink: 'https://docs.sim.ai/tools/revenuecat',
|
||||
category: 'tools',
|
||||
bgColor: '#F25A5A',
|
||||
icon: RevenueCatIcon,
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Get Customer', id: 'get_customer' },
|
||||
{ label: 'Delete Customer', id: 'delete_customer' },
|
||||
{ label: 'Create Purchase', id: 'create_purchase' },
|
||||
{ label: 'Grant Entitlement', id: 'grant_entitlement' },
|
||||
{ label: 'Revoke Entitlement', id: 'revoke_entitlement' },
|
||||
{ label: 'List Offerings', id: 'list_offerings' },
|
||||
{ label: 'Update Subscriber Attributes', id: 'update_subscriber_attributes' },
|
||||
{ label: 'Defer Google Subscription', id: 'defer_google_subscription' },
|
||||
{ label: 'Refund Google Subscription', id: 'refund_google_subscription' },
|
||||
{ label: 'Revoke Google Subscription', id: 'revoke_google_subscription' },
|
||||
],
|
||||
value: () => 'get_customer',
|
||||
},
|
||||
{
|
||||
id: 'apiKey',
|
||||
title: 'API Key',
|
||||
type: 'short-input',
|
||||
password: true,
|
||||
placeholder: 'Enter your RevenueCat API key',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'appUserId',
|
||||
title: 'App User ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter the app user ID',
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'entitlementIdentifier',
|
||||
title: 'Entitlement Identifier',
|
||||
type: 'short-input',
|
||||
placeholder: 'e.g., premium, pro',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['grant_entitlement', 'revoke_entitlement'],
|
||||
},
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: ['grant_entitlement', 'revoke_entitlement'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'duration',
|
||||
title: 'Duration',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Daily', id: 'daily' },
|
||||
{ label: '3 Days', id: 'three_day' },
|
||||
{ label: 'Weekly', id: 'weekly' },
|
||||
{ label: 'Monthly', id: 'monthly' },
|
||||
{ label: '2 Months', id: 'two_month' },
|
||||
{ label: '3 Months', id: 'three_month' },
|
||||
{ label: '6 Months', id: 'six_month' },
|
||||
{ label: 'Yearly', id: 'yearly' },
|
||||
{ label: 'Lifetime', id: 'lifetime' },
|
||||
],
|
||||
value: () => 'monthly',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'grant_entitlement',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'startTimeMs',
|
||||
title: 'Start Time (ms)',
|
||||
type: 'short-input',
|
||||
placeholder: 'Optional start time in ms since epoch',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'grant_entitlement',
|
||||
},
|
||||
mode: 'advanced',
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a Unix epoch timestamp in milliseconds based on the user's description.
|
||||
The timestamp should represent the start time of a promotional entitlement.
|
||||
Setting a start time in the past allows shorter effective durations.
|
||||
Examples:
|
||||
- "right now" -> current time in milliseconds
|
||||
- "1 hour ago" -> current time minus 3600000 milliseconds
|
||||
- "yesterday" -> current time minus 86400000 milliseconds
|
||||
|
||||
Return ONLY the numeric timestamp, no text.`,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'fetchToken',
|
||||
title: 'Fetch Token',
|
||||
type: 'short-input',
|
||||
placeholder: 'Store receipt or purchase token (e.g., sub_...)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'create_purchase',
|
||||
},
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: 'create_purchase',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'productId',
|
||||
title: 'Product ID',
|
||||
type: 'short-input',
|
||||
placeholder: 'Product identifier',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'create_purchase',
|
||||
'defer_google_subscription',
|
||||
'refund_google_subscription',
|
||||
'revoke_google_subscription',
|
||||
],
|
||||
},
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'create_purchase',
|
||||
'defer_google_subscription',
|
||||
'refund_google_subscription',
|
||||
'revoke_google_subscription',
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'price',
|
||||
title: 'Price',
|
||||
type: 'short-input',
|
||||
placeholder: 'e.g., 9.99',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'create_purchase',
|
||||
},
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'currency',
|
||||
title: 'Currency',
|
||||
type: 'short-input',
|
||||
placeholder: 'e.g., USD',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'create_purchase',
|
||||
},
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'isRestore',
|
||||
title: 'Is Restore',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'No', id: 'false' },
|
||||
{ label: 'Yes', id: 'true' },
|
||||
],
|
||||
value: () => 'false',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'create_purchase',
|
||||
},
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'purchasePlatform',
|
||||
title: 'Platform',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'iOS', id: 'ios' },
|
||||
{ label: 'Android', id: 'android' },
|
||||
{ label: 'Amazon', id: 'amazon' },
|
||||
{ label: 'macOS', id: 'macos' },
|
||||
{ label: 'Stripe', id: 'stripe' },
|
||||
],
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'create_purchase',
|
||||
},
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'attributes',
|
||||
title: 'Attributes',
|
||||
type: 'long-input',
|
||||
placeholder: '{"$email": {"value": "user@example.com"}}',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'update_subscriber_attributes',
|
||||
},
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: 'update_subscriber_attributes',
|
||||
},
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
prompt: `Generate a JSON object of RevenueCat subscriber attributes based on the user's description.
|
||||
Each attribute key maps to an object with a "value" field.
|
||||
Reserved attribute keys start with "$": $email, $displayName, $phoneNumber, $mediaSource, $campaign, $adGroup, $ad, $keyword, $creative, $iterableUserId, $iterableCampaignId, $iterableTemplateId, $onesignalId, $airshipChannelId, $cleverTapId, $firebaseAppInstanceId.
|
||||
Custom attributes use plain keys without "$".
|
||||
|
||||
Examples:
|
||||
- "set email to john@example.com and name to John" ->
|
||||
{"$email": {"value": "john@example.com"}, "$displayName": {"value": "John"}}
|
||||
- "set plan to premium and team to acme" ->
|
||||
{"plan": {"value": "premium"}, "team": {"value": "acme"}}
|
||||
|
||||
Return ONLY valid JSON.`,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'extendByDays',
|
||||
title: 'Extend By Days',
|
||||
type: 'short-input',
|
||||
placeholder: 'Number of days to extend (1-365)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'defer_google_subscription',
|
||||
},
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: 'defer_google_subscription',
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'platform',
|
||||
title: 'Platform',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'iOS', id: 'ios' },
|
||||
{ label: 'Android', id: 'android' },
|
||||
{ label: 'Amazon', id: 'amazon' },
|
||||
{ label: 'macOS', id: 'macos' },
|
||||
{ label: 'Stripe', id: 'stripe' },
|
||||
],
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'list_offerings',
|
||||
},
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
access: [
|
||||
'revenuecat_get_customer',
|
||||
'revenuecat_delete_customer',
|
||||
'revenuecat_create_purchase',
|
||||
'revenuecat_grant_entitlement',
|
||||
'revenuecat_revoke_entitlement',
|
||||
'revenuecat_list_offerings',
|
||||
'revenuecat_update_subscriber_attributes',
|
||||
'revenuecat_defer_google_subscription',
|
||||
'revenuecat_refund_google_subscription',
|
||||
'revenuecat_revoke_google_subscription',
|
||||
],
|
||||
config: {
  // tool() runs during serialization, BEFORE workflow variables are
  // resolved, so it must not coerce types (Number()/boolean parsing on a
  // raw '<var>' template would corrupt the value). Field renames are safe
  // here — they copy values without coercing — and are needed for
  // serialization-time validation of required fields.
  tool: (params) => {
    if (params.purchasePlatform && params.operation === 'create_purchase') {
      params.platform = params.purchasePlatform
    }
    return `revenuecat_${params.operation}`
  },
  // Type coercions run here: params() executes at execution time, AFTER
  // variables are resolved. Only coerced fields are returned; the
  // framework merges them over the raw params.
  params: (params) => {
    const result: Record<string, unknown> = {}
    if (params.isRestore !== undefined) {
      result.isRestore = params.isRestore === 'true'
    }
    if (params.price !== undefined && params.price !== '') {
      result.price = Number(params.price)
    }
    if (params.extendByDays !== undefined && params.extendByDays !== '') {
      result.extendByDays = Number(params.extendByDays)
    }
    if (params.startTimeMs !== undefined && params.startTimeMs !== '') {
      result.startTimeMs = Number(params.startTimeMs)
    }
    return result
  },
},
|
||||
},
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'Operation to perform' },
|
||||
apiKey: { type: 'string', description: 'RevenueCat API key' },
|
||||
appUserId: { type: 'string', description: 'App user ID' },
|
||||
entitlementIdentifier: { type: 'string', description: 'Entitlement identifier' },
|
||||
duration: { type: 'string', description: 'Promotional entitlement duration' },
|
||||
startTimeMs: { type: 'number', description: 'Custom start time in ms since epoch' },
|
||||
fetchToken: { type: 'string', description: 'Store receipt or purchase token' },
|
||||
productId: { type: 'string', description: 'Product identifier' },
|
||||
price: { type: 'number', description: 'Product price' },
|
||||
currency: { type: 'string', description: 'ISO 4217 currency code' },
|
||||
isRestore: { type: 'boolean', description: 'Whether this is a restore purchase' },
|
||||
purchasePlatform: { type: 'string', description: 'Platform for the purchase' },
|
||||
attributes: { type: 'string', description: 'JSON object of subscriber attributes' },
|
||||
extendByDays: { type: 'number', description: 'Number of days to extend (1-365)' },
|
||||
platform: { type: 'string', description: 'Platform filter for offerings' },
|
||||
},
|
||||
outputs: {
|
||||
subscriber: {
|
||||
type: 'json',
|
||||
description: 'Subscriber object with subscriptions and entitlements',
|
||||
},
|
||||
offerings: {
|
||||
type: 'json',
|
||||
description: 'Array of offerings with packages',
|
||||
},
|
||||
current_offering_id: { type: 'string', description: 'Current offering identifier' },
|
||||
metadata: { type: 'json', description: 'Operation metadata' },
|
||||
deleted: { type: 'boolean', description: 'Whether the subscriber was deleted' },
|
||||
app_user_id: { type: 'string', description: 'The app user ID' },
|
||||
updated: { type: 'boolean', description: 'Whether the attributes were updated' },
|
||||
},
|
||||
}
|
||||
@@ -602,11 +602,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
// Convert numeric fields
|
||||
if (params.limit) {
|
||||
params.limit = Number(params.limit)
|
||||
}
|
||||
|
||||
// Return the appropriate tool based on operation
|
||||
switch (params.operation) {
|
||||
case 'sentry_issues_list':
|
||||
@@ -637,6 +632,11 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
return 'sentry_issues_list'
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const result: Record<string, unknown> = {}
|
||||
if (params.limit) result.limit = Number(params.limit)
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
|
||||
@@ -9,10 +9,10 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
type: 'slack',
|
||||
name: 'Slack',
|
||||
description:
|
||||
'Send, update, delete messages, add reactions in Slack or trigger workflows from Slack events',
|
||||
'Send, update, delete messages, send ephemeral messages, add reactions in Slack or trigger workflows from Slack events',
|
||||
authMode: AuthMode.OAuth,
|
||||
longDescription:
|
||||
'Integrate Slack into the workflow. Can send, update, and delete messages, create canvases, read messages, and add reactions. Requires Bot Token instead of OAuth in advanced mode. Can be used in trigger mode to trigger a workflow when a message is sent to a channel.',
|
||||
'Integrate Slack into the workflow. Can send, update, and delete messages, send ephemeral messages visible only to a specific user, create canvases, read messages, and add reactions. Requires Bot Token instead of OAuth in advanced mode. Can be used in trigger mode to trigger a workflow when a message is sent to a channel.',
|
||||
docsLink: 'https://docs.sim.ai/tools/slack',
|
||||
category: 'tools',
|
||||
bgColor: '#611f69',
|
||||
@@ -25,6 +25,7 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Send Message', id: 'send' },
|
||||
{ label: 'Send Ephemeral Message', id: 'ephemeral' },
|
||||
{ label: 'Create Canvas', id: 'canvas' },
|
||||
{ label: 'Read Messages', id: 'read' },
|
||||
{ label: 'Get Message', id: 'get_message' },
|
||||
@@ -116,15 +117,21 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
placeholder: 'Select Slack channel',
|
||||
mode: 'basic',
|
||||
dependsOn: { all: ['authMethod'], any: ['credential', 'botToken'] },
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['list_channels', 'list_users', 'get_user'],
|
||||
not: true,
|
||||
and: {
|
||||
field: 'destinationType',
|
||||
value: 'dm',
|
||||
condition: (values?: Record<string, unknown>) => {
|
||||
const op = values?.operation as string
|
||||
if (op === 'ephemeral') {
|
||||
return { field: 'operation', value: 'ephemeral' }
|
||||
}
|
||||
return {
|
||||
field: 'operation',
|
||||
value: ['list_channels', 'list_users', 'get_user'],
|
||||
not: true,
|
||||
},
|
||||
and: {
|
||||
field: 'destinationType',
|
||||
value: 'dm',
|
||||
not: true,
|
||||
},
|
||||
}
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
@@ -135,15 +142,21 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
canonicalParamId: 'channel',
|
||||
placeholder: 'Enter Slack channel ID (e.g., C1234567890)',
|
||||
mode: 'advanced',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['list_channels', 'list_users', 'get_user'],
|
||||
not: true,
|
||||
and: {
|
||||
field: 'destinationType',
|
||||
value: 'dm',
|
||||
condition: (values?: Record<string, unknown>) => {
|
||||
const op = values?.operation as string
|
||||
if (op === 'ephemeral') {
|
||||
return { field: 'operation', value: 'ephemeral' }
|
||||
}
|
||||
return {
|
||||
field: 'operation',
|
||||
value: ['list_channels', 'list_users', 'get_user'],
|
||||
not: true,
|
||||
},
|
||||
and: {
|
||||
field: 'destinationType',
|
||||
value: 'dm',
|
||||
not: true,
|
||||
},
|
||||
}
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
@@ -175,6 +188,31 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'ephemeralUser',
|
||||
title: 'Target User',
|
||||
type: 'short-input',
|
||||
placeholder: 'User ID who will see the message (e.g., U1234567890)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'ephemeral',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'messageFormat',
|
||||
title: 'Message Format',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Plain Text', id: 'text' },
|
||||
{ label: 'Block Kit', id: 'blocks' },
|
||||
],
|
||||
value: () => 'text',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['send', 'ephemeral', 'update'],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'text',
|
||||
title: 'Message',
|
||||
@@ -182,9 +220,77 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
placeholder: 'Enter your message (supports Slack mrkdwn)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'send',
|
||||
value: ['send', 'ephemeral'],
|
||||
and: { field: 'messageFormat', value: 'blocks', not: true },
|
||||
},
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: ['send', 'ephemeral'],
|
||||
and: { field: 'messageFormat', value: 'blocks', not: true },
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 'blocks',
|
||||
title: 'Block Kit Blocks',
|
||||
type: 'code',
|
||||
language: 'json',
|
||||
placeholder: 'JSON array of Block Kit blocks',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: ['send', 'ephemeral', 'update'],
|
||||
and: { field: 'messageFormat', value: 'blocks' },
|
||||
},
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: ['send', 'ephemeral', 'update'],
|
||||
and: { field: 'messageFormat', value: 'blocks' },
|
||||
},
|
||||
wandConfig: {
|
||||
enabled: true,
|
||||
maintainHistory: true,
|
||||
prompt: `You are an expert at Slack Block Kit.
|
||||
Generate ONLY a valid JSON array of Block Kit blocks based on the user's request.
|
||||
The output MUST be a JSON array starting with [ and ending with ].
|
||||
|
||||
Current blocks: {context}
|
||||
|
||||
Available block types for messages:
|
||||
- "section": Displays text with an optional accessory element. Text uses { "type": "mrkdwn", "text": "..." } or { "type": "plain_text", "text": "..." }.
|
||||
- "header": Large text header. Text must be plain_text.
|
||||
- "divider": A horizontal rule separator. No fields needed besides type.
|
||||
- "image": Displays an image. Requires "image_url" and "alt_text".
|
||||
- "context": Contextual info with an "elements" array of image and text objects.
|
||||
- "actions": Interactive elements like buttons. Each button needs "type": "button", a "text" object, and an "action_id".
|
||||
- "rich_text": Structured rich text with "elements" array of rich_text_section objects.
|
||||
|
||||
Example output:
|
||||
[
|
||||
{
|
||||
"type": "header",
|
||||
"text": { "type": "plain_text", "text": "Order Confirmation" }
|
||||
},
|
||||
{
|
||||
"type": "section",
|
||||
"text": { "type": "mrkdwn", "text": "Your order *#1234* has been confirmed." }
|
||||
},
|
||||
{ "type": "divider" },
|
||||
{
|
||||
"type": "actions",
|
||||
"elements": [
|
||||
{
|
||||
"type": "button",
|
||||
"text": { "type": "plain_text", "text": "View Order" },
|
||||
"action_id": "view_order",
|
||||
"url": "https://example.com/orders/1234"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
|
||||
You can reference workflow variables using angle brackets, e.g., <blockName.output>.
|
||||
Do not include any explanations, markdown formatting, or other text outside the JSON array.`,
|
||||
placeholder: 'Describe the Block Kit layout you want to create...',
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'threadTs',
|
||||
@@ -193,7 +299,7 @@ export const SlackBlock: BlockConfig<SlackResponse> = {
|
||||
placeholder: 'Reply to thread (e.g., 1405894322.002768)',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'send',
|
||||
value: ['send', 'ephemeral'],
|
||||
},
|
||||
required: false,
|
||||
},
|
||||
@@ -456,8 +562,13 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: 'update',
|
||||
and: { field: 'messageFormat', value: 'blocks', not: true },
|
||||
},
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: 'update',
|
||||
and: { field: 'messageFormat', value: 'blocks', not: true },
|
||||
},
|
||||
required: true,
|
||||
},
|
||||
// Delete Message specific fields
|
||||
{
|
||||
@@ -499,6 +610,7 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
tools: {
|
||||
access: [
|
||||
'slack_message',
|
||||
'slack_ephemeral_message',
|
||||
'slack_canvas',
|
||||
'slack_message_reader',
|
||||
'slack_get_message',
|
||||
@@ -517,6 +629,8 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
switch (params.operation) {
|
||||
case 'send':
|
||||
return 'slack_message'
|
||||
case 'ephemeral':
|
||||
return 'slack_ephemeral_message'
|
||||
case 'canvas':
|
||||
return 'slack_canvas'
|
||||
case 'read':
|
||||
@@ -554,13 +668,16 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
destinationType,
|
||||
channel,
|
||||
dmUserId,
|
||||
messageFormat,
|
||||
text,
|
||||
title,
|
||||
content,
|
||||
limit,
|
||||
oldest,
|
||||
files,
|
||||
blocks,
|
||||
threadTs,
|
||||
ephemeralUser,
|
||||
updateTimestamp,
|
||||
updateText,
|
||||
deleteTimestamp,
|
||||
@@ -602,10 +719,13 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
|
||||
switch (operation) {
|
||||
case 'send': {
|
||||
baseParams.text = text
|
||||
baseParams.text = messageFormat === 'blocks' && !text ? ' ' : text
|
||||
if (threadTs) {
|
||||
baseParams.threadTs = threadTs
|
||||
}
|
||||
if (blocks) {
|
||||
baseParams.blocks = blocks
|
||||
}
|
||||
// files is the canonical param from attachmentFiles (basic) or files (advanced)
|
||||
const normalizedFiles = normalizeFileInput(files)
|
||||
if (normalizedFiles) {
|
||||
@@ -614,6 +734,18 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
break
|
||||
}
|
||||
|
||||
case 'ephemeral': {
|
||||
baseParams.text = messageFormat === 'blocks' && !text ? ' ' : text
|
||||
baseParams.user = ephemeralUser ? String(ephemeralUser).trim() : ''
|
||||
if (threadTs) {
|
||||
baseParams.threadTs = threadTs
|
||||
}
|
||||
if (blocks) {
|
||||
baseParams.blocks = blocks
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
case 'canvas':
|
||||
baseParams.title = title
|
||||
baseParams.content = content
|
||||
@@ -680,7 +812,10 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
|
||||
case 'update':
|
||||
baseParams.timestamp = updateTimestamp
|
||||
baseParams.text = updateText
|
||||
baseParams.text = messageFormat === 'blocks' && !updateText ? ' ' : updateText
|
||||
if (blocks) {
|
||||
baseParams.blocks = blocks
|
||||
}
|
||||
break
|
||||
|
||||
case 'delete':
|
||||
@@ -699,6 +834,7 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
},
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'Operation to perform' },
|
||||
messageFormat: { type: 'string', description: 'Message format: text or blocks' },
|
||||
authMethod: { type: 'string', description: 'Authentication method' },
|
||||
destinationType: { type: 'string', description: 'Destination type (channel or dm)' },
|
||||
credential: { type: 'string', description: 'Slack access token' },
|
||||
@@ -731,6 +867,9 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
// List Users inputs
|
||||
includeDeleted: { type: 'string', description: 'Include deactivated users (true/false)' },
|
||||
userLimit: { type: 'string', description: 'Maximum number of users to return' },
|
||||
// Ephemeral message inputs
|
||||
ephemeralUser: { type: 'string', description: 'User ID who will see the ephemeral message' },
|
||||
blocks: { type: 'json', description: 'Block Kit layout blocks as a JSON array' },
|
||||
// Get User inputs
|
||||
userId: { type: 'string', description: 'User ID to look up' },
|
||||
// Get Message inputs
|
||||
@@ -758,6 +897,12 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
},
|
||||
files: { type: 'file[]', description: 'Files attached to the message' },
|
||||
|
||||
// slack_ephemeral_message outputs (ephemeral operation)
|
||||
messageTs: {
|
||||
type: 'string',
|
||||
description: 'Timestamp of the ephemeral message (cannot be used to update or delete)',
|
||||
},
|
||||
|
||||
// slack_canvas outputs
|
||||
canvas_id: { type: 'string', description: 'Canvas identifier for created canvases' },
|
||||
title: { type: 'string', description: 'Canvas title' },
|
||||
|
||||
@@ -755,43 +755,24 @@ export const SpotifyBlock: BlockConfig<ToolResponse> = {
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
// Convert numeric parameters
|
||||
if (params.limit) {
|
||||
params.limit = Number(params.limit)
|
||||
}
|
||||
if (params.volume_percent) {
|
||||
params.volume_percent = Number(params.volume_percent)
|
||||
}
|
||||
if (params.range_start) {
|
||||
params.range_start = Number(params.range_start)
|
||||
}
|
||||
if (params.insert_before) {
|
||||
params.insert_before = Number(params.insert_before)
|
||||
}
|
||||
if (params.range_length) {
|
||||
params.range_length = Number(params.range_length)
|
||||
}
|
||||
if (params.position_ms) {
|
||||
params.position_ms = Number(params.position_ms)
|
||||
}
|
||||
// Map followType to type for check_following
|
||||
if (params.followType) {
|
||||
params.type = params.followType
|
||||
}
|
||||
// Map newName to name for update_playlist
|
||||
if (params.newName) {
|
||||
params.name = params.newName
|
||||
}
|
||||
// Map playUris to uris for play
|
||||
if (params.playUris) {
|
||||
params.uris = params.playUris
|
||||
}
|
||||
// Normalize file input for cover image
|
||||
if (params.coverImage !== undefined) {
|
||||
params.coverImage = normalizeFileInput(params.coverImage, { single: true })
|
||||
}
|
||||
if (params.followType) params.type = params.followType
|
||||
if (params.newName) params.name = params.newName
|
||||
if (params.playUris) params.uris = params.playUris
|
||||
return params.operation || 'spotify_search'
|
||||
},
|
||||
params: (params) => {
|
||||
const result: Record<string, unknown> = {}
|
||||
if (params.limit) result.limit = Number(params.limit)
|
||||
if (params.volume_percent) result.volume_percent = Number(params.volume_percent)
|
||||
if (params.range_start) result.range_start = Number(params.range_start)
|
||||
if (params.insert_before) result.insert_before = Number(params.insert_before)
|
||||
if (params.range_length) result.range_length = Number(params.range_length)
|
||||
if (params.position_ms) result.position_ms = Number(params.position_ms)
|
||||
if (params.coverImage !== undefined) {
|
||||
result.coverImage = normalizeFileInput(params.coverImage, { single: true })
|
||||
}
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
|
||||
@@ -94,6 +94,7 @@ export const SttBlock: BlockConfig<SttBlockResponse> = {
|
||||
type: 'dropdown',
|
||||
condition: { field: 'provider', value: 'gemini' },
|
||||
options: [
|
||||
{ label: 'Gemini 3.1 Pro', id: 'gemini-3.1-pro-preview' },
|
||||
{ label: 'Gemini 3 Pro', id: 'gemini-3-pro-preview' },
|
||||
{ label: 'Gemini 2.5 Pro', id: 'gemini-2.5-pro' },
|
||||
{ label: 'Gemini 2.5 Flash', id: 'gemini-2.5-flash' },
|
||||
|
||||
@@ -202,14 +202,26 @@ export const TableBlock: BlockConfig<TableQueryResponse> = {
|
||||
value: () => 'query_rows',
|
||||
},
|
||||
|
||||
// Table selector (for all operations)
|
||||
// Table selector (for all operations) - basic mode
|
||||
{
|
||||
id: 'tableId',
|
||||
id: 'tableSelector',
|
||||
title: 'Table',
|
||||
type: 'table-selector',
|
||||
canonicalParamId: 'tableId',
|
||||
mode: 'basic',
|
||||
placeholder: 'Select a table',
|
||||
required: true,
|
||||
},
|
||||
// Table ID manual input - advanced mode
|
||||
{
|
||||
id: 'manualTableId',
|
||||
title: 'Table ID',
|
||||
type: 'short-input',
|
||||
canonicalParamId: 'tableId',
|
||||
mode: 'advanced',
|
||||
placeholder: 'Enter table ID',
|
||||
required: true,
|
||||
},
|
||||
|
||||
// Row ID for get/update/delete
|
||||
{
|
||||
|
||||
313
apps/sim/blocks/blocks/upstash.ts
Normal file
313
apps/sim/blocks/blocks/upstash.ts
Normal file
@@ -0,0 +1,313 @@
|
||||
import { UpstashIcon } from '@/components/icons'
|
||||
import type { BlockConfig } from '@/blocks/types'
|
||||
import { AuthMode } from '@/blocks/types'
|
||||
import type {
|
||||
UpstashRedisCommandResponse,
|
||||
UpstashRedisDeleteResponse,
|
||||
UpstashRedisExistsResponse,
|
||||
UpstashRedisExpireResponse,
|
||||
UpstashRedisGetResponse,
|
||||
UpstashRedisHGetAllResponse,
|
||||
UpstashRedisHGetResponse,
|
||||
UpstashRedisHSetResponse,
|
||||
UpstashRedisIncrbyResponse,
|
||||
UpstashRedisIncrResponse,
|
||||
UpstashRedisKeysResponse,
|
||||
UpstashRedisLPushResponse,
|
||||
UpstashRedisLRangeResponse,
|
||||
UpstashRedisSetnxResponse,
|
||||
UpstashRedisSetResponse,
|
||||
UpstashRedisTtlResponse,
|
||||
} from '@/tools/upstash/types'
|
||||
|
||||
type UpstashResponse =
|
||||
| UpstashRedisGetResponse
|
||||
| UpstashRedisSetResponse
|
||||
| UpstashRedisDeleteResponse
|
||||
| UpstashRedisKeysResponse
|
||||
| UpstashRedisCommandResponse
|
||||
| UpstashRedisHSetResponse
|
||||
| UpstashRedisHGetResponse
|
||||
| UpstashRedisHGetAllResponse
|
||||
| UpstashRedisIncrResponse
|
||||
| UpstashRedisIncrbyResponse
|
||||
| UpstashRedisExpireResponse
|
||||
| UpstashRedisTtlResponse
|
||||
| UpstashRedisLPushResponse
|
||||
| UpstashRedisLRangeResponse
|
||||
| UpstashRedisExistsResponse
|
||||
| UpstashRedisSetnxResponse
|
||||
|
||||
export const UpstashBlock: BlockConfig<UpstashResponse> = {
|
||||
type: 'upstash',
|
||||
name: 'Upstash',
|
||||
description: 'Serverless Redis with Upstash',
|
||||
longDescription:
|
||||
'Connect to Upstash Redis to perform key-value, hash, list, and utility operations via the REST API.',
|
||||
docsLink: 'https://docs.sim.ai/tools/upstash',
|
||||
category: 'tools',
|
||||
bgColor: '#181C1E',
|
||||
authMode: AuthMode.ApiKey,
|
||||
icon: UpstashIcon,
|
||||
subBlocks: [
|
||||
{
|
||||
id: 'operation',
|
||||
title: 'Operation',
|
||||
type: 'dropdown',
|
||||
options: [
|
||||
{ label: 'Get', id: 'get' },
|
||||
{ label: 'Set', id: 'set' },
|
||||
{ label: 'Delete', id: 'delete' },
|
||||
{ label: 'List Keys', id: 'keys' },
|
||||
{ label: 'HSET', id: 'hset' },
|
||||
{ label: 'HGET', id: 'hget' },
|
||||
{ label: 'HGETALL', id: 'hgetall' },
|
||||
{ label: 'INCR', id: 'incr' },
|
||||
{ label: 'INCRBY', id: 'incrby' },
|
||||
{ label: 'EXISTS', id: 'exists' },
|
||||
{ label: 'SETNX', id: 'setnx' },
|
||||
{ label: 'LPUSH', id: 'lpush' },
|
||||
{ label: 'LRANGE', id: 'lrange' },
|
||||
{ label: 'EXPIRE', id: 'expire' },
|
||||
{ label: 'TTL', id: 'ttl' },
|
||||
{ label: 'Command', id: 'command' },
|
||||
],
|
||||
value: () => 'get',
|
||||
},
|
||||
{
|
||||
id: 'restUrl',
|
||||
title: 'REST URL',
|
||||
type: 'short-input',
|
||||
placeholder: 'https://your-database.upstash.io',
|
||||
password: true,
|
||||
required: true,
|
||||
},
|
||||
{
|
||||
id: 'restToken',
|
||||
title: 'REST Token',
|
||||
type: 'short-input',
|
||||
placeholder: 'Enter your Upstash Redis REST token',
|
||||
password: true,
|
||||
required: true,
|
||||
},
|
||||
// Key field (used by most operations)
|
||||
{
|
||||
id: 'key',
|
||||
title: 'Key',
|
||||
type: 'short-input',
|
||||
placeholder: 'my-key',
|
||||
condition: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'get',
|
||||
'set',
|
||||
'delete',
|
||||
'hset',
|
||||
'hget',
|
||||
'hgetall',
|
||||
'incr',
|
||||
'incrby',
|
||||
'exists',
|
||||
'setnx',
|
||||
'lpush',
|
||||
'lrange',
|
||||
'expire',
|
||||
'ttl',
|
||||
],
|
||||
},
|
||||
required: {
|
||||
field: 'operation',
|
||||
value: [
|
||||
'get',
|
||||
'set',
|
||||
'delete',
|
||||
'hset',
|
||||
'hget',
|
||||
'hgetall',
|
||||
'incr',
|
||||
'incrby',
|
||||
'exists',
|
||||
'setnx',
|
||||
'lpush',
|
||||
'lrange',
|
||||
'expire',
|
||||
'ttl',
|
||||
],
|
||||
},
|
||||
},
|
||||
// Value field (Get/Set/HSET/LPUSH)
|
||||
{
|
||||
id: 'value',
|
||||
title: 'Value',
|
||||
type: 'long-input',
|
||||
placeholder: 'Value to store',
|
||||
condition: { field: 'operation', value: ['set', 'setnx', 'hset', 'lpush'] },
|
||||
required: { field: 'operation', value: ['set', 'setnx', 'hset', 'lpush'] },
|
||||
},
|
||||
// Expiration for SET
|
||||
{
|
||||
id: 'ex',
|
||||
title: 'Expiration (seconds)',
|
||||
type: 'short-input',
|
||||
placeholder: 'Optional TTL in seconds',
|
||||
condition: { field: 'operation', value: 'set' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
// Hash field (HSET/HGET)
|
||||
{
|
||||
id: 'field',
|
||||
title: 'Field',
|
||||
type: 'short-input',
|
||||
placeholder: 'Hash field name',
|
||||
condition: { field: 'operation', value: ['hset', 'hget'] },
|
||||
required: { field: 'operation', value: ['hset', 'hget'] },
|
||||
},
|
||||
// Pattern for KEYS
|
||||
{
|
||||
id: 'pattern',
|
||||
title: 'Pattern',
|
||||
type: 'short-input',
|
||||
placeholder: '* (all keys) or user:* (prefix match)',
|
||||
condition: { field: 'operation', value: 'keys' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
// Seconds for EXPIRE
|
||||
{
|
||||
id: 'seconds',
|
||||
title: 'Seconds',
|
||||
type: 'short-input',
|
||||
placeholder: 'Timeout in seconds',
|
||||
condition: { field: 'operation', value: 'expire' },
|
||||
required: { field: 'operation', value: 'expire' },
|
||||
},
|
||||
// Increment for INCRBY
|
||||
{
|
||||
id: 'increment',
|
||||
title: 'Increment',
|
||||
type: 'short-input',
|
||||
placeholder: 'Amount to increment by (negative to decrement)',
|
||||
condition: { field: 'operation', value: 'incrby' },
|
||||
required: { field: 'operation', value: 'incrby' },
|
||||
},
|
||||
// Start/Stop for LRANGE
|
||||
{
|
||||
id: 'start',
|
||||
title: 'Start Index',
|
||||
type: 'short-input',
|
||||
placeholder: '0',
|
||||
condition: { field: 'operation', value: 'lrange' },
|
||||
required: { field: 'operation', value: 'lrange' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
{
|
||||
id: 'stop',
|
||||
title: 'Stop Index',
|
||||
type: 'short-input',
|
||||
placeholder: '-1 (all elements)',
|
||||
condition: { field: 'operation', value: 'lrange' },
|
||||
required: { field: 'operation', value: 'lrange' },
|
||||
mode: 'advanced',
|
||||
},
|
||||
// Command for raw Redis
|
||||
{
|
||||
id: 'command',
|
||||
title: 'Command',
|
||||
type: 'code',
|
||||
placeholder: '["HSET", "myhash", "field1", "value1"]',
|
||||
condition: { field: 'operation', value: 'command' },
|
||||
required: { field: 'operation', value: 'command' },
|
||||
},
|
||||
],
|
||||
tools: {
|
||||
access: [
|
||||
'upstash_redis_get',
|
||||
'upstash_redis_set',
|
||||
'upstash_redis_delete',
|
||||
'upstash_redis_keys',
|
||||
'upstash_redis_command',
|
||||
'upstash_redis_hset',
|
||||
'upstash_redis_hget',
|
||||
'upstash_redis_hgetall',
|
||||
'upstash_redis_incr',
|
||||
'upstash_redis_expire',
|
||||
'upstash_redis_ttl',
|
||||
'upstash_redis_lpush',
|
||||
'upstash_redis_lrange',
|
||||
'upstash_redis_exists',
|
||||
'upstash_redis_setnx',
|
||||
'upstash_redis_incrby',
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
if (params.ex) {
|
||||
params.ex = Number(params.ex)
|
||||
}
|
||||
if (params.seconds !== undefined) {
|
||||
params.seconds = Number(params.seconds)
|
||||
}
|
||||
if (params.start !== undefined) {
|
||||
params.start = Number(params.start)
|
||||
}
|
||||
if (params.stop !== undefined) {
|
||||
params.stop = Number(params.stop)
|
||||
}
|
||||
if (params.increment !== undefined) {
|
||||
params.increment = Number(params.increment)
|
||||
}
|
||||
return `upstash_redis_${params.operation}`
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
operation: { type: 'string', description: 'Redis operation to perform' },
|
||||
restUrl: { type: 'string', description: 'Upstash Redis REST URL' },
|
||||
restToken: { type: 'string', description: 'Upstash Redis REST token' },
|
||||
key: { type: 'string', description: 'Redis key' },
|
||||
value: { type: 'string', description: 'Value to store' },
|
||||
ex: { type: 'number', description: 'Expiration time in seconds (SET)' },
|
||||
field: { type: 'string', description: 'Hash field name (HSET/HGET)' },
|
||||
pattern: { type: 'string', description: 'Pattern to match keys (KEYS)' },
|
||||
seconds: { type: 'number', description: 'Timeout in seconds (EXPIRE)' },
|
||||
start: { type: 'number', description: 'Start index (LRANGE)' },
|
||||
stop: { type: 'number', description: 'Stop index (LRANGE)' },
|
||||
command: { type: 'string', description: 'Redis command as JSON array (Command)' },
|
||||
increment: { type: 'number', description: 'Amount to increment by (INCRBY)' },
|
||||
},
|
||||
outputs: {
|
||||
value: { type: 'json', description: 'Retrieved value (Get, HGET, INCR, INCRBY operations)' },
|
||||
result: {
|
||||
type: 'json',
|
||||
description: 'Operation result (Set, HSET, EXPIRE, Command operations)',
|
||||
},
|
||||
deletedCount: { type: 'number', description: 'Number of keys deleted (Delete operation)' },
|
||||
keys: { type: 'array', description: 'List of keys matching the pattern (Keys operation)' },
|
||||
count: { type: 'number', description: 'Number of items found (Keys, LRANGE operations)' },
|
||||
key: { type: 'string', description: 'The key operated on' },
|
||||
fields: {
|
||||
type: 'json',
|
||||
description: 'Hash field-value pairs keyed by field name (HGETALL operation)',
|
||||
},
|
||||
fieldCount: { type: 'number', description: 'Number of fields in the hash (HGETALL operation)' },
|
||||
field: { type: 'string', description: 'Hash field name (HSET, HGET operations)' },
|
||||
ttl: {
|
||||
type: 'number',
|
||||
description:
|
||||
'Remaining TTL in seconds. Positive integer if TTL set, -1 if no expiration, -2 if key does not exist.',
|
||||
},
|
||||
length: { type: 'number', description: 'List length after push (LPUSH operation)' },
|
||||
values: {
|
||||
type: 'array',
|
||||
description: 'List elements in the specified range (LRANGE operation)',
|
||||
},
|
||||
command: { type: 'string', description: 'The command that was executed (Command operation)' },
|
||||
pattern: { type: 'string', description: 'The pattern used to match keys (Keys operation)' },
|
||||
exists: {
|
||||
type: 'boolean',
|
||||
description: 'Whether the key exists (true) or not (false) (EXISTS operation)',
|
||||
},
|
||||
wasSet: {
|
||||
type: 'boolean',
|
||||
description: 'Whether the key was set (true) or already existed (false) (SETNX operation)',
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -13,6 +13,7 @@ const VISION_MODEL_OPTIONS = [
|
||||
{ label: 'Claude Opus 4.5', id: 'claude-opus-4-5' },
|
||||
{ label: 'Claude Sonnet 4.5', id: 'claude-sonnet-4-5' },
|
||||
{ label: 'Claude Haiku 4.5', id: 'claude-haiku-4-5' },
|
||||
{ label: 'Gemini 3.1 Pro Preview', id: 'gemini-3.1-pro-preview' },
|
||||
{ label: 'Gemini 3 Pro Preview', id: 'gemini-3-pro-preview' },
|
||||
{ label: 'Gemini 3 Flash Preview', id: 'gemini-3-flash-preview' },
|
||||
{ label: 'Gemini 2.5 Pro', id: 'gemini-2.5-pro' },
|
||||
|
||||
@@ -64,11 +64,6 @@ export const WikipediaBlock: BlockConfig<WikipediaResponse> = {
|
||||
access: ['wikipedia_summary', 'wikipedia_search', 'wikipedia_content', 'wikipedia_random'],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
// Convert searchLimit to a number for search operation
|
||||
if (params.searchLimit) {
|
||||
params.searchLimit = Number(params.searchLimit)
|
||||
}
|
||||
|
||||
switch (params.operation) {
|
||||
case 'wikipedia_summary':
|
||||
return 'wikipedia_summary'
|
||||
@@ -82,6 +77,11 @@ export const WikipediaBlock: BlockConfig<WikipediaResponse> = {
|
||||
return 'wikipedia_summary'
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const result: Record<string, unknown> = {}
|
||||
if (params.searchLimit) result.searchLimit = Number(params.searchLimit)
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
|
||||
@@ -442,11 +442,6 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
],
|
||||
config: {
|
||||
tool: (params) => {
|
||||
// Convert numeric parameters
|
||||
if (params.maxResults) {
|
||||
params.maxResults = Number(params.maxResults)
|
||||
}
|
||||
|
||||
switch (params.operation) {
|
||||
case 'youtube_search':
|
||||
return 'youtube_search'
|
||||
@@ -470,6 +465,11 @@ Return ONLY the timestamp string - no explanations, no quotes, no extra text.`,
|
||||
return 'youtube_search'
|
||||
}
|
||||
},
|
||||
params: (params) => {
|
||||
const result: Record<string, unknown> = {}
|
||||
if (params.maxResults) result.maxResults = Number(params.maxResults)
|
||||
return result
|
||||
},
|
||||
},
|
||||
},
|
||||
inputs: {
|
||||
|
||||
@@ -3,6 +3,7 @@ import { AgentBlock } from '@/blocks/blocks/agent'
|
||||
import { AhrefsBlock } from '@/blocks/blocks/ahrefs'
|
||||
import { AirtableBlock } from '@/blocks/blocks/airtable'
|
||||
import { AirweaveBlock } from '@/blocks/blocks/airweave'
|
||||
import { AlgoliaBlock } from '@/blocks/blocks/algolia'
|
||||
import { ApiBlock } from '@/blocks/blocks/api'
|
||||
import { ApiTriggerBlock } from '@/blocks/blocks/api_trigger'
|
||||
import { ApifyBlock } from '@/blocks/blocks/apify'
|
||||
@@ -54,6 +55,7 @@ import { GrafanaBlock } from '@/blocks/blocks/grafana'
|
||||
import { GrainBlock } from '@/blocks/blocks/grain'
|
||||
import { GreptileBlock } from '@/blocks/blocks/greptile'
|
||||
import { GuardrailsBlock } from '@/blocks/blocks/guardrails'
|
||||
import { HexBlock } from '@/blocks/blocks/hex'
|
||||
import { HubSpotBlock } from '@/blocks/blocks/hubspot'
|
||||
import { HuggingFaceBlock } from '@/blocks/blocks/huggingface'
|
||||
import { HumanInTheLoopBlock } from '@/blocks/blocks/human_in_the_loop'
|
||||
@@ -108,9 +110,11 @@ import { PulseBlock, PulseV2Block } from '@/blocks/blocks/pulse'
|
||||
import { QdrantBlock } from '@/blocks/blocks/qdrant'
|
||||
import { RDSBlock } from '@/blocks/blocks/rds'
|
||||
import { RedditBlock } from '@/blocks/blocks/reddit'
|
||||
import { RedisBlock } from '@/blocks/blocks/redis'
|
||||
import { ReductoBlock, ReductoV2Block } from '@/blocks/blocks/reducto'
|
||||
import { ResendBlock } from '@/blocks/blocks/resend'
|
||||
import { ResponseBlock } from '@/blocks/blocks/response'
|
||||
import { RevenueCatBlock } from '@/blocks/blocks/revenuecat'
|
||||
import { RouterBlock, RouterV2Block } from '@/blocks/blocks/router'
|
||||
import { RssBlock } from '@/blocks/blocks/rss'
|
||||
import { S3Block } from '@/blocks/blocks/s3'
|
||||
@@ -136,7 +140,6 @@ import { StarterBlock } from '@/blocks/blocks/starter'
|
||||
import { StripeBlock } from '@/blocks/blocks/stripe'
|
||||
import { SttBlock, SttV2Block } from '@/blocks/blocks/stt'
|
||||
import { SupabaseBlock } from '@/blocks/blocks/supabase'
|
||||
import { TableBlock } from '@/blocks/blocks/table'
|
||||
import { TavilyBlock } from '@/blocks/blocks/tavily'
|
||||
import { TelegramBlock } from '@/blocks/blocks/telegram'
|
||||
import { TextractBlock, TextractV2Block } from '@/blocks/blocks/textract'
|
||||
@@ -148,6 +151,7 @@ import { TtsBlock } from '@/blocks/blocks/tts'
|
||||
import { TwilioSMSBlock } from '@/blocks/blocks/twilio'
|
||||
import { TwilioVoiceBlock } from '@/blocks/blocks/twilio_voice'
|
||||
import { TypeformBlock } from '@/blocks/blocks/typeform'
|
||||
import { UpstashBlock } from '@/blocks/blocks/upstash'
|
||||
import { VariablesBlock } from '@/blocks/blocks/variables'
|
||||
import { VercelBlock } from '@/blocks/blocks/vercel'
|
||||
import { VideoGeneratorBlock, VideoGeneratorV2Block } from '@/blocks/blocks/video_generator'
|
||||
@@ -175,6 +179,7 @@ export const registry: Record<string, BlockConfig> = {
|
||||
ahrefs: AhrefsBlock,
|
||||
airtable: AirtableBlock,
|
||||
airweave: AirweaveBlock,
|
||||
algolia: AlgoliaBlock,
|
||||
api: ApiBlock,
|
||||
api_trigger: ApiTriggerBlock,
|
||||
apify: ApifyBlock,
|
||||
@@ -236,6 +241,7 @@ export const registry: Record<string, BlockConfig> = {
|
||||
grain: GrainBlock,
|
||||
greptile: GreptileBlock,
|
||||
guardrails: GuardrailsBlock,
|
||||
hex: HexBlock,
|
||||
hubspot: HubSpotBlock,
|
||||
huggingface: HuggingFaceBlock,
|
||||
human_in_the_loop: HumanInTheLoopBlock,
|
||||
@@ -293,10 +299,12 @@ export const registry: Record<string, BlockConfig> = {
|
||||
qdrant: QdrantBlock,
|
||||
rds: RDSBlock,
|
||||
reddit: RedditBlock,
|
||||
redis: RedisBlock,
|
||||
reducto: ReductoBlock,
|
||||
reducto_v2: ReductoV2Block,
|
||||
resend: ResendBlock,
|
||||
response: ResponseBlock,
|
||||
revenuecat: RevenueCatBlock,
|
||||
router: RouterBlock,
|
||||
router_v2: RouterV2Block,
|
||||
rss: RssBlock,
|
||||
@@ -324,7 +332,8 @@ export const registry: Record<string, BlockConfig> = {
|
||||
stt: SttBlock,
|
||||
stt_v2: SttV2Block,
|
||||
supabase: SupabaseBlock,
|
||||
table: TableBlock,
|
||||
// TODO: Uncomment when working on tables
|
||||
// table: TableBlock,
|
||||
tavily: TavilyBlock,
|
||||
telegram: TelegramBlock,
|
||||
textract: TextractBlock,
|
||||
@@ -337,6 +346,7 @@ export const registry: Record<string, BlockConfig> = {
|
||||
twilio_sms: TwilioSMSBlock,
|
||||
twilio_voice: TwilioVoiceBlock,
|
||||
typeform: TypeformBlock,
|
||||
upstash: UpstashBlock,
|
||||
vercel: VercelBlock,
|
||||
variables: VariablesBlock,
|
||||
video_generator: VideoGeneratorBlock,
|
||||
|
||||
@@ -1157,6 +1157,17 @@ export function AirweaveIcon(props: SVGProps<SVGSVGElement>) {
|
||||
)
|
||||
}
|
||||
|
||||
export function AlgoliaIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 50 50'>
|
||||
<path
|
||||
fill='#FFFFFF'
|
||||
d='M25,0C11.3,0,0.2,11,0,24.6C-0.2,38.4,11,49.9,24.8,50c4.3,0,8.4-1,12-3c0.4-0.2,0.4-0.7,0.1-1l-2.3-2.1 c-0.5-0.4-1.2-0.5-1.7-0.3c-2.5,1.1-5.3,1.6-8.2,1.6c-11.2-0.1-20.2-9.4-20-20.6C4.9,13.6,13.9,4.7,25,4.7h20.3v36L33.7,30.5 c-0.4-0.3-0.9-0.3-1.2,0.1c-1.8,2.4-4.9,4-8.2,3.7c-4.6-0.3-8.4-4-8.7-8.7c-0.4-5.5,4-10.2,9.4-10.2c4.9,0,9,3.8,9.4,8.6 c0,0.4,0.2,0.8,0.6,1.1l3,2.7c0.3,0.3,0.9,0.1,1-0.3c0.2-1.2,0.3-2.4,0.2-3.6c-0.5-7-6.2-12.7-13.2-13.1c-8.1-0.5-14.8,5.8-15,13.7 c-0.2,7.7,6.1,14.4,13.8,14.5c3.2,0.1,6.2-0.9,8.6-2.7l15,13.3c0.6,0.6,1.7,0.1,1.7-0.7v-48C50,0.4,49.5,0,49,0L25,0 C25,0,25,0,25,0z'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function GoogleBooksIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 478.633 540.068'>
|
||||
@@ -5737,3 +5748,86 @@ export function CloudflareIcon(props: SVGProps<SVGSVGElement>) {
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function UpstashIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 256 341' width='24' height='24'>
|
||||
<path
|
||||
fill='#00C98D'
|
||||
d='M0 298.417c56.554 56.553 148.247 56.553 204.801 0c56.554-56.554 56.554-148.247 0-204.801l-25.6 25.6c42.415 42.416 42.415 111.185 0 153.6c-42.416 42.416-111.185 42.416-153.601 0z'
|
||||
/>
|
||||
<path
|
||||
fill='#00C98D'
|
||||
d='M51.2 247.216c28.277 28.277 74.123 28.277 102.4 0c28.277-28.276 28.277-74.123 0-102.4l-25.6 25.6c14.14 14.138 14.14 37.061 0 51.2c-14.138 14.139-37.061 14.139-51.2 0zM256 42.415c-56.554-56.553-148.247-56.553-204.8 0c-56.555 56.555-56.555 148.247 0 204.801l25.599-25.6c-42.415-42.415-42.415-111.185 0-153.6c42.416-42.416 111.185-42.416 153.6 0z'
|
||||
/>
|
||||
<path
|
||||
fill='#00C98D'
|
||||
d='M204.8 93.616c-28.276-28.277-74.124-28.277-102.4 0c-28.278 28.277-28.278 74.123 0 102.4l25.6-25.6c-14.14-14.138-14.14-37.061 0-51.2c14.138-14.139 37.06-14.139 51.2 0z'
|
||||
/>
|
||||
<path
|
||||
fill='#FFF'
|
||||
fillOpacity='.4'
|
||||
d='M256 42.415c-56.554-56.553-148.247-56.553-204.8 0c-56.555 56.555-56.555 148.247 0 204.801l25.599-25.6c-42.415-42.415-42.415-111.185 0-153.6c42.416-42.416 111.185-42.416 153.6 0z'
|
||||
/>
|
||||
<path
|
||||
fill='#FFF'
|
||||
fillOpacity='.4'
|
||||
d='M204.8 93.616c-28.276-28.277-74.124-28.277-102.4 0c-28.278 28.277-28.278 74.123 0 102.4l25.6-25.6c-14.14-14.138-14.14-37.061 0-51.2c14.138-14.139 37.06-14.139 51.2 0z'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function RevenueCatIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
{...props}
|
||||
width='512'
|
||||
height='512'
|
||||
viewBox='0 0 512 512'
|
||||
fill='none'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
>
|
||||
<path
|
||||
d='M95 109.774C110.152 106.108 133.612 104 154.795 104C212.046 104 246.32 123.928 246.32 174.646C246.32 205.746 233.737 226.264 214.005 237.437L261.765 318.946C258.05 321.632 250.035 323.176 238.864 323.176C226.282 323.176 217.987 321.672 211.982 318.946L172.225 248.3H167.645C157.789 248.305 147.945 247.601 138.18 246.192V319.255C134.172 321.672 127.022 323.176 116.73 323.176C106.73 323.176 99.2874 321.659 95 319.255V109.774ZM137.643 207.848C145.772 209.263 153.997 209.968 162.235 209.956C187.12 209.956 202.285 200.556 202.285 177.057C202.285 152.886 186.268 142.949 157.668 142.949C150.956 142.918 144.255 143.515 137.643 144.735V207.848Z'
|
||||
fill='#FFFFFF'
|
||||
/>
|
||||
<path
|
||||
d='M428.529 329.244C428.529 365.526 410.145 375.494 396.306 382.195C360.972 399.32 304.368 379.4 244.206 373.338C189.732 366.214 135.706 361.522 127.309 373.738C124.152 376.832 123.481 386.798 127.309 390.862C138.604 402.85 168.061 394.493 188.919 390.714C195.391 389.694 201.933 392.099 206.079 397.021C210.226 401.944 211.349 408.637 209.024 414.58C206.699 420.522 201.28 424.811 194.809 425.831C185.379 427.264 175.85 427.989 166.306 428C145.988 428 120.442 424.495 105.943 409.072C98.7232 401.4 91.3266 387.78 97.0271 366.465C107.875 326.074 172.807 336.052 248.033 343.633C300.41 348.907 357.23 366.465 379.934 350.343C385.721 346.234 396.517 337.022 390.698 329.244C384.879 321.467 375.353 325.684 362.838 325.684C300.152 325.684 263.238 285.302 263.238 217.916C263.247 167.292 284.176 131.892 318.287 115.09C333.109 107.789 350.421 104 369.587 104C386.292 104 403.269 106.931 414.11 113.366C420.847 123.032 423.778 140.305 422.306 153.201C408.247 146.466 395.36 142.949 378.669 142.949C337.365 142.949 308.947 164.039 308.947 214.985C308.947 265.932 337.065 286.149 376.611 286.149C387.869 286.035 403.1 284.67 422.306 282.053C426.455 297.498 428.529 313.228 428.529 329.244Z'
|
||||
fill='#FFFFFF'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function RedisIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg
|
||||
{...props}
|
||||
viewBox='0 0 512 512'
|
||||
xmlns='http://www.w3.org/2000/svg'
|
||||
fillRule='evenodd'
|
||||
clipRule='evenodd'
|
||||
strokeLinejoin='round'
|
||||
strokeMiterlimit='2'
|
||||
>
|
||||
<path
|
||||
d='M479.14 279.864c-34.584 43.578-71.94 93.385-146.645 93.385-66.73 0-91.59-58.858-93.337-106.672 14.62 30.915 43.203 55.949 87.804 54.792C412.737 318.6 471.53 241.127 471.53 170.57c0-84.388-62.947-145.262-172.24-145.262-78.165 0-175.004 29.743-238.646 76.782-.689 48.42 26.286 111.369 35.972 104.452 55.17-39.67 98.918-65.203 141.35-78.01C175.153 198.58 24.451 361.219 6 389.85c2.076 26.286 34.588 96.842 50.496 96.842 4.841 0 8.993-2.768 13.835-7.61 45.433-51.046 82.472-96.816 115.412-140.933 4.627 64.658 36.42 143.702 125.307 143.702 79.55 0 158.408-57.414 194.377-186.767 4.149-15.911-15.22-28.362-26.286-15.22zm-90.616-104.449c0 40.81-40.118 60.87-76.782 60.87-19.596 0-34.648-5.145-46.554-11.832 21.906-33.168 43.59-67.182 66.887-103.593 41.08 6.953 56.449 29.788 56.449 54.555z'
|
||||
fill='#FFFFFF'
|
||||
fillRule='nonzero'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
export function HexIcon(props: SVGProps<SVGSVGElement>) {
|
||||
return (
|
||||
<svg {...props} xmlns='http://www.w3.org/2000/svg' viewBox='0 0 1450.3 600'>
|
||||
<path
|
||||
fill='#5F509D'
|
||||
fillRule='evenodd'
|
||||
d='m250.11,0v199.49h-50V0H0v600h200.11v-300.69h50v300.69h200.18V0h-200.18Zm249.9,0v600h450.29v-250.23h-200.2v149h-50v-199.46h250.2V0h-450.29Zm200.09,199.49v-99.49h50v99.49h-50Zm550.02,0V0h200.18v150l-100,100.09,100,100.09v249.82h-200.18v-300.69h-50v300.69h-200.11v-249.82l100.11-100.09-100.11-100.09V0h200.11v199.49h50Z'
|
||||
/>
|
||||
</svg>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -295,6 +295,12 @@ export function AccessControl() {
|
||||
category: 'Sidebar',
|
||||
configKey: 'hideKnowledgeBaseTab' as const,
|
||||
},
|
||||
{
|
||||
id: 'hide-tables',
|
||||
label: 'Tables',
|
||||
category: 'Sidebar',
|
||||
configKey: 'hideTablesTab' as const,
|
||||
},
|
||||
{
|
||||
id: 'hide-templates',
|
||||
label: 'Templates',
|
||||
@@ -949,6 +955,7 @@ export function AccessControl() {
|
||||
onClick={() => {
|
||||
const allVisible =
|
||||
!editingConfig?.hideKnowledgeBaseTab &&
|
||||
!editingConfig?.hideTablesTab &&
|
||||
!editingConfig?.hideTemplates &&
|
||||
!editingConfig?.hideCopilot &&
|
||||
!editingConfig?.hideApiKeysTab &&
|
||||
@@ -969,6 +976,7 @@ export function AccessControl() {
|
||||
? {
|
||||
...prev,
|
||||
hideKnowledgeBaseTab: allVisible,
|
||||
hideTablesTab: allVisible,
|
||||
hideTemplates: allVisible,
|
||||
hideCopilot: allVisible,
|
||||
hideApiKeysTab: allVisible,
|
||||
@@ -990,6 +998,7 @@ export function AccessControl() {
|
||||
}}
|
||||
>
|
||||
{!editingConfig?.hideKnowledgeBaseTab &&
|
||||
!editingConfig?.hideTablesTab &&
|
||||
!editingConfig?.hideTemplates &&
|
||||
!editingConfig?.hideCopilot &&
|
||||
!editingConfig?.hideApiKeysTab &&
|
||||
|
||||
@@ -15,6 +15,7 @@ const {
|
||||
allowedModelProviders: null,
|
||||
hideTraceSpans: false,
|
||||
hideKnowledgeBaseTab: false,
|
||||
hideTablesTab: false,
|
||||
hideCopilot: false,
|
||||
hideApiKeysTab: false,
|
||||
hideEnvironmentTab: false,
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { loggerMock } from '@sim/testing'
|
||||
import { loggerMock, requestUtilsMock } from '@sim/testing'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { BlockType } from '@/executor/constants'
|
||||
import { ConditionBlockHandler } from '@/executor/handlers/condition/condition-handler'
|
||||
@@ -7,9 +7,7 @@ import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
|
||||
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
|
||||
vi.mock('@/lib/core/utils/request', () => ({
|
||||
generateRequestId: vi.fn(() => 'test-request-id'),
|
||||
}))
|
||||
vi.mock('@/lib/core/utils/request', () => requestUtilsMock)
|
||||
|
||||
vi.mock('@/tools', () => ({
|
||||
executeTool: vi.fn(),
|
||||
|
||||
@@ -7,7 +7,11 @@ import { BlockResolver } from '@/executor/variables/resolvers/block'
|
||||
import { EnvResolver } from '@/executor/variables/resolvers/env'
|
||||
import { LoopResolver } from '@/executor/variables/resolvers/loop'
|
||||
import { ParallelResolver } from '@/executor/variables/resolvers/parallel'
|
||||
import type { ResolutionContext, Resolver } from '@/executor/variables/resolvers/reference'
|
||||
import {
|
||||
RESOLVED_EMPTY,
|
||||
type ResolutionContext,
|
||||
type Resolver,
|
||||
} from '@/executor/variables/resolvers/reference'
|
||||
import { WorkflowResolver } from '@/executor/variables/resolvers/workflow'
|
||||
import type { SerializedBlock, SerializedWorkflow } from '@/serializer/types'
|
||||
|
||||
@@ -104,7 +108,11 @@ export class VariableResolver {
|
||||
loopScope,
|
||||
}
|
||||
|
||||
return this.resolveReference(trimmed, resolutionContext)
|
||||
const result = this.resolveReference(trimmed, resolutionContext)
|
||||
if (result === RESOLVED_EMPTY) {
|
||||
return null
|
||||
}
|
||||
return result
|
||||
}
|
||||
}
|
||||
|
||||
@@ -174,6 +182,13 @@ export class VariableResolver {
|
||||
return match
|
||||
}
|
||||
|
||||
if (resolved === RESOLVED_EMPTY) {
|
||||
if (blockType === BlockType.FUNCTION) {
|
||||
return this.blockResolver.formatValueForBlock(null, blockType, language)
|
||||
}
|
||||
return ''
|
||||
}
|
||||
|
||||
return this.blockResolver.formatValueForBlock(resolved, blockType, language)
|
||||
} catch (error) {
|
||||
replacementError = error instanceof Error ? error : new Error(String(error))
|
||||
@@ -207,7 +222,6 @@ export class VariableResolver {
|
||||
|
||||
let replacementError: Error | null = null
|
||||
|
||||
// Use generic utility for smart variable reference replacement
|
||||
let result = replaceValidReferences(template, (match) => {
|
||||
if (replacementError) return match
|
||||
|
||||
@@ -217,6 +231,10 @@ export class VariableResolver {
|
||||
return match
|
||||
}
|
||||
|
||||
if (resolved === RESOLVED_EMPTY) {
|
||||
return 'null'
|
||||
}
|
||||
|
||||
if (typeof resolved === 'string') {
|
||||
const escaped = resolved.replace(/\\/g, '\\\\').replace(/'/g, "\\'")
|
||||
return `'${escaped}'`
|
||||
|
||||
@@ -2,7 +2,7 @@ import { loggerMock } from '@sim/testing'
|
||||
import { describe, expect, it, vi } from 'vitest'
|
||||
import { ExecutionState } from '@/executor/execution/state'
|
||||
import { BlockResolver } from './block'
|
||||
import type { ResolutionContext } from './reference'
|
||||
import { RESOLVED_EMPTY, type ResolutionContext } from './reference'
|
||||
|
||||
vi.mock('@sim/logger', () => loggerMock)
|
||||
vi.mock('@/blocks/registry', async () => {
|
||||
@@ -134,15 +134,18 @@ describe('BlockResolver', () => {
|
||||
expect(resolver.resolve('<source.items.1.id>', ctx)).toBe(2)
|
||||
})
|
||||
|
||||
it.concurrent('should return undefined for non-existent path when no schema defined', () => {
|
||||
const workflow = createTestWorkflow([{ id: 'source', type: 'unknown_block_type' }])
|
||||
const resolver = new BlockResolver(workflow)
|
||||
const ctx = createTestContext('current', {
|
||||
source: { existing: 'value' },
|
||||
})
|
||||
it.concurrent(
|
||||
'should return RESOLVED_EMPTY for non-existent path when no schema defined',
|
||||
() => {
|
||||
const workflow = createTestWorkflow([{ id: 'source', type: 'unknown_block_type' }])
|
||||
const resolver = new BlockResolver(workflow)
|
||||
const ctx = createTestContext('current', {
|
||||
source: { existing: 'value' },
|
||||
})
|
||||
|
||||
expect(resolver.resolve('<source.nonexistent>', ctx)).toBeUndefined()
|
||||
})
|
||||
expect(resolver.resolve('<source.nonexistent>', ctx)).toBe(RESOLVED_EMPTY)
|
||||
}
|
||||
)
|
||||
|
||||
it.concurrent('should throw error for path not in output schema', () => {
|
||||
const workflow = createTestWorkflow([
|
||||
@@ -162,7 +165,7 @@ describe('BlockResolver', () => {
|
||||
expect(() => resolver.resolve('<source.invalidField>', ctx)).toThrow(/Available fields:/)
|
||||
})
|
||||
|
||||
it.concurrent('should return undefined for path in schema but missing in data', () => {
|
||||
it.concurrent('should return RESOLVED_EMPTY for path in schema but missing in data', () => {
|
||||
const workflow = createTestWorkflow([
|
||||
{
|
||||
id: 'source',
|
||||
@@ -175,7 +178,7 @@ describe('BlockResolver', () => {
|
||||
})
|
||||
|
||||
expect(resolver.resolve('<source.stdout>', ctx)).toBe('log output')
|
||||
expect(resolver.resolve('<source.result>', ctx)).toBeUndefined()
|
||||
expect(resolver.resolve('<source.result>', ctx)).toBe(RESOLVED_EMPTY)
|
||||
})
|
||||
|
||||
it.concurrent(
|
||||
@@ -191,7 +194,7 @@ describe('BlockResolver', () => {
|
||||
const resolver = new BlockResolver(workflow)
|
||||
const ctx = createTestContext('current', {})
|
||||
|
||||
expect(resolver.resolve('<workflow.childTraceSpans>', ctx)).toBeUndefined()
|
||||
expect(resolver.resolve('<workflow.childTraceSpans>', ctx)).toBe(RESOLVED_EMPTY)
|
||||
}
|
||||
)
|
||||
|
||||
@@ -208,7 +211,7 @@ describe('BlockResolver', () => {
|
||||
const resolver = new BlockResolver(workflow)
|
||||
const ctx = createTestContext('current', {})
|
||||
|
||||
expect(resolver.resolve('<workflowinput.childTraceSpans>', ctx)).toBeUndefined()
|
||||
expect(resolver.resolve('<workflowinput.childTraceSpans>', ctx)).toBe(RESOLVED_EMPTY)
|
||||
}
|
||||
)
|
||||
|
||||
@@ -225,13 +228,13 @@ describe('BlockResolver', () => {
|
||||
const resolver = new BlockResolver(workflow)
|
||||
const ctx = createTestContext('current', {})
|
||||
|
||||
expect(resolver.resolve('<hitl.response>', ctx)).toBeUndefined()
|
||||
expect(resolver.resolve('<hitl.submission>', ctx)).toBeUndefined()
|
||||
expect(resolver.resolve('<hitl.resumeInput>', ctx)).toBeUndefined()
|
||||
expect(resolver.resolve('<hitl.response>', ctx)).toBe(RESOLVED_EMPTY)
|
||||
expect(resolver.resolve('<hitl.submission>', ctx)).toBe(RESOLVED_EMPTY)
|
||||
expect(resolver.resolve('<hitl.resumeInput>', ctx)).toBe(RESOLVED_EMPTY)
|
||||
}
|
||||
)
|
||||
|
||||
it.concurrent('should return undefined for non-existent block', () => {
|
||||
it.concurrent('should return undefined for block not in workflow', () => {
|
||||
const workflow = createTestWorkflow([{ id: 'existing' }])
|
||||
const resolver = new BlockResolver(workflow)
|
||||
const ctx = createTestContext('current', {})
|
||||
@@ -239,6 +242,21 @@ describe('BlockResolver', () => {
|
||||
expect(resolver.resolve('<nonexistent>', ctx)).toBeUndefined()
|
||||
})
|
||||
|
||||
it.concurrent('should return RESOLVED_EMPTY for block in workflow that did not execute', () => {
|
||||
const workflow = createTestWorkflow([
|
||||
{ id: 'start-block', name: 'Start', type: 'start_trigger' },
|
||||
{ id: 'slack-block', name: 'Slack', type: 'slack_trigger' },
|
||||
])
|
||||
const resolver = new BlockResolver(workflow)
|
||||
const ctx = createTestContext('current', {
|
||||
'slack-block': { message: 'hello from slack' },
|
||||
})
|
||||
|
||||
expect(resolver.resolve('<slack.message>', ctx)).toBe('hello from slack')
|
||||
expect(resolver.resolve('<start>', ctx)).toBe(RESOLVED_EMPTY)
|
||||
expect(resolver.resolve('<start.input>', ctx)).toBe(RESOLVED_EMPTY)
|
||||
})
|
||||
|
||||
it.concurrent('should fall back to context blockStates', () => {
|
||||
const workflow = createTestWorkflow([{ id: 'source' }])
|
||||
const resolver = new BlockResolver(workflow)
|
||||
@@ -1012,24 +1030,24 @@ describe('BlockResolver', () => {
|
||||
expect(resolver.resolve('<source.other>', ctx)).toBe('exists')
|
||||
})
|
||||
|
||||
it.concurrent('should handle output with undefined values', () => {
|
||||
it.concurrent('should return RESOLVED_EMPTY for output with undefined values', () => {
|
||||
const workflow = createTestWorkflow([{ id: 'source', type: 'unknown_block_type' }])
|
||||
const resolver = new BlockResolver(workflow)
|
||||
const ctx = createTestContext('current', {
|
||||
source: { value: undefined, other: 'exists' },
|
||||
})
|
||||
|
||||
expect(resolver.resolve('<source.value>', ctx)).toBeUndefined()
|
||||
expect(resolver.resolve('<source.value>', ctx)).toBe(RESOLVED_EMPTY)
|
||||
})
|
||||
|
||||
it.concurrent('should return undefined for deeply nested non-existent path', () => {
|
||||
it.concurrent('should return RESOLVED_EMPTY for deeply nested non-existent path', () => {
|
||||
const workflow = createTestWorkflow([{ id: 'source', type: 'unknown_block_type' }])
|
||||
const resolver = new BlockResolver(workflow)
|
||||
const ctx = createTestContext('current', {
|
||||
source: { level1: { level2: {} } },
|
||||
})
|
||||
|
||||
expect(resolver.resolve('<source.level1.level2.level3>', ctx)).toBeUndefined()
|
||||
expect(resolver.resolve('<source.level1.level2.level3>', ctx)).toBe(RESOLVED_EMPTY)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user