Mirror of https://github.com/simstudioai/sim.git, synced 2026-01-11 16:08:04 -05:00
Compare commits
25 Commits
| SHA1 |
|---|
| 95a8d6410c |
| 08720d926c |
| 308f39e8b9 |
| 5b1f948686 |
| cb17691c01 |
| c00b18594e |
| 95efae9035 |
| b12e415fea |
| 510ce4b7da |
| abed816afd |
| 6f390c0d1d |
| 9c12ddf491 |
| 27ef45f717 |
| 0414aa5f6d |
| 93f68a9092 |
| 727e5e8763 |
| 4964495abb |
| 40d3ce5e10 |
| a251122601 |
| 9e8d2f7c7d |
| eeb1a340b2 |
| c91c132e88 |
| 5028930b9f |
| b565babe1f |
| 2f57d8a884 |
123 .github/workflows/build.yml (vendored)
@@ -7,17 +7,43 @@ on:

jobs:
  build-and-push:
    runs-on: ubuntu-latest-8-cores
    strategy:
      fail-fast: false
      matrix:
        include:
          # AMD64 builds on x86 runners
          - dockerfile: ./docker/app.Dockerfile
            image: ghcr.io/simstudioai/simstudio
            platform: linux/amd64
            arch: amd64
            runner: linux-x64-8-core
          - dockerfile: ./docker/db.Dockerfile
            image: ghcr.io/simstudioai/migrations
            platform: linux/amd64
            arch: amd64
            runner: linux-x64-8-core
          - dockerfile: ./docker/realtime.Dockerfile
            image: ghcr.io/simstudioai/realtime
            platform: linux/amd64
            arch: amd64
            runner: linux-x64-8-core
          # ARM64 builds on native ARM64 runners
          - dockerfile: ./docker/app.Dockerfile
            image: ghcr.io/simstudioai/simstudio
            platform: linux/arm64
            arch: arm64
            runner: linux-arm64-8-core
          - dockerfile: ./docker/db.Dockerfile
            image: ghcr.io/simstudioai/migrations
            platform: linux/arm64
            arch: arm64
            runner: linux-arm64-8-core
          - dockerfile: ./docker/realtime.Dockerfile
            image: ghcr.io/simstudioai/realtime
            platform: linux/arm64
            arch: arm64
            runner: linux-arm64-8-core
    runs-on: ${{ matrix.runner }}
    permissions:
      contents: read
      packages: write

@@ -26,9 +52,6 @@ jobs:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

@@ -41,6 +64,55 @@ jobs:
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ matrix.image }}
          tags: |
            type=raw,value=latest-${{ matrix.arch }},enable=${{ github.ref == 'refs/heads/main' }}
            type=ref,event=pr,suffix=-${{ matrix.arch }}
            type=semver,pattern={{version}},suffix=-${{ matrix.arch }}
            type=semver,pattern={{major}}.{{minor}},suffix=-${{ matrix.arch }}
            type=semver,pattern={{major}}.{{minor}}.{{patch}},suffix=-${{ matrix.arch }}
            type=sha,format=long,suffix=-${{ matrix.arch }}

      - name: Build and push Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ${{ matrix.dockerfile }}
          platforms: ${{ matrix.platform }}
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha,scope=build-v2
          cache-to: type=gha,mode=max,scope=build-v2
          provenance: false
          sbom: false

  create-manifests:
    runs-on: ubuntu-latest
    needs: build-and-push
    if: github.event_name != 'pull_request'
    strategy:
      matrix:
        include:
          - image: ghcr.io/simstudioai/simstudio
          - image: ghcr.io/simstudioai/migrations
          - image: ghcr.io/simstudioai/realtime
    permissions:
      contents: read
      packages: write

    steps:
      - name: Log in to the Container registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata for manifest
        id: meta
        uses: docker/metadata-action@v5
        with:

@@ -53,14 +125,35 @@ jobs:
            type=semver,pattern={{major}}.{{minor}}.{{patch}}
            type=sha,format=long

      - name: Build and push Docker image
        uses: docker/build-push-action@v6
        with:
          context: .
          file: ${{ matrix.dockerfile }}
          platforms: linux/amd64,linux/arm64
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
      - name: Create and push manifest
        run: |
          # Extract the tags from metadata (these are the final manifest tags we want)
          MANIFEST_TAGS="${{ steps.meta.outputs.tags }}"

          # Create manifest for each tag
          for manifest_tag in $MANIFEST_TAGS; do
            echo "Creating manifest for $manifest_tag"

            # The architecture-specific images have -amd64 and -arm64 suffixes
            amd64_image="${manifest_tag}-amd64"
            arm64_image="${manifest_tag}-arm64"

            echo "Looking for images: $amd64_image and $arm64_image"

            # Check if both architecture images exist
            if docker manifest inspect "$amd64_image" >/dev/null 2>&1 && docker manifest inspect "$arm64_image" >/dev/null 2>&1; then
              echo "Both images found, creating manifest..."
              docker manifest create "$manifest_tag" \
                "$amd64_image" \
                "$arm64_image"
              docker manifest push "$manifest_tag"
              echo "Successfully created and pushed manifest for $manifest_tag"
            else
              echo "Error: One or both architecture images not found"
              echo "Checking AMD64 image: $amd64_image"
              docker manifest inspect "$amd64_image" || echo "AMD64 image not found"
              echo "Checking ARM64 image: $arm64_image"
              docker manifest inspect "$arm64_image" || echo "ARM64 image not found"
              exit 1
            fi
          done
5 .gitignore (vendored)
@@ -65,4 +65,7 @@ start-collector.sh
.turbo

# VSCode
.vscode
.vscode

## Helm Chart Tests
helm/sim/test
@@ -1,4 +1,3 @@
import { cn } from '@/lib/utils'
import {
  AgentIcon,
  ApiIcon,

@@ -7,7 +6,8 @@ import {
  ConditionalIcon,
  ConnectIcon,
  ResponseIcon,
} from '../icons'
} from '@/components/icons'
import { cn } from '@/lib/utils'

// Custom Feature component specifically for BlockTypes to handle the 6-item layout
const BlockFeature = ({
30 apps/docs/components/ui/video.tsx (new file)
@@ -0,0 +1,30 @@
import { getVideoUrl } from '@/lib/utils'

interface VideoProps {
  src: string
  className?: string
  autoPlay?: boolean
  loop?: boolean
  muted?: boolean
  playsInline?: boolean
}

export function Video({
  src,
  className = 'w-full -mb-2 rounded-lg',
  autoPlay = true,
  loop = true,
  muted = true,
  playsInline = true,
}: VideoProps) {
  return (
    <video
      autoPlay={autoPlay}
      loop={loop}
      muted={muted}
      playsInline={playsInline}
      className={className}
      src={getVideoUrl(src)}
    />
  )
}
@@ -7,6 +7,7 @@ import { Callout } from 'fumadocs-ui/components/callout'
import { Step, Steps } from 'fumadocs-ui/components/steps'
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
import { ThemeImage } from '@/components/ui/theme-image'
import { Video } from '@/components/ui/video'

The Evaluator block uses AI to score and assess content quality based on metrics you define. Perfect for quality control, A/B testing, and ensuring your AI outputs meet specific standards.

@@ -63,7 +64,7 @@ Choose an AI model to perform the evaluation:
**Local Models**: Any model running on Ollama

<div className="w-full max-w-2xl mx-auto overflow-hidden rounded-lg">
  <video autoPlay loop muted playsInline className="w-full -mb-2 rounded-lg" src="/models.mp4"></video>
  <Video src="models.mp4" />
</div>

**Recommendation**: Use models with strong reasoning capabilities like GPT-4o or Claude 3.7 Sonnet for more accurate evaluations.

@@ -7,11 +7,12 @@ import { Card, Cards } from 'fumadocs-ui/components/card'
import { Step, Steps } from 'fumadocs-ui/components/steps'
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
import { BlockTypes } from '@/components/ui/block-types'
import { Video } from '@/components/ui/video'

Blocks are the building components you connect together to create AI workflows. Think of them as specialized modules that each handle a specific task—from chatting with AI models to making API calls or processing data.

<div className="w-full max-w-2xl mx-auto overflow-hidden rounded-lg">
  <video autoPlay loop muted playsInline className="w-full -mb-2 rounded-lg" src="/connections.mp4"></video>
  <Video src="connections.mp4" />
</div>

## Core Block Types

@@ -62,7 +63,7 @@ You create workflows by connecting blocks together. The output of one block beco
- **Branching paths**: Some blocks can route to different paths based on conditions

<div className="w-full max-w-2xl mx-auto overflow-hidden rounded-lg">
  <video autoPlay loop muted playsInline className="w-full -mb-2 rounded-lg" src="/connections.mp4"></video>
  <Video src="connections.mp4" />
</div>

## Common Patterns

@@ -8,6 +8,7 @@ import { Step, Steps } from 'fumadocs-ui/components/steps'
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
import { Accordion, Accordions } from 'fumadocs-ui/components/accordion'
import { ThemeImage } from '@/components/ui/theme-image'
import { Video } from '@/components/ui/video'

The Router block uses AI to intelligently decide which path your workflow should take next. Unlike Condition blocks that use simple rules, Router blocks can understand context and make smart routing decisions based on content analysis.

@@ -103,7 +104,7 @@ Choose an AI model to power the routing decision:
**Local Models**: Any model running on Ollama

<div className="w-full max-w-2xl mx-auto overflow-hidden rounded-lg">
  <video autoPlay loop muted playsInline className="w-full -mb-2 rounded-lg" src="/router-model-dropdown.mp4"></video>
  <Video src="router-model-dropdown.mp4" />
</div>

**Recommendation**: Use models with strong reasoning capabilities like GPT-4o or Claude 3.7 Sonnet for more accurate routing decisions.

@@ -6,6 +6,7 @@ description: Connect your blocks to one another.
import { Callout } from 'fumadocs-ui/components/callout'
import { Card, Cards } from 'fumadocs-ui/components/card'
import { ConnectIcon } from '@/components/icons'
import { Video } from '@/components/ui/video'

Connections are the pathways that allow data to flow between blocks in your workflow. They define how information is passed from one block to another, enabling you to create sophisticated, multi-step processes.

@@ -15,7 +16,7 @@ Connections are the pathways that allow data to flow between blocks in your work
</Callout>

<div className="mx-auto w-full overflow-hidden rounded-lg">
  <video autoPlay loop muted playsInline className="w-full -mb-2 rounded-lg" src="/connections.mp4"></video>
  <Video src="connections.mp4" />
</div>

## Connection Types

@@ -4,11 +4,12 @@ description: Using connection tags to reference data between blocks
---

import { Callout } from 'fumadocs-ui/components/callout'
import { Video } from '@/components/ui/video'

Connection tags are visual representations of the data available from connected blocks. They provide an easy way to reference outputs from previous blocks in your workflow.

<div className="mx-auto w-full overflow-hidden rounded-lg">
  <video autoPlay loop muted playsInline className="w-full -mb-2 rounded-lg" src="/connections.mp4"></video>
  <Video src="connections.mp4" />
</div>

### What Are Connection Tags?

@@ -20,6 +20,7 @@ import {
  LoopIcon,
  ParallelIcon,
} from '@/components/icons'
import { Video } from '@/components/ui/video'

When you run a workflow in Sim Studio, the execution engine follows a systematic process to ensure blocks are executed in the correct order with proper data flow.

@@ -162,7 +163,7 @@ Run workflows on-demand through the Sim Studio interface by clicking the "Run" b
- Workflows that need human supervision

<div className="mx-auto w-full overflow-hidden rounded-lg">
  <video autoPlay loop muted playsInline className="w-full -mb-2 rounded-lg" src="/input-format.mp4"></video>
  <Video src="input-format.mp4" />
</div>

### Scheduled Execution

@@ -175,7 +176,7 @@ Configure workflows to run automatically on a specified schedule:
- Set minimum and maximum execution intervals

<div className="mx-auto w-full overflow-hidden rounded-lg">
  <video autoPlay loop muted playsInline className="w-full -mb-2 rounded-lg" src="/configure-schedule.mp4"></video>
  <Video src="configure-schedule.mp4" />
</div>

### API Endpoints

@@ -188,7 +189,7 @@ Each workflow can be exposed as an API endpoint:
- Receive execution results as JSON responses

<div className="mx-auto w-full overflow-hidden rounded-lg">
  <video autoPlay loop muted playsInline className="w-full -mb-2 rounded-lg" src="/api-deployment.mp4"></video>
  <Video src="api-deployment.mp4" />
</div>

#### Viewing Deployed APIs

@@ -196,7 +197,7 @@ Each workflow can be exposed as an API endpoint:
Monitor your deployed workflow APIs and their current state:

<div className="mx-auto w-full overflow-hidden rounded-lg">
  <video autoPlay loop muted playsInline className="w-full -mb-2 rounded-lg" src="/api-redeployment.mp4"></video>
  <Video src="api-redeployment.mp4" />
</div>

This shows how to view the deployed state and compare with the original deployed API configuration.

@@ -211,7 +212,7 @@ Configure workflows to execute in response to external events:
- Support for specialized webhooks (GitHub, Stripe, etc.)

<div className="mx-auto w-full overflow-hidden rounded-lg">
  <video autoPlay loop muted playsInline className="w-full -mb-2 rounded-lg" src="/webhooks.mp4"></video>
  <Video src="webhooks.mp4" />
</div>

<Callout type="info">

@@ -23,6 +23,7 @@ import {
  PerplexityIcon,
  SlackIcon,
} from '@/components/icons'
import { Video } from '@/components/ui/video'

This tutorial will guide you through building your first AI workflow in Sim Studio. We'll create a people research agent that can find information about individuals using state-of-the-art LLM-Search tools.

@@ -63,7 +64,7 @@ A people research agent that:
- **User Prompt**: Drag the connection from the Start block's output into this field (this connects `<start.input>` to the user prompt)

<div className="mx-auto w-full overflow-hidden rounded-lg">
  <video autoPlay loop muted playsInline className="w-full -mb-2 rounded-lg" src="/static/examples/started/started-2.mp4"></video>
  <Video src="examples/started-2.mp4" />
</div>
</Step>

@@ -77,7 +78,7 @@ A people research agent that:
- Add your API keys for both tools (this allows the agent to search the web and access additional information)

<div className="mx-auto w-3/5 overflow-hidden rounded-lg">
  <video autoPlay loop muted playsInline className="w-full -mb-2 rounded-lg" src="/static/examples/started/started-3.mp4"></video>
  <Video src="examples/started-3.mp4" />
</div>
</Step>

@@ -92,7 +93,7 @@ A people research agent that:
You should see the agent's response analyzing the person described in your text.

<div className="mx-auto w-full overflow-hidden rounded-lg">
  <video autoPlay loop muted playsInline className="w-full -mb-2 rounded-lg" src="/static/examples/started/started-4.mp4"></video>
  <Video src="examples/started-4.mp4" />
</div>
</Step>

@@ -105,7 +106,7 @@ A people research agent that:
- The AI will generate a JSON schema for you automatically

<div className="mx-auto w-full overflow-hidden rounded-lg">
  <video autoPlay loop muted playsInline className="w-full -mb-2 rounded-lg" src="/static/examples/started/started-5.mp4"></video>
  <Video src="examples/started-5.mp4" />
</div>
</Step>

@@ -120,7 +121,7 @@ A people research agent that:
You should now see structured JSON output with the person's information organized into location, profession, and education fields.

<div className="mx-auto w-full overflow-hidden rounded-lg">
  <video autoPlay loop muted playsInline className="w-full -mb-2 rounded-lg" src="/static/examples/started/started-6.mp4"></video>
  <Video src="examples/started-6.mp4" />
</div>
</Step>
</Steps>
138 apps/docs/content/docs/tools/arxiv.mdx (new file)
@@ -0,0 +1,138 @@
---
title: ArXiv
description: Search and retrieve academic papers from ArXiv
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="arxiv"
  color="#E0E0E0"
  icon={true}
  iconSvg={`<svg className="block-icon" id='logomark' xmlns='http://www.w3.org/2000/svg' viewBox='0 0 17.732 24.269'>
    <g id='tiny'>
      <path
        d='M573.549,280.916l2.266,2.738,6.674-7.84c.353-.47.52-.717.353-1.117a1.218,1.218,0,0,0-1.061-.748h0a.953.953,0,0,0-.712.262Z'
        transform='translate(-566.984 -271.548)'
        fill='#bdb9b4'
      />
      <path
        d='M579.525,282.225l-10.606-10.174a1.413,1.413,0,0,0-.834-.5,1.09,1.09,0,0,0-1.027.66c-.167.4-.047.681.319,1.206l8.44,10.242h0l-6.282,7.716a1.336,1.336,0,0,0-.323,1.3,1.114,1.114,0,0,0,1.04.69A.992.992,0,0,0,571,293l8.519-7.92A1.924,1.924,0,0,0,579.525,282.225Z'
        transform='translate(-566.984 -271.548)'
        fill='#b31b1b'
      />
      <path
        d='M584.32,293.912l-8.525-10.275,0,0L573.53,280.9l-1.389,1.254a2.063,2.063,0,0,0,0,2.965l10.812,10.419a.925.925,0,0,0,.742.282,1.039,1.039,0,0,0,.953-.667A1.261,1.261,0,0,0,584.32,293.912Z'
        transform='translate(-566.984 -271.548)'
        fill='#bdb9b4'
      />
    </g>
  </svg>`}
/>

{/* MANUAL-CONTENT-START:intro */}
[ArXiv](https://arxiv.org/) is a free, open-access repository of scientific research papers in fields such as physics, mathematics, computer science, quantitative biology, quantitative finance, statistics, electrical engineering, systems science, and economics. ArXiv provides a vast collection of preprints and published articles, making it a primary resource for researchers and practitioners worldwide.

With ArXiv, you can:

- **Search for academic papers**: Find research by keywords, author names, titles, categories, and more
- **Retrieve paper metadata**: Access abstracts, author lists, publication dates, and other bibliographic information
- **Download full-text PDFs**: Obtain the complete text of most papers for in-depth study
- **Explore author contributions**: View all papers by a specific author
- **Stay up-to-date**: Discover the latest submissions and trending topics in your field

In Sim Studio, the ArXiv integration enables your agents to programmatically search, retrieve, and analyze scientific papers from ArXiv. This allows you to automate literature reviews, build research assistants, or incorporate up-to-date scientific knowledge into your agentic workflows. Use ArXiv as a dynamic data source for research, discovery, and knowledge extraction within your Sim Studio projects.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Search for academic papers, retrieve metadata, download papers, and access the vast collection of scientific research on ArXiv.

## Tools

### `arxiv_search`

Search for academic papers on ArXiv by keywords, authors, titles, or other fields.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `query` | string | Yes | The search query to execute |
| `searchField` | string | No | Field to search in: all, ti \(title\), au \(author\), abs \(abstract\), co \(comment\), jr \(journal\), cat \(category\), rn \(report number\) |
| `maxResults` | number | No | Maximum number of results to return \(default: 10, max: 2000\) |
| `sortBy` | string | No | Sort by: relevance, lastUpdatedDate, submittedDate \(default: relevance\) |
| `sortOrder` | string | No | Sort order: ascending, descending \(default: descending\) |

#### Output

| Parameter | Type |
| --------- | ---- |
| `query` | string |
| `papers` | string |
| `totalResults` | string |

### `arxiv_get_paper`

Get detailed information about a specific ArXiv paper by its ID.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `paperId` | string | Yes | ArXiv paper ID \(e.g., |

#### Output

| Parameter | Type |
| --------- | ---- |
| `paper` | string |

### `arxiv_get_author_papers`

Search for papers by a specific author on ArXiv.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `authorName` | string | Yes | Author name to search for |
| `maxResults` | number | No | Maximum number of results to return \(default: 10, max: 2000\) |

#### Output

| Parameter | Type |
| --------- | ---- |
| `authorPapers` | string |
| `authorName` | string |
| `totalResults` | string |

## Block Configuration

### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `operation` | string | Yes | Operation |

### Outputs

| Output | Type | Description |
| ------ | ---- | ----------- |
| `papers` | json | papers output from the block |
| `totalResults` | number | totalResults output from the block |
| `paper` | json | paper output from the block |
| `authorPapers` | json | authorPapers output from the block |

## Notes

- Category: `tools`
- Type: `arxiv`
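To make the `arxiv_search` inputs above concrete, here is a minimal TypeScript sketch of the kind of request they correspond to against ArXiv's public export API. It is illustrative only, not the block's actual implementation, and the helper name is hypothetical.

```typescript
// Illustrative sketch: arxiv_search-style inputs mapped onto ArXiv's public
// export API (http://export.arxiv.org/api/query), which returns an Atom XML feed.
interface ArxivSearchParams {
  query: string
  searchField?: 'all' | 'ti' | 'au' | 'abs' | 'co' | 'jr' | 'cat' | 'rn'
  maxResults?: number
  sortBy?: 'relevance' | 'lastUpdatedDate' | 'submittedDate'
  sortOrder?: 'ascending' | 'descending'
}

export async function searchArxiv(params: ArxivSearchParams): Promise<string> {
  const field = params.searchField ?? 'all'
  const url = new URL('http://export.arxiv.org/api/query')
  url.searchParams.set('search_query', `${field}:${params.query}`)
  url.searchParams.set('max_results', String(params.maxResults ?? 10))
  url.searchParams.set('sortBy', params.sortBy ?? 'relevance')
  url.searchParams.set('sortOrder', params.sortOrder ?? 'descending')
  const response = await fetch(url)
  return response.text() // Atom XML feed of matching papers
}
```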
@@ -26,7 +26,7 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
/>

{/* MANUAL-CONTENT-START:intro */}
[Exa](https://exa.ai/) is an AI-powered search engine designed specifically for developers and researchers that provides highly relevant and up-to-date information from across the web. It combines advanced semantic search capabilities with AI understanding to deliver more accurate and contextually relevant results than traditional search engines.
[Exa](https://exa.ai/) is an AI-powered search engine designed specifically for developers and researchers, providing highly relevant and up-to-date information from across the web. It combines advanced semantic search capabilities with AI understanding to deliver more accurate and contextually relevant results than traditional search engines.

With Exa, you can:

@@ -35,14 +35,16 @@ With Exa, you can:
- **Access up-to-date information**: Retrieve current information from across the web
- **Find similar content**: Discover related resources based on content similarity
- **Extract webpage contents**: Retrieve and process the full text of web pages
- **Answer questions with citations**: Ask questions and receive direct answers with supporting sources
- **Perform research tasks**: Automate multi-step research workflows to gather, synthesize, and summarize information

In Sim Studio, the Exa integration allows your agents to search the web for information, retrieve content from specific URLs, and find similar resources - all programmatically through API calls. This enables your agents to access real-time information from the internet, enhancing their ability to provide accurate, current, and relevant responses. The integration is particularly valuable for research tasks, information gathering, content discovery, and answering questions that require up-to-date information from across the web.
In Sim Studio, the Exa integration allows your agents to search the web for information, retrieve content from specific URLs, find similar resources, answer questions with citations, and conduct research tasks—all programmatically through API calls. This enables your agents to access real-time information from the internet, enhancing their ability to provide accurate, current, and relevant responses. The integration is particularly valuable for research tasks, information gathering, content discovery, and answering questions that require up-to-date information from across the web.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Search the web, retrieve content, find similar links, and answer questions using Exa
Search the web, retrieve content, find similar links, and answer questions using Exa's powerful AI search capabilities.

@@ -44,8 +44,16 @@ With Firecrawl in Sim Studio, you can:
- **Handle JavaScript-heavy sites**: Process content from modern web applications that rely on JavaScript
- **Filter content**: Focus on specific parts of a page using CSS selectors
- **Process at scale**: Handle high-volume scraping needs with a reliable API
- **Search the web**: Perform intelligent web searches and retrieve structured results
- **Crawl entire sites**: Crawl multiple pages from a website and aggregate their content

The Firecrawl integration allows your agents to access and process web content programmatically without leaving the Sim Studio environment. This enables scenarios like research, content aggregation, data extraction, and information analysis from across the web. Your agents can gather information from websites, extract structured data, and use that information to make decisions or generate insights - all without having to navigate the complexities of raw HTML parsing or browser automation. Simply configure the Firecrawl block with your API key, provide the target URL, and your agents can immediately begin working with web content in a clean, structured format.
In Sim Studio, the Firecrawl integration enables your agents to access and process web content programmatically as part of their workflows. Supported operations include:

- **Scrape**: Extract structured content (Markdown, HTML, metadata) from a single web page.
- **Search**: Search the web for information using Firecrawl's intelligent search capabilities.
- **Crawl**: Crawl multiple pages from a website, returning structured content and metadata for each page.

This allows your agents to gather information from websites, extract structured data, and use that information to make decisions or generate insights—all without having to navigate the complexities of raw HTML parsing or browser automation. Simply configure the Firecrawl block with your API key, select the operation (Scrape, Search, or Crawl), and provide the relevant parameters. Your agents can immediately begin working with web content in a clean, structured format.
{/* MANUAL-CONTENT-END */}

@@ -90,7 +90,7 @@ In Sim Studio, the Google Calendar integration enables your agents to programmat

## Usage Instructions

Integrate Google Calendar functionality to create, read, update, and list calendar events within your workflow. Automate scheduling, check availability, and manage events using OAuth authentication. Email invitations are sent asynchronously and delivery depends on recipients
Integrate Google Calendar functionality to create, read, update, and list calendar events within your workflow. Automate scheduling, check availability, and manage events using OAuth authentication. Email invitations are sent asynchronously and delivery depends on recipients' Google Calendar settings.

@@ -46,7 +46,7 @@ In Sim Studio, the DALL-E integration enables your agents to generate images pro

## Usage Instructions

Create high-quality images using OpenAI
Create high-quality images using OpenAI's image generation models. Configure resolution, quality, style, and other parameters to get exactly the image you need.

@@ -63,7 +63,7 @@ This integration is particularly valuable for building agents that need to gathe

## Usage Instructions

Transform web content into clean, readable text using Jina AI
Transform web content into clean, readable text using Jina AI's advanced extraction capabilities. Extract meaningful content from websites while preserving important information and optionally gathering links.

@@ -2,6 +2,7 @@
  "items": [
    "index",
    "airtable",
    "arxiv",
    "browser_use",
    "clay",
    "confluence",

@@ -53,6 +54,7 @@
    "wealthbox",
    "webhook",
    "whatsapp",
    "wikipedia",
    "x",
    "youtube"
  ]
@@ -29,7 +29,17 @@ With Notion, you can:
- **Connect information**: Link between pages and databases to create a knowledge network
- **Access anywhere**: Use Notion across web, desktop, and mobile platforms with automatic syncing

In Sim Studio, the Notion integration enables your agents to interact directly with your Notion workspace programmatically. This allows for powerful automation scenarios such as knowledge management, content creation, and information retrieval. Your agents can read existing Notion pages to extract information, write to pages to update content, and create new pages from scratch. This integration bridges the gap between your AI workflows and your knowledge base, enabling seamless documentation and information management. By connecting Sim Studio with Notion, you can automate documentation processes, maintain up-to-date information repositories, generate reports, and organize information intelligently - all through your intelligent agents.
In Sim Studio, the Notion integration enables your agents to interact directly with your Notion workspace programmatically. This allows for powerful automation scenarios such as knowledge management, content creation, and information retrieval. Your agents can:

- **Read Notion pages**: Extract content and metadata from any Notion page.
- **Read Notion databases**: Retrieve database structure and information.
- **Write to pages**: Append new content to existing Notion pages.
- **Create new pages**: Generate new Notion pages under a parent page, with custom titles and content.
- **Query databases**: Search and filter database entries using advanced filter and sort criteria.
- **Search workspace**: Search across your entire Notion workspace for pages or databases matching specific queries.
- **Create new databases**: Programmatically create new databases with custom properties and structure.

This integration bridges the gap between your AI workflows and your knowledge base, enabling seamless documentation and information management. By connecting Sim Studio with Notion, you can automate documentation processes, maintain up-to-date information repositories, generate reports, and organize information intelligently—all through your intelligent agents.
{/* MANUAL-CONTENT-END */}
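For reference, the "Create new pages" operation described above corresponds roughly to this kind of call against Notion's public REST API. This is a hedged sketch, not Sim Studio's implementation; the token, parent page ID, and helper name are placeholders.

```typescript
// Hedged sketch: creating a Notion page under a parent page via the public API.
export async function createNotionPage(token: string, parentPageId: string, title: string) {
  const response = await fetch('https://api.notion.com/v1/pages', {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${token}`,
      'Notion-Version': '2022-06-28',
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({
      parent: { page_id: parentPageId },
      properties: {
        // A minimal page with just a title property
        title: { title: [{ text: { content: title } }] },
      },
    }),
  })
  return response.json()
}
```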
@@ -43,7 +43,7 @@ In Sim Studio, the OpenAI integration enables your agents to leverage these powe

## Usage Instructions

Convert text into numerical vector representations using OpenAI
Convert text into numerical vector representations using OpenAI's embedding models. Transform text data into embeddings for semantic search, clustering, and other vector-based operations.

@@ -45,7 +45,7 @@ In Sim Studio, the Pinecone integration enables your agents to leverage vector s

## Usage Instructions

Store, search, and retrieve vector embeddings using Pinecone
Store, search, and retrieve vector embeddings using Pinecone's specialized vector database. Generate embeddings from text and perform semantic similarity searches with customizable filtering options.
@@ -80,6 +80,27 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
  </svg>`}
/>

{/* MANUAL-CONTENT-START:intro */}
[Qdrant](https://qdrant.tech) is an open-source vector database designed for efficient storage, management, and retrieval of high-dimensional vector embeddings. Qdrant enables fast and scalable semantic search, making it ideal for AI applications that require similarity search, recommendation systems, and contextual information retrieval.

With Qdrant, you can:

- **Store vector embeddings**: Efficiently manage and persist high-dimensional vectors at scale
- **Perform semantic similarity search**: Find the most similar vectors to a query vector in real time
- **Filter and organize data**: Use advanced filtering to narrow down search results based on metadata or payload
- **Fetch specific points**: Retrieve vectors and their associated payloads by ID
- **Scale seamlessly**: Handle large collections and high-throughput workloads

In Sim Studio, the Qdrant integration enables your agents to interact with Qdrant programmatically as part of their workflows. Supported operations include:

- **Upsert**: Insert or update points (vectors and payloads) in a Qdrant collection
- **Search**: Perform similarity search to find vectors most similar to a given query vector, with optional filtering and result customization
- **Fetch**: Retrieve specific points from a collection by their IDs, with options to include payloads and vectors

This integration allows your agents to leverage powerful vector search and management capabilities, enabling advanced automation scenarios such as semantic search, recommendation, and contextual retrieval. By connecting Sim Studio with Qdrant, you can build agents that understand context, retrieve relevant information from large datasets, and deliver more intelligent and personalized responses—all without managing complex infrastructure.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Store, search, and retrieve vector embeddings using Qdrant. Perform semantic similarity searches and manage your vector collections.
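As a rough illustration of the Search operation listed above, a similarity search against Qdrant's REST API looks like the sketch below. The base URL, collection name, and API key are placeholders; the Qdrant block handles this call for you.

```typescript
// Hedged sketch: Qdrant similarity search over a collection's points.
export async function qdrantSearch(
  baseUrl: string,
  apiKey: string,
  collection: string,
  vector: number[],
) {
  const response = await fetch(`${baseUrl}/collections/${collection}/points/search`, {
    method: 'POST',
    headers: { 'api-key': apiKey, 'Content-Type': 'application/json' },
    body: JSON.stringify({
      vector,           // query embedding
      limit: 5,         // number of nearest neighbors to return
      with_payload: true,
    }),
  })
  return response.json()
}
```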
@@ -26,19 +26,14 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
/>

{/* MANUAL-CONTENT-START:intro */}
[Reddit](https://www.reddit.com/) is a vast social news aggregation, content rating, and discussion platform where registered users submit content such as text posts, images, and links, which are then voted up or down by other members. Known as "the front page of the internet," Reddit is organized into thousands of communities called subreddits, each focused on a specific topic.
[Reddit](https://www.reddit.com/) is a social platform where users share and discuss content in topic-based communities called subreddits.

With Reddit, you can:
In Sim Studio, you can use the Reddit integration to:

- **Access diverse content**: Browse thousands of specialized communities covering virtually every topic
- **Stay informed**: Get real-time updates on trending news, discussions, and viral content
- **Engage with communities**: Participate in discussions with like-minded individuals
- **Discover trending topics**: See what's popular across different interest groups
- **Gather insights**: Collect opinions, feedback, and perspectives from diverse user groups
- **Monitor public sentiment**: Track reactions and discussions around specific topics or brands
- **Research niche topics**: Access specialized knowledge in dedicated communities
- **Get Posts**: Retrieve posts from any subreddit, with options to sort (Hot, New, Top, Rising) and filter Top posts by time (Day, Week, Month, Year, All Time).
- **Get Comments**: Fetch comments from a specific post, with options to sort and set the number of comments.

In Sim Studio, the Reddit integration enables your agents to programmatically access and analyze content from Reddit's vast ecosystem. This allows for powerful automation scenarios such as trend monitoring, content aggregation, and sentiment analysis. Your agents can retrieve popular posts from specific subreddits, extract valuable information, and incorporate these insights into their workflows. This integration bridges the gap between social media monitoring and your AI workflows, enabling more informed decision-making based on public discussions and trending topics. By connecting Sim Studio with Reddit, you can create agents that stay on top of relevant conversations, identify emerging trends, gather diverse perspectives, and deliver timely insights - all without requiring manual browsing of countless Reddit threads.
These operations let your agents access and analyze Reddit content as part of your automated workflows.
{/* MANUAL-CONTENT-END */}
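The Get Posts operation above maps naturally onto Reddit's public JSON listings; a minimal sketch follows. The endpoint is Reddit's public API, while the function and User-Agent string are illustrative placeholders.

```typescript
// Hedged sketch: fetching subreddit posts the way a "Get Posts" operation might,
// using Reddit's public JSON listing endpoints.
type RedditSort = 'hot' | 'new' | 'top' | 'rising'

export async function getSubredditPosts(
  subreddit: string,
  sort: RedditSort = 'hot',
  limit = 10,
  time: 'day' | 'week' | 'month' | 'year' | 'all' = 'week',
) {
  const url = new URL(`https://www.reddit.com/r/${subreddit}/${sort}.json`)
  url.searchParams.set('limit', String(limit))
  if (sort === 'top') url.searchParams.set('t', time) // time filter applies to Top
  const response = await fetch(url, { headers: { 'User-Agent': 'docs-example/0.1' } })
  const data = await response.json()
  return data.data.children.map((child: { data: unknown }) => child.data) // post objects
}
```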
@@ -78,7 +78,7 @@ In Sim Studio, the Serper integration enables your agents to leverage the power

## Usage Instructions

Access real-time web search results with Serper
Access real-time web search results with Serper's Google Search API integration. Retrieve structured search data including web pages, news, images, and places with customizable language and region settings.
@@ -49,14 +49,22 @@ With Slack, you can:
- **Automate agent notifications**: Send real-time updates from your Sim Studio agents to any Slack channel
- **Create webhook endpoints**: Configure Slack bots as webhooks to trigger Sim Studio workflows from Slack activities
- **Enhance agent workflows**: Integrate Slack messaging into your agents to deliver results, alerts, and status updates
- **Create and share Slack canvases**: Programmatically generate collaborative documents (canvases) in Slack channels
- **Read messages from channels**: Retrieve and process recent messages from any Slack channel for monitoring or workflow triggers

In Sim Studio, the Slack integration enables your agents to programmatically send messages to any Slack channel or user as part of their workflows. This allows for powerful automation scenarios such as sending notifications, alerts, updates, and reports directly to your team's communication hub. Your agents can deliver timely information, share results from processes they've completed, or alert team members when attention is needed. This integration bridges the gap between your AI workflows and your team's communication, ensuring everyone stays informed without manual intervention. By connecting Sim Studio with Slack, you can create agents that keep your team updated with relevant information at the right time, enhance collaboration by sharing insights automatically, and reduce the need for manual status updates - all while leveraging your existing Slack workspace where your team already communicates.
In Sim Studio, the Slack integration enables your agents to programmatically interact with Slack in several ways as part of their workflows:

- **Send messages**: Agents can send formatted messages to any Slack channel or user, supporting Slack's mrkdwn syntax for rich formatting.
- **Create canvases**: Agents can create and share Slack canvases (collaborative documents) directly in channels, enabling richer content sharing and documentation.
- **Read messages**: Agents can read recent messages from channels, allowing for monitoring, reporting, or triggering further actions based on channel activity.

This allows for powerful automation scenarios such as sending notifications, alerts, updates, and reports directly to your team's communication hub, sharing structured documents, or monitoring conversations for workflow triggers. Your agents can deliver timely information, share results from processes they've completed, create collaborative documents, or alert team members when attention is needed. This integration bridges the gap between your AI workflows and your team's communication, ensuring everyone stays informed without manual intervention. By connecting Sim Studio with Slack, you can create agents that keep your team updated with relevant information at the right time, enhance collaboration by sharing insights automatically, and reduce the need for manual status updates—all while leveraging your existing Slack workspace where your team already communicates.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Comprehensive Slack integration with OAuth authentication. Send formatted messages using Slack
Comprehensive Slack integration with OAuth authentication. Send formatted messages using Slack's mrkdwn syntax.
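For context, the Send messages operation described above boils down to Slack's `chat.postMessage` Web API method. A hedged sketch with a placeholder bot token and helper name:

```typescript
// Hedged sketch: posting a mrkdwn-formatted message via Slack's Web API.
export async function postSlackMessage(botToken: string, channel: string, text: string) {
  const response = await fetch('https://slack.com/api/chat.postMessage', {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${botToken}`,
      'Content-Type': 'application/json; charset=utf-8',
    },
    // text may use Slack mrkdwn, e.g. *bold*, _italic_, <https://example.com|links>
    body: JSON.stringify({ channel, text }),
  })
  return response.json() // { ok: boolean, ts?: string, error?: string }
}
```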
@@ -51,19 +51,26 @@ import { BlockInfoCard } from "@/components/ui/block-info-card"
/>

{/* MANUAL-CONTENT-START:intro */}
[Supabase](https://www.supabase.com/) is an open-source Firebase alternative that provides a suite of tools for building modern applications. It offers a PostgreSQL database, authentication, instant APIs, real-time subscriptions, storage, and edge functions, all within a unified platform.
[Supabase](https://www.supabase.com/) is a powerful open-source backend-as-a-service platform that provides developers with a suite of tools to build, scale, and manage modern applications. Supabase offers a fully managed [PostgreSQL](https://www.postgresql.org/) database, robust authentication, instant RESTful and GraphQL APIs, real-time subscriptions, file storage, and edge functions—all accessible through a unified and developer-friendly interface. Its open-source nature and compatibility with popular frameworks make it a compelling alternative to Firebase, with the added benefit of SQL flexibility and transparency.

With Supabase, you can:
**Why Supabase?**
- **Instant APIs:** Every table and view in your database is instantly available via REST and GraphQL endpoints, making it easy to build data-driven applications without writing custom backend code.
- **Real-time Data:** Supabase enables real-time subscriptions, allowing your apps to react instantly to changes in your database.
- **Authentication & Authorization:** Built-in user management with support for email, OAuth, SSO, and more, plus row-level security for granular access control.
- **Storage:** Securely upload, serve, and manage files with built-in storage that integrates seamlessly with your database.
- **Edge Functions:** Deploy serverless functions close to your users for low-latency custom logic.

- **Manage relational data**: Work with a powerful PostgreSQL database with full SQL capabilities
- **Implement authentication**: Add secure user authentication with multiple providers
- **Create instant APIs**: Generate RESTful APIs automatically based on your database schema
- **Enable real-time updates**: Subscribe to database changes and build reactive applications
- **Store files**: Upload, transform, and serve files with storage buckets
- **Deploy serverless functions**: Run code in response to database changes or HTTP requests
- **Secure your application**: Implement row-level security and manage permissions
**Using Supabase in Sim Studio**

In Sim Studio, the Supabase integration enables your agents to interact with your Supabase projects programmatically. This allows for powerful automation scenarios such as data querying, record creation, user management, and file operations. Your agents can retrieve information from your database, insert new records, update existing data, and leverage Supabase's authentication and storage capabilities as part of their workflows. This integration bridges the gap between your AI workflows and your application's data layer, enabling more sophisticated and data-driven automations. By connecting Sim Studio with Supabase, you can create agents that maintain data consistency across systems, trigger actions based on database changes, perform complex data operations, and build workflows that leverage your application's existing data infrastructure - all without requiring manual intervention or custom code.
Sim Studio's Supabase integration makes it effortless to connect your agentic workflows to your Supabase projects. With just a few configuration fields—your Project ID, Table name, and Service Role Secret—you can securely interact with your database directly from your Sim Studio blocks. The integration abstracts away the complexity of API calls, letting you focus on building logic and automations.

**Key benefits of using Supabase in Sim Studio:**
- **No-code/low-code database operations:** Query, insert, update, and delete rows in your Supabase tables without writing SQL or backend code.
- **Flexible querying:** Use [PostgREST filter syntax](https://postgrest.org/en/stable/api.html#operators) to perform advanced queries, including filtering, ordering, and limiting results.
- **Seamless integration:** Easily connect Supabase to other tools and services in your workflow, enabling powerful automations such as syncing data, triggering notifications, or enriching records.
- **Secure and scalable:** All operations use your Supabase Service Role Secret, ensuring secure access to your data with the scalability of a managed cloud platform.

Whether you're building internal tools, automating business processes, or powering production applications, Supabase in Sim Studio provides a fast, reliable, and developer-friendly way to manage your data and backend logic—no infrastructure management required. Simply configure your block, select the operation you need, and let Sim Studio handle the rest.
{/* MANUAL-CONTENT-END */}
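To ground the PostgREST filter syntax mentioned above, here is a hedged sketch of the kind of REST call that syntax describes. The project URL, table, key, and the `status` column are hypothetical placeholders, not values from this repository.

```typescript
// Hedged sketch: querying a Supabase table through its PostgREST endpoint
// using filter, order, and limit parameters.
export async function querySupabaseTable(projectUrl: string, serviceRoleKey: string, table: string) {
  const url = new URL(`${projectUrl}/rest/v1/${table}`)
  url.searchParams.set('select', '*')
  url.searchParams.set('status', 'eq.active')      // PostgREST filter: status = 'active'
  url.searchParams.set('order', 'created_at.desc') // newest rows first
  url.searchParams.set('limit', '10')
  const response = await fetch(url, {
    headers: {
      apikey: serviceRoleKey,
      Authorization: `Bearer ${serviceRoleKey}`,
    },
  })
  return response.json()
}
```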
@@ -58,7 +58,7 @@ In Sim Studio, the Tavily integration enables your agents to search the web and

## Usage Instructions

Access Tavily
Access Tavily's AI-powered search engine to find relevant information from across the web. Extract and process content from specific URLs with customizable depth options.
179 apps/docs/content/docs/tools/wikipedia.mdx (new file)
@@ -0,0 +1,179 @@
---
title: Wikipedia
description: Search and retrieve content from Wikipedia
---

import { BlockInfoCard } from "@/components/ui/block-info-card"

<BlockInfoCard
  type="wikipedia"
  color="#000000"
  icon={true}
  iconSvg={`<svg className="block-icon"
    fill='currentColor'
    version='1.1'
    id='Capa_1'
    xmlns='http://www.w3.org/2000/svg'
    xmlnsXlink='http://www.w3.org/1999/xlink'
    viewBox='0 0 98.05 98.05'
    xmlSpace='preserve'
  >
    <g>
      <path
        d='M98.023,17.465l-19.584-0.056c-0.004,0.711-0.006,1.563-0.017,2.121c1.664,0.039,5.922,0.822,7.257,4.327L66.92,67.155
        c-0.919-2.149-9.643-21.528-10.639-24.02l9.072-18.818c1.873-2.863,5.455-4.709,8.918-4.843l-0.01-1.968L55.42,17.489
        c-0.045,0.499,0.001,1.548-0.068,2.069c5.315,0.144,7.215,1.334,5.941,4.508c-2.102,4.776-6.51,13.824-7.372,15.475
        c-2.696-5.635-4.41-9.972-7.345-16.064c-1.266-2.823,1.529-3.922,4.485-4.004v-1.981l-21.82-0.067
        c0.016,0.93-0.021,1.451-0.021,2.131c3.041,0.046,6.988,0.371,8.562,3.019c2.087,4.063,9.044,20.194,11.149,24.514
        c-2.685,5.153-9.207,17.341-11.544,21.913c-3.348-7.43-15.732-36.689-19.232-44.241c-1.304-3.218,3.732-5.077,6.646-5.213
        l0.019-2.148L0,17.398c0.005,0.646,0.027,1.71,0.029,2.187c4.025-0.037,9.908,6.573,11.588,10.683
        c7.244,16.811,14.719,33.524,21.928,50.349c0.002,0.029,2.256,0.059,2.281,0.008c4.717-9.653,10.229-19.797,15.206-29.56
        L63.588,80.64c0.005,0.004,2.082,0.016,2.093,0.007c7.962-18.196,19.892-46.118,23.794-54.933c1.588-3.767,4.245-6.064,8.543-6.194
        l0.032-1.956L98.023,17.465z'
      />
    </g>
  </svg>`}
/>

{/* MANUAL-CONTENT-START:intro */}
[Wikipedia](https://www.wikipedia.org/) is the world's largest free online encyclopedia, offering millions of articles on a vast range of topics, collaboratively written and maintained by volunteers.

With Wikipedia, you can:

- **Search for articles**: Find relevant Wikipedia pages by searching for keywords or topics
- **Get article summaries**: Retrieve concise summaries of Wikipedia pages for quick reference
- **Access full content**: Obtain the complete content of Wikipedia articles for in-depth information
- **Discover random articles**: Explore new topics by retrieving random Wikipedia pages

In Sim Studio, the Wikipedia integration enables your agents to programmatically access and interact with Wikipedia content as part of their workflows. Agents can search for articles, fetch summaries, retrieve full page content, and discover random articles, empowering your automations with up-to-date, reliable information from the world's largest encyclopedia. This integration is ideal for scenarios such as research, content enrichment, fact-checking, and knowledge discovery, allowing your agents to seamlessly incorporate Wikipedia data into their decision-making and task execution processes.
{/* MANUAL-CONTENT-END */}

## Usage Instructions

Access Wikipedia articles, search for pages, get summaries, retrieve full content, and discover random articles from the world's largest encyclopedia.

## Tools

### `wikipedia_summary`

Get a summary and metadata for a specific Wikipedia page.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `pageTitle` | string | Yes | Title of the Wikipedia page to get summary for |

#### Output

| Parameter | Type |
| --------- | ---- |
| `summary` | string |
| `title` | string |
| `displaytitle` | string |
| `description` | string |
| `extract` | string |
| `extract_html` | string |
| `thumbnail` | string |
| `originalimage` | string |
| `content_urls` | string |
| `revisions` | string |
| `edit` | string |
| `talk` | string |

### `wikipedia_search`

Search for Wikipedia pages by title or content.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `query` | string | Yes | Search query to find Wikipedia pages |
| `searchLimit` | number | No | Maximum number of results to return \(default: 10, max: 50\) |

#### Output

| Parameter | Type |
| --------- | ---- |
| `totalHits` | string |
| `query` | string |
| `searchResults` | string |

### `wikipedia_content`

Get the full HTML content of a Wikipedia page.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `pageTitle` | string | Yes | Title of the Wikipedia page to get content for |

#### Output

| Parameter | Type |
| --------- | ---- |
| `content` | string |
| `pageid` | string |
| `html` | string |
| `revision` | string |
| `tid` | string |
| `timestamp` | string |
| `content_model` | string |
| `content_format` | string |

### `wikipedia_random`

Get a random Wikipedia page.

#### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |

#### Output

| Parameter | Type |
| --------- | ---- |
| `randomPage` | string |
| `title` | string |
| `displaytitle` | string |
| `description` | string |
| `extract` | string |
| `thumbnail` | string |
| `content_urls` | string |

## Block Configuration

### Input

| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `operation` | string | Yes | Operation |

### Outputs

| Output | Type | Description |
| ------ | ---- | ----------- |
| `summary` | json | summary output from the block |
| `searchResults` | json | searchResults output from the block |
| `totalHits` | number | totalHits output from the block |
| `content` | json | content output from the block |
| `randomPage` | json | randomPage output from the block |

## Notes

- Category: `tools`
- Type: `wikipedia`
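The `wikipedia_summary` output fields above (title, displaytitle, description, extract, extract_html, thumbnail, originalimage, content_urls, and so on) line up with Wikipedia's public REST summary endpoint. A minimal sketch, with a hypothetical helper name, of what such a lookup involves:

```typescript
// Hedged sketch: fetching a page summary from Wikipedia's public REST API.
// The JSON response carries fields such as title, displaytitle, description,
// extract, extract_html, thumbnail, originalimage, and content_urls.
export async function getWikipediaSummary(pageTitle: string) {
  const encoded = encodeURIComponent(pageTitle.replace(/ /g, '_'))
  const response = await fetch(`https://en.wikipedia.org/api/rest_v1/page/summary/${encoded}`)
  if (!response.ok) {
    throw new Error(`Wikipedia summary request failed: ${response.status}`)
  }
  return response.json()
}
```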
@@ -6,11 +6,12 @@ description: Trigger workflow execution from external webhooks
import { Callout } from 'fumadocs-ui/components/callout'
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
import { ThemeImage } from '@/components/ui/theme-image'
import { Video } from '@/components/ui/video'

The Webhook block allows external services to automatically trigger your workflow execution through HTTP webhooks.

<div className="mx-auto w-full overflow-hidden rounded-lg">
  <video autoPlay loop muted playsInline className="w-full -mb-2 rounded-lg" src="/webhooks.mp4"></video>
  <Video src="webhooks.mp4" />
</div>

## Supported Providers

@@ -7,11 +7,12 @@ import { Callout } from 'fumadocs-ui/components/callout'
import { Step, Steps } from 'fumadocs-ui/components/steps'
import { Tab, Tabs } from 'fumadocs-ui/components/tabs'
import { ThemeImage } from '@/components/ui/theme-image'
import { Video } from '@/components/ui/video'

Variables in Sim Studio act as a global store for data that can be accessed and modified by any block in your workflow. They provide a powerful way to share information between different parts of your workflow, maintain state, and create more dynamic applications.

<div className="mx-auto w-full overflow-hidden rounded-lg">
  <video autoPlay loop muted playsInline className="w-full -mb-2 rounded-lg" src="/variables.mp4"></video>
  <Video src="variables.mp4" />
</div>

<Callout type="info">

@@ -57,7 +58,7 @@ Variables can be accessed from any block in your workflow using the variable dro
3. Select the variable you want to use

<div className="mx-auto w-full overflow-hidden rounded-lg">
  <video autoPlay loop muted playsInline className="w-full -mb-2 rounded-lg" src="/variables-dropdown.mp4"></video>
  <Video src="variables-dropdown.mp4" />
</div>

<Callout>
@@ -7,3 +7,25 @@ import { twMerge } from 'tailwind-merge'
export function cn(...inputs: ClassValue[]) {
  return twMerge(clsx(inputs))
}

/**
 * Get the full URL for an asset stored in Vercel Blob or local fallback
 * - If CDN is configured (NEXT_PUBLIC_BLOB_BASE_URL), uses CDN URL
 * - Otherwise falls back to local static assets served from root path
 */
export function getAssetUrl(filename: string) {
  const cdnBaseUrl = process.env.NEXT_PUBLIC_BLOB_BASE_URL
  if (cdnBaseUrl) {
    return `${cdnBaseUrl}/${filename}`
  }
  return `/${filename}`
}

/**
 * Get the full URL for a video asset stored in Vercel Blob or local fallback
 * - If CDN is configured (NEXT_PUBLIC_BLOB_BASE_URL), uses CDN URL
 * - Otherwise falls back to local static assets served from root path
 */
export function getVideoUrl(filename: string) {
  return getAssetUrl(filename)
}
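With these new helpers, callers pass a path relative to the blob/static root and the CDN prefix is applied only when configured; the later changes in this compare (e.g. `avatar={getAssetUrl('static/sim.png')}`) follow this pattern. A minimal usage illustration, with an example CDN base URL:

```ts
// Usage sketch for the helpers introduced above. With
// NEXT_PUBLIC_BLOB_BASE_URL=https://cdn.example.com set, getAssetUrl('static/sim.png')
// resolves to https://cdn.example.com/static/sim.png; unset, it falls back to /static/sim.png.
import { getAssetUrl, getVideoUrl } from '@/lib/utils'

const avatarSrc = getAssetUrl('static/sim.png') // image asset, as used in the blog cards below
const demoVideo = getVideoUrl('variables.mp4') // video asset, same resolution logic

console.log(avatarSrc, demoVideo)
```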
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -2,7 +2,7 @@
|
||||
|
||||
import Image from 'next/image'
|
||||
import Link from 'next/link'
|
||||
import { GridPattern } from '../(landing)/components/grid-pattern'
|
||||
import { GridPattern } from '@/app/(landing)/components/grid-pattern'
|
||||
|
||||
export default function AuthLayout({ children }: { children: React.ReactNode }) {
|
||||
return (
|
||||
|
||||
@@ -6,7 +6,7 @@ import { act, fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
import { useRouter, useSearchParams } from 'next/navigation'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { client } from '@/lib/auth-client'
|
||||
import LoginPage from './login-form'
|
||||
import LoginPage from '@/app/(auth)/login/login-form'
|
||||
|
||||
vi.mock('next/navigation', () => ({
|
||||
useRouter: vi.fn(),
|
||||
|
||||
@@ -15,7 +15,7 @@ import {
|
||||
import { Input } from '@/components/ui/input'
|
||||
import { Label } from '@/components/ui/label'
|
||||
import { client } from '@/lib/auth-client'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { SocialLoginButtons } from '@/app/(auth)/components/social-login-buttons'
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { getOAuthProviderStatus } from '../components/oauth-provider-checker'
|
||||
import LoginForm from './login-form'
|
||||
import { getOAuthProviderStatus } from '@/app/(auth)/components/oauth-provider-checker'
|
||||
import LoginForm from '@/app/(auth)/login/login-form'
|
||||
|
||||
// Force dynamic rendering to avoid prerender errors with search params
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
@@ -11,8 +11,8 @@ import {
|
||||
CardHeader,
|
||||
CardTitle,
|
||||
} from '@/components/ui/card'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { SetNewPasswordForm } from './reset-password-form'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { SetNewPasswordForm } from '@/app/(auth)/reset-password/reset-password-form'
|
||||
|
||||
const logger = createLogger('ResetPasswordPage')
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { env, isTruthy } from '@/lib/env'
|
||||
import { getOAuthProviderStatus } from '../components/oauth-provider-checker'
|
||||
import SignupForm from './signup-form'
|
||||
import { getOAuthProviderStatus } from '@/app/(auth)/components/oauth-provider-checker'
|
||||
import SignupForm from '@/app/(auth)/signup/signup-form'
|
||||
|
||||
// Force dynamic rendering to avoid prerender errors with search params
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
@@ -6,7 +6,7 @@ import { act, fireEvent, render, screen, waitFor } from '@testing-library/react'
|
||||
import { useRouter, useSearchParams } from 'next/navigation'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { client } from '@/lib/auth-client'
|
||||
import SignupPage from './signup-form'
|
||||
import SignupPage from '@/app/(auth)/signup/signup-form'
|
||||
|
||||
vi.mock('next/navigation', () => ({
|
||||
useRouter: vi.fn(),
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { env } from '@/lib/env'
|
||||
import { isProd } from '@/lib/environment'
|
||||
import { getBaseUrl } from '@/lib/urls/utils'
|
||||
import { VerifyContent } from './verify-content'
|
||||
import { VerifyContent } from '@/app/(auth)/verify/verify-content'
|
||||
|
||||
// Force dynamic rendering to avoid prerender errors with search params
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
@@ -4,7 +4,7 @@ import { useEffect, useState } from 'react'
|
||||
import { useRouter, useSearchParams } from 'next/navigation'
|
||||
import { client } from '@/lib/auth-client'
|
||||
import { env, isTruthy } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const logger = createLogger('useVerification')
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ import { Suspense, useEffect, useState } from 'react'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { InputOTP, InputOTPGroup, InputOTPSlot } from '@/components/ui/input-otp'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { useVerification } from './use-verification'
|
||||
import { useVerification } from '@/app/(auth)/verify/use-verification'
|
||||
|
||||
interface VerifyContentProps {
|
||||
hasResendKey: boolean
|
||||
|
||||
@@ -17,9 +17,9 @@ import ReactFlow, {
|
||||
} from 'reactflow'
|
||||
import 'reactflow/dist/style.css'
|
||||
|
||||
import { HeroBlock } from './hero-block'
|
||||
import { HeroEdge } from './hero-edge'
|
||||
import { useWindowSize } from './use-window-size'
|
||||
import { HeroBlock } from '@/app/(landing)/components/hero-block'
|
||||
import { HeroEdge } from '@/app/(landing)/components/hero-edge'
|
||||
import { useWindowSize } from '@/app/(landing)/components/use-window-size'
|
||||
|
||||
const nodeTypes: NodeTypes = { heroBlock: HeroBlock }
|
||||
const edgeTypes: EdgeTypes = { heroEdge: HeroEdge }
|
||||
|
||||
@@ -15,7 +15,7 @@ import {
|
||||
SheetTitle,
|
||||
SheetTrigger,
|
||||
} from '@/components/ui/sheet'
|
||||
import { usePrefetchOnHover } from '../utils/prefetch'
|
||||
import { usePrefetchOnHover } from '@/app/(landing)/utils/prefetch'
|
||||
|
||||
// --- Framer Motion Variants ---
|
||||
const desktopNavContainerVariants = {
|
||||
|
||||
@@ -3,9 +3,9 @@
|
||||
import { useEffect, useState } from 'react'
|
||||
import { AnimatePresence, motion } from 'framer-motion'
|
||||
import { usePathname } from 'next/navigation'
|
||||
import { getFormattedGitHubStars } from '../actions/github'
|
||||
import GitHubStarsClient from './github-stars-client'
|
||||
import NavClient from './nav-client'
|
||||
import { getFormattedGitHubStars } from '@/app/(landing)/actions/github'
|
||||
import GitHubStarsClient from '@/app/(landing)/components/github-stars-client'
|
||||
import NavClient from '@/app/(landing)/components/nav-client'
|
||||
|
||||
interface NavWrapperProps {
|
||||
onOpenTypeformLink: () => void
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
'use client'
|
||||
|
||||
import { motion } from 'framer-motion'
|
||||
import { BlogCard } from '../blog-card'
|
||||
import { getAssetUrl } from '@/lib/utils'
|
||||
import { BlogCard } from '@/app/(landing)/components/blog-card'
|
||||
|
||||
function Blogs() {
|
||||
return (
|
||||
@@ -50,7 +51,7 @@ function Blogs() {
|
||||
date={new Date('25 April 2025')}
|
||||
author='Emir Ayaz'
|
||||
authorRole='Designer'
|
||||
avatar='/static/sim.png'
|
||||
avatar={getAssetUrl('static/sim.png')}
|
||||
type='Agents'
|
||||
readTime='6'
|
||||
/>
|
||||
@@ -61,7 +62,7 @@ function Blogs() {
|
||||
date={new Date('25 April 2025')}
|
||||
author='Emir Ayaz'
|
||||
authorRole='Designer'
|
||||
avatar='/static/sim.png'
|
||||
avatar={getAssetUrl('static/sim.png')}
|
||||
type='Agents'
|
||||
readTime='6'
|
||||
/>
|
||||
@@ -80,10 +81,10 @@ function Blogs() {
|
||||
date={new Date('25 April 2025')}
|
||||
author='Emir Ayaz'
|
||||
authorRole='Designer'
|
||||
avatar='/static/sim.png'
|
||||
avatar={getAssetUrl('static/sim.png')}
|
||||
type='Agents'
|
||||
readTime='6'
|
||||
image='/static/hero.png'
|
||||
image={getAssetUrl('static/hero.png')}
|
||||
/>
|
||||
<BlogCard
|
||||
href='/blog/test'
|
||||
@@ -91,7 +92,7 @@ function Blogs() {
|
||||
description="Learn how to create a fully functional AI agent using SimStudio.ai's unified API and workflows."
|
||||
author='Emir Ayaz'
|
||||
authorRole='Designer'
|
||||
avatar='/static/sim.png'
|
||||
avatar={getAssetUrl('static/sim.png')}
|
||||
type='Agents'
|
||||
readTime='6'
|
||||
/>
|
||||
@@ -110,7 +111,7 @@ function Blogs() {
|
||||
date={new Date('25 April 2025')}
|
||||
author='Emir Ayaz'
|
||||
authorRole='Designer'
|
||||
avatar='/static/sim.png'
|
||||
avatar={getAssetUrl('static/sim.png')}
|
||||
type='Agents'
|
||||
readTime='6'
|
||||
/>
|
||||
@@ -121,7 +122,7 @@ function Blogs() {
|
||||
date={new Date('25 April 2025')}
|
||||
author='Emir Ayaz'
|
||||
authorRole='Designer'
|
||||
avatar='/static/sim.png'
|
||||
avatar={getAssetUrl('static/sim.png')}
|
||||
type='Functions'
|
||||
readTime='6'
|
||||
/>
|
||||
|
||||
@@ -18,8 +18,8 @@ import 'reactflow/dist/style.css'
|
||||
|
||||
import { AgentIcon, ConnectIcon, StartIcon } from '@/components/icons'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { DotPattern } from '../dot-pattern'
|
||||
import { HeroBlock } from '../hero-block'
|
||||
import { DotPattern } from '@/app/(landing)/components/dot-pattern'
|
||||
import { HeroBlock } from '@/app/(landing)/components/hero-block'
|
||||
|
||||
// --- Types ---
|
||||
type Feature = {
|
||||
|
||||
@@ -6,8 +6,8 @@ import { useRouter } from 'next/navigation'
|
||||
import { DiscordIcon, GithubIcon, xIcon as XIcon } from '@/components/icons'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { useSession } from '@/lib/auth-client'
|
||||
import { usePrefetchOnHover } from '../../utils/prefetch'
|
||||
import useIsMobile from '../hooks/use-is-mobile'
|
||||
import useIsMobile from '@/app/(landing)/components/hooks/use-is-mobile'
|
||||
import { usePrefetchOnHover } from '@/app/(landing)/utils/prefetch'
|
||||
|
||||
function Footer() {
|
||||
const router = useRouter()
|
||||
|
||||
@@ -5,8 +5,8 @@ import { Command, CornerDownLeft } from 'lucide-react'
|
||||
import { useRouter } from 'next/navigation'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { useSession } from '@/lib/auth-client'
|
||||
import { GridPattern } from '../grid-pattern'
|
||||
import HeroWorkflowProvider from '../hero-workflow'
|
||||
import { GridPattern } from '@/app/(landing)/components/grid-pattern'
|
||||
import HeroWorkflowProvider from '@/app/(landing)/components/hero-workflow'
|
||||
|
||||
function Hero() {
|
||||
const router = useRouter()
|
||||
|
||||
@@ -3,9 +3,9 @@
|
||||
import { motion } from 'framer-motion'
|
||||
import { GitBranch, RefreshCcw } from 'lucide-react'
|
||||
import ReactFlow, { ConnectionLineType, Position, ReactFlowProvider } from 'reactflow'
|
||||
import { DotPattern } from '@/app/(landing)/components/dot-pattern'
|
||||
import { HeroBlock } from '@/app/(landing)/components/hero-block'
|
||||
import { OrbitingCircles } from '@/app/(landing)/components/magicui/orbiting-circles'
|
||||
import { DotPattern } from '../dot-pattern'
|
||||
import { HeroBlock } from '../hero-block'
|
||||
|
||||
function Integrations() {
|
||||
return (
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
'use client'
|
||||
|
||||
import { motion } from 'framer-motion'
|
||||
import { getAssetUrl } from '@/lib/utils'
|
||||
import useIsMobile from '@/app/(landing)/components/hooks/use-is-mobile'
|
||||
import { Marquee } from '@/app/(landing)/components/magicui/marquee'
|
||||
import useIsMobile from '../hooks/use-is-mobile'
|
||||
|
||||
const X_TESTIMONIALS = [
|
||||
{
|
||||
@@ -10,63 +11,63 @@ const X_TESTIMONIALS = [
|
||||
username: '@GithubProjects',
|
||||
viewCount: '90.4k',
|
||||
tweetUrl: 'https://x.com/GithubProjects/status/1906383555707490499',
|
||||
profileImage: '/twitter/github-projects.jpg',
|
||||
profileImage: getAssetUrl('twitter/github-projects.jpg'),
|
||||
},
|
||||
{
|
||||
text: 'A very good looking agent workflow builder 🔥 and open source!',
|
||||
username: '@xyflowdev',
|
||||
viewCount: '3,246',
|
||||
tweetUrl: 'https://x.com/xyflowdev/status/1909501499719438670',
|
||||
profileImage: '/twitter/xyflow.jpg',
|
||||
profileImage: getAssetUrl('twitter/xyflow.jpg'),
|
||||
},
|
||||
{
|
||||
text: "🚨 BREAKING: This startup just dropped the fastest way to build AI agents.\n\nThis Figma-like canvas to build agents will blow your mind.\n\nHere's why this is the best tool for building AI agents:",
|
||||
username: '@hasantoxr',
|
||||
viewCount: '515k',
|
||||
tweetUrl: 'https://x.com/hasantoxr/status/1912909502036525271',
|
||||
profileImage: '/twitter/hasan.jpg',
|
||||
profileImage: getAssetUrl('twitter/hasan.jpg'),
|
||||
},
|
||||
{
|
||||
text: 'omfggggg this is the zapier of agent building\n\ni always believed that building agents and using ai should not be limited to technical people. i think this solves just that\n\nthe fact that this is also open source makes me so optimistic about the future of building with ai :)))\n\ncongrats @karabegemir & @typingwala !!!',
|
||||
username: '@nizzyabi',
|
||||
viewCount: '6,269',
|
||||
tweetUrl: 'https://x.com/nizzyabi/status/1907864421227180368',
|
||||
profileImage: '/twitter/nizzy.jpg',
|
||||
profileImage: getAssetUrl('twitter/nizzy.jpg'),
|
||||
},
|
||||
{
|
||||
text: "One of the best products I've seen in the space, and the hustle and grind I've seen from @karabegemir and @typingwala is insane. Sim Studio is positioned to build something game-changing, and there's no better team for the job.\n\nCongrats on the launch 🚀 🎊 great things ahead!",
|
||||
username: '@firestorm776',
|
||||
viewCount: '956',
|
||||
tweetUrl: 'https://x.com/firestorm776/status/1907896097735061598',
|
||||
profileImage: '/twitter/samarth.jpg',
|
||||
profileImage: getAssetUrl('twitter/samarth.jpg'),
|
||||
},
|
||||
{
|
||||
text: 'lfgg got access to @simstudioai via @zerodotemail 😎',
|
||||
username: '@nizzyabi',
|
||||
viewCount: '1,585',
|
||||
tweetUrl: 'https://x.com/nizzyabi/status/1910482357821595944',
|
||||
profileImage: '/twitter/nizzy.jpg',
|
||||
profileImage: getAssetUrl('twitter/nizzy.jpg'),
|
||||
},
|
||||
{
|
||||
text: 'Feels like we\'re finally getting a "Photoshop moment" for AI devs—visual, intuitive, and fast enough to keep up with ideas mid-flow.',
|
||||
username: '@syamrajk',
|
||||
viewCount: '2,643',
|
||||
tweetUrl: 'https://x.com/syamrajk/status/1912911980110946491',
|
||||
profileImage: '/twitter/syamrajk.jpg',
|
||||
profileImage: getAssetUrl('twitter/syamrajk.jpg'),
|
||||
},
|
||||
{
|
||||
text: "🚨 BREAKING: This startup just dropped the fastest way to build AI agents.\n\nThis Figma-like canvas to build agents will blow your mind.\n\nHere's why this is the best tool for building AI agents:",
|
||||
username: '@lazukars',
|
||||
viewCount: '47.4k',
|
||||
tweetUrl: 'https://x.com/lazukars/status/1913136390503600575',
|
||||
profileImage: '/twitter/lazukars.png',
|
||||
profileImage: getAssetUrl('twitter/lazukars.png'),
|
||||
},
|
||||
{
|
||||
text: 'The use cases are endless. Great work @simstudioai',
|
||||
username: '@daniel_zkim',
|
||||
viewCount: '103',
|
||||
tweetUrl: 'https://x.com/daniel_zkim/status/1907891273664782708',
|
||||
profileImage: '/twitter/daniel.jpg',
|
||||
profileImage: getAssetUrl('twitter/daniel.jpg'),
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
@@ -26,10 +26,10 @@ import {
|
||||
import { Avatar, AvatarFallback, AvatarImage } from '@/components/ui/avatar'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs'
|
||||
import { GridPattern } from '../components/grid-pattern'
|
||||
import NavWrapper from '../components/nav-wrapper'
|
||||
import Footer from '../components/sections/footer'
|
||||
import { getCachedContributorsData, prefetchContributorsData } from '../utils/prefetch'
|
||||
import { GridPattern } from '@/app/(landing)/components/grid-pattern'
|
||||
import NavWrapper from '@/app/(landing)/components/nav-wrapper'
|
||||
import Footer from '@/app/(landing)/components/sections/footer'
|
||||
import { getCachedContributorsData, prefetchContributorsData } from '@/app/(landing)/utils/prefetch'
|
||||
|
||||
interface Contributor {
|
||||
login: string
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
'use client'
|
||||
|
||||
import NavWrapper from './components/nav-wrapper'
|
||||
import Footer from './components/sections/footer'
|
||||
import Hero from './components/sections/hero'
|
||||
import Integrations from './components/sections/integrations'
|
||||
import Testimonials from './components/sections/testimonials'
|
||||
import NavWrapper from '@/app/(landing)/components/nav-wrapper'
|
||||
import Footer from '@/app/(landing)/components/sections/footer'
|
||||
import Hero from '@/app/(landing)/components/sections/hero'
|
||||
import Integrations from '@/app/(landing)/components/sections/integrations'
|
||||
import Testimonials from '@/app/(landing)/components/sections/testimonials'
|
||||
|
||||
export default function Landing() {
|
||||
const handleOpenTypeformLink = () => {
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
'use client'
|
||||
|
||||
import Link from 'next/link'
|
||||
import { GridPattern } from '../components/grid-pattern'
|
||||
import NavWrapper from '../components/nav-wrapper'
|
||||
import Footer from '../components/sections/footer'
|
||||
import { GridPattern } from '@/app/(landing)/components/grid-pattern'
|
||||
import NavWrapper from '@/app/(landing)/components/nav-wrapper'
|
||||
import Footer from '@/app/(landing)/components/sections/footer'
|
||||
|
||||
export default function PrivacyPolicy() {
|
||||
const handleOpenTypeformLink = () => {
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
'use client'
|
||||
|
||||
import Link from 'next/link'
|
||||
import { GridPattern } from '../components/grid-pattern'
|
||||
import NavWrapper from '../components/nav-wrapper'
|
||||
import Footer from '../components/sections/footer'
|
||||
import { GridPattern } from '@/app/(landing)/components/grid-pattern'
|
||||
import NavWrapper from '@/app/(landing)/components/nav-wrapper'
|
||||
import Footer from '@/app/(landing)/components/sections/footer'
|
||||
|
||||
export default function TermsOfService() {
|
||||
const handleOpenTypeformLink = () => {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
// Utility for prefetching and caching contributors page data
|
||||
import { getCommitsData, getContributors, getRepositoryStats } from '../actions/github'
|
||||
import { generateActivityData, generateCommitTimelineData } from './github'
|
||||
import { getCommitsData, getContributors, getRepositoryStats } from '@/app/(landing)/actions/github'
|
||||
import { generateActivityData, generateCommitTimelineData } from '@/app/(landing)/utils/github'
|
||||
|
||||
interface Contributor {
|
||||
login: string
|
||||
|
||||
@@ -279,7 +279,7 @@ export function mockExecutionDependencies() {
|
||||
}
|
||||
})
|
||||
|
||||
vi.mock('@/lib/logs/trace-spans', () => ({
|
||||
vi.mock('@/lib/logs/execution/trace-spans/trace-spans', () => ({
|
||||
buildTraceSpans: vi.fn().mockReturnValue({
|
||||
traceSpans: [],
|
||||
totalDuration: 100,
|
||||
@@ -375,7 +375,7 @@ export function mockWorkflowAccessValidation(shouldSucceed = true) {
|
||||
|
||||
export async function getMockedDependencies() {
|
||||
const utilsModule = await import('@/lib/utils')
|
||||
const traceSpansModule = await import('@/lib/logs/trace-spans')
|
||||
const traceSpansModule = await import('@/lib/logs/execution/trace-spans/trace-spans')
|
||||
const workflowUtilsModule = await import('@/lib/workflows/utils')
|
||||
const executorModule = await import('@/executor')
|
||||
const serializerModule = await import('@/serializer')
|
||||
@@ -655,7 +655,7 @@ export function mockKnowledgeSchemas() {
|
||||
* Mock console logger
|
||||
*/
|
||||
export function mockConsoleLogger() {
|
||||
vi.doMock('@/lib/logs/console-logger', () => ({
|
||||
vi.doMock('@/lib/logs/console/logger', () => ({
|
||||
createLogger: vi.fn().mockReturnValue(mockLogger),
|
||||
}))
|
||||
}
|
||||
|
||||
@@ -27,7 +27,7 @@ describe('Forget Password API Route', () => {
|
||||
redirectTo: 'https://example.com/reset',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/forget-password/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -56,7 +56,7 @@ describe('Forget Password API Route', () => {
|
||||
email: 'test@example.com',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/forget-password/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -79,7 +79,7 @@ describe('Forget Password API Route', () => {
|
||||
|
||||
const req = createMockRequest('POST', {})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/forget-password/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -98,7 +98,7 @@ describe('Forget Password API Route', () => {
|
||||
email: '',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/forget-password/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -126,7 +126,7 @@ describe('Forget Password API Route', () => {
|
||||
email: 'nonexistent@example.com',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/forget-password/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -134,7 +134,7 @@ describe('Forget Password API Route', () => {
|
||||
expect(response.status).toBe(500)
|
||||
expect(data.message).toBe(errorMessage)
|
||||
|
||||
const logger = await import('@/lib/logs/console-logger')
|
||||
const logger = await import('@/lib/logs/console/logger')
|
||||
const mockLogger = logger.createLogger('ForgetPasswordTest')
|
||||
expect(mockLogger.error).toHaveBeenCalledWith('Error requesting password reset:', {
|
||||
error: expect.any(Error),
|
||||
@@ -156,7 +156,7 @@ describe('Forget Password API Route', () => {
|
||||
email: 'test@example.com',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/forget-password/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -164,7 +164,7 @@ describe('Forget Password API Route', () => {
|
||||
expect(response.status).toBe(500)
|
||||
expect(data.message).toBe('Failed to send password reset email. Please try again later.')
|
||||
|
||||
const logger = await import('@/lib/logs/console-logger')
|
||||
const logger = await import('@/lib/logs/console/logger')
|
||||
const mockLogger = logger.createLogger('ForgetPasswordTest')
|
||||
expect(mockLogger.error).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { auth } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
|
||||
@@ -51,7 +51,7 @@ describe('OAuth Connections API Route', () => {
|
||||
jwtDecode: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/logs/console-logger', () => ({
|
||||
vi.doMock('@/lib/logs/console/logger', () => ({
|
||||
createLogger: vi.fn().mockReturnValue(mockLogger),
|
||||
}))
|
||||
})
|
||||
@@ -96,7 +96,7 @@ describe('OAuth Connections API Route', () => {
|
||||
mockDb.limit.mockResolvedValueOnce(mockUserRecord)
|
||||
|
||||
const req = createMockRequest('GET')
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/auth/oauth/connections/route')
|
||||
|
||||
const response = await GET(req)
|
||||
const data = await response.json()
|
||||
@@ -121,7 +121,7 @@ describe('OAuth Connections API Route', () => {
|
||||
mockGetSession.mockResolvedValueOnce(null)
|
||||
|
||||
const req = createMockRequest('GET')
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/auth/oauth/connections/route')
|
||||
|
||||
const response = await GET(req)
|
||||
const data = await response.json()
|
||||
@@ -146,7 +146,7 @@ describe('OAuth Connections API Route', () => {
|
||||
mockDb.limit.mockResolvedValueOnce([])
|
||||
|
||||
const req = createMockRequest('GET')
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/auth/oauth/connections/route')
|
||||
|
||||
const response = await GET(req)
|
||||
const data = await response.json()
|
||||
@@ -165,7 +165,7 @@ describe('OAuth Connections API Route', () => {
|
||||
mockDb.where.mockRejectedValueOnce(new Error('Database error'))
|
||||
|
||||
const req = createMockRequest('GET')
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/auth/oauth/connections/route')
|
||||
|
||||
const response = await GET(req)
|
||||
const data = await response.json()
|
||||
@@ -209,7 +209,7 @@ describe('OAuth Connections API Route', () => {
|
||||
mockDb.limit.mockResolvedValueOnce([])
|
||||
|
||||
const req = createMockRequest('GET')
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/auth/oauth/connections/route')
|
||||
|
||||
const response = await GET(req)
|
||||
const data = await response.json()
|
||||
|
||||
@@ -2,7 +2,7 @@ import { eq } from 'drizzle-orm'
|
||||
import { jwtDecode } from 'jwt-decode'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { db } from '@/db'
|
||||
import { account, user } from '@/db/schema'
|
||||
|
||||
|
||||
@@ -63,7 +63,7 @@ describe('OAuth Credentials API Route', () => {
|
||||
jwtDecode: vi.fn(),
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/logs/console-logger', () => ({
|
||||
vi.doMock('@/lib/logs/console/logger', () => ({
|
||||
createLogger: vi.fn().mockReturnValue(mockLogger),
|
||||
}))
|
||||
})
|
||||
@@ -111,7 +111,7 @@ describe('OAuth Credentials API Route', () => {
|
||||
|
||||
const req = createMockRequestWithQuery('GET', '?provider=google-email')
|
||||
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/auth/oauth/credentials/route')
|
||||
|
||||
const response = await GET(req)
|
||||
const data = await response.json()
|
||||
@@ -135,7 +135,7 @@ describe('OAuth Credentials API Route', () => {
|
||||
|
||||
const req = createMockRequestWithQuery('GET', '?provider=google')
|
||||
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/auth/oauth/credentials/route')
|
||||
|
||||
const response = await GET(req)
|
||||
const data = await response.json()
|
||||
@@ -152,7 +152,7 @@ describe('OAuth Credentials API Route', () => {
|
||||
|
||||
const req = createMockRequestWithQuery('GET')
|
||||
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/auth/oauth/credentials/route')
|
||||
|
||||
const response = await GET(req)
|
||||
const data = await response.json()
|
||||
@@ -177,7 +177,7 @@ describe('OAuth Credentials API Route', () => {
|
||||
|
||||
const req = createMockRequestWithQuery('GET', '?provider=github')
|
||||
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/auth/oauth/credentials/route')
|
||||
|
||||
const response = await GET(req)
|
||||
const data = await response.json()
|
||||
@@ -220,7 +220,7 @@ describe('OAuth Credentials API Route', () => {
|
||||
|
||||
const req = createMockRequestWithQuery('GET', '?provider=google')
|
||||
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/auth/oauth/credentials/route')
|
||||
|
||||
const response = await GET(req)
|
||||
const data = await response.json()
|
||||
@@ -244,7 +244,7 @@ describe('OAuth Credentials API Route', () => {
|
||||
|
||||
const req = createMockRequestWithQuery('GET', '?provider=google')
|
||||
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/auth/oauth/credentials/route')
|
||||
|
||||
const response = await GET(req)
|
||||
const data = await response.json()
|
||||
|
||||
@@ -2,7 +2,7 @@ import { and, eq } from 'drizzle-orm'
|
||||
import { jwtDecode } from 'jwt-decode'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import type { OAuthService } from '@/lib/oauth/oauth'
|
||||
import { parseProvider } from '@/lib/oauth/oauth'
|
||||
import { db } from '@/db'
|
||||
|
||||
@@ -47,7 +47,7 @@ describe('OAuth Disconnect API Route', () => {
|
||||
or: vi.fn((...conditions) => ({ conditions, type: 'or' })),
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/logs/console-logger', () => ({
|
||||
vi.doMock('@/lib/logs/console/logger', () => ({
|
||||
createLogger: vi.fn().mockReturnValue(mockLogger),
|
||||
}))
|
||||
})
|
||||
@@ -68,7 +68,7 @@ describe('OAuth Disconnect API Route', () => {
|
||||
provider: 'google',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/oauth/disconnect/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -91,7 +91,7 @@ describe('OAuth Disconnect API Route', () => {
|
||||
providerId: 'google-email',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/oauth/disconnect/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -108,7 +108,7 @@ describe('OAuth Disconnect API Route', () => {
|
||||
provider: 'google',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/oauth/disconnect/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -125,7 +125,7 @@ describe('OAuth Disconnect API Route', () => {
|
||||
|
||||
const req = createMockRequest('POST', {})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/oauth/disconnect/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -147,7 +147,7 @@ describe('OAuth Disconnect API Route', () => {
|
||||
provider: 'google',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/oauth/disconnect/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { and, eq, like, or } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { db } from '@/db'
|
||||
import { account } from '@/db/schema'
|
||||
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
||||
import { db } from '@/db'
|
||||
import { account } from '@/db/schema'
|
||||
import { refreshAccessTokenIfNeeded } from '../../utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
||||
import { db } from '@/db'
|
||||
import { account } from '@/db/schema'
|
||||
import { refreshAccessTokenIfNeeded } from '../../utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
|
||||
@@ -28,13 +28,13 @@ describe('OAuth Token API Routes', () => {
|
||||
randomUUID: vi.fn().mockReturnValue(mockUUID),
|
||||
})
|
||||
|
||||
vi.doMock('../utils', () => ({
|
||||
vi.doMock('@/app/api/auth/oauth/utils', () => ({
|
||||
getUserId: mockGetUserId,
|
||||
getCredential: mockGetCredential,
|
||||
refreshTokenIfNeeded: mockRefreshTokenIfNeeded,
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/logs/console-logger', () => ({
|
||||
vi.doMock('@/lib/logs/console/logger', () => ({
|
||||
createLogger: vi.fn().mockReturnValue(mockLogger),
|
||||
}))
|
||||
})
|
||||
@@ -67,7 +67,7 @@ describe('OAuth Token API Routes', () => {
|
||||
})
|
||||
|
||||
// Import handler after setting up mocks
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/oauth/token/route')
|
||||
|
||||
// Call handler
|
||||
const response = await POST(req)
|
||||
@@ -102,7 +102,7 @@ describe('OAuth Token API Routes', () => {
|
||||
workflowId: 'workflow-id',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/oauth/token/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -121,7 +121,7 @@ describe('OAuth Token API Routes', () => {
|
||||
it('should handle missing credentialId', async () => {
|
||||
const req = createMockRequest('POST', {})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/oauth/token/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -138,7 +138,7 @@ describe('OAuth Token API Routes', () => {
|
||||
credentialId: 'credential-id',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/oauth/token/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -155,7 +155,7 @@ describe('OAuth Token API Routes', () => {
|
||||
workflowId: 'nonexistent-workflow-id',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/oauth/token/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -172,7 +172,7 @@ describe('OAuth Token API Routes', () => {
|
||||
credentialId: 'nonexistent-credential-id',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/oauth/token/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -196,7 +196,7 @@ describe('OAuth Token API Routes', () => {
|
||||
credentialId: 'credential-id',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/oauth/token/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -228,7 +228,7 @@ describe('OAuth Token API Routes', () => {
|
||||
'http://localhost:3000/api/auth/oauth/token?credentialId=credential-id'
|
||||
)
|
||||
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/auth/oauth/token/route')
|
||||
|
||||
const response = await GET(req as any)
|
||||
const data = await response.json()
|
||||
@@ -244,7 +244,7 @@ describe('OAuth Token API Routes', () => {
|
||||
it('should handle missing credentialId', async () => {
|
||||
const req = new Request('http://localhost:3000/api/auth/oauth/token')
|
||||
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/auth/oauth/token/route')
|
||||
|
||||
const response = await GET(req as any)
|
||||
const data = await response.json()
|
||||
@@ -261,7 +261,7 @@ describe('OAuth Token API Routes', () => {
|
||||
'http://localhost:3000/api/auth/oauth/token?credentialId=credential-id'
|
||||
)
|
||||
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/auth/oauth/token/route')
|
||||
|
||||
const response = await GET(req as any)
|
||||
const data = await response.json()
|
||||
@@ -278,7 +278,7 @@ describe('OAuth Token API Routes', () => {
|
||||
'http://localhost:3000/api/auth/oauth/token?credentialId=nonexistent-credential-id'
|
||||
)
|
||||
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/auth/oauth/token/route')
|
||||
|
||||
const response = await GET(req as any)
|
||||
const data = await response.json()
|
||||
@@ -300,7 +300,7 @@ describe('OAuth Token API Routes', () => {
|
||||
'http://localhost:3000/api/auth/oauth/token?credentialId=credential-id'
|
||||
)
|
||||
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/auth/oauth/token/route')
|
||||
|
||||
const response = await GET(req as any)
|
||||
const data = await response.json()
|
||||
@@ -325,7 +325,7 @@ describe('OAuth Token API Routes', () => {
|
||||
'http://localhost:3000/api/auth/oauth/token?credentialId=credential-id'
|
||||
)
|
||||
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/auth/oauth/token/route')
|
||||
|
||||
const response = await GET(req as any)
|
||||
const data = await response.json()
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { getCredential, getUserId, refreshTokenIfNeeded } from '../utils'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getCredential, getUserId, refreshTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
|
||||
@@ -39,7 +39,7 @@ describe('OAuth Utils', () => {
|
||||
refreshOAuthToken: mockRefreshOAuthToken,
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/logs/console-logger', () => ({
|
||||
vi.doMock('@/lib/logs/console/logger', () => ({
|
||||
createLogger: vi.fn().mockReturnValue(mockLogger),
|
||||
}))
|
||||
})
|
||||
@@ -50,7 +50,7 @@ describe('OAuth Utils', () => {
|
||||
|
||||
describe('getUserId', () => {
|
||||
it('should get user ID from session when no workflowId is provided', async () => {
|
||||
const { getUserId } = await import('./utils')
|
||||
const { getUserId } = await import('@/app/api/auth/oauth/utils')
|
||||
|
||||
const userId = await getUserId('request-id')
|
||||
|
||||
@@ -60,7 +60,7 @@ describe('OAuth Utils', () => {
|
||||
it('should get user ID from workflow when workflowId is provided', async () => {
|
||||
mockDb.limit.mockReturnValueOnce([{ userId: 'workflow-owner-id' }])
|
||||
|
||||
const { getUserId } = await import('./utils')
|
||||
const { getUserId } = await import('@/app/api/auth/oauth/utils')
|
||||
|
||||
const userId = await getUserId('request-id', 'workflow-id')
|
||||
|
||||
@@ -76,7 +76,7 @@ describe('OAuth Utils', () => {
|
||||
getSession: vi.fn().mockResolvedValue(null),
|
||||
}))
|
||||
|
||||
const { getUserId } = await import('./utils')
|
||||
const { getUserId } = await import('@/app/api/auth/oauth/utils')
|
||||
|
||||
const userId = await getUserId('request-id')
|
||||
|
||||
@@ -87,7 +87,7 @@ describe('OAuth Utils', () => {
|
||||
it('should return undefined if workflow is not found', async () => {
|
||||
mockDb.limit.mockReturnValueOnce([])
|
||||
|
||||
const { getUserId } = await import('./utils')
|
||||
const { getUserId } = await import('@/app/api/auth/oauth/utils')
|
||||
|
||||
const userId = await getUserId('request-id', 'nonexistent-workflow-id')
|
||||
|
||||
@@ -101,7 +101,7 @@ describe('OAuth Utils', () => {
|
||||
const mockCredential = { id: 'credential-id', userId: 'test-user-id' }
|
||||
mockDb.limit.mockReturnValueOnce([mockCredential])
|
||||
|
||||
const { getCredential } = await import('./utils')
|
||||
const { getCredential } = await import('@/app/api/auth/oauth/utils')
|
||||
|
||||
const credential = await getCredential('request-id', 'credential-id', 'test-user-id')
|
||||
|
||||
@@ -116,7 +116,7 @@ describe('OAuth Utils', () => {
|
||||
it('should return undefined when credential is not found', async () => {
|
||||
mockDb.limit.mockReturnValueOnce([])
|
||||
|
||||
const { getCredential } = await import('./utils')
|
||||
const { getCredential } = await import('@/app/api/auth/oauth/utils')
|
||||
|
||||
const credential = await getCredential('request-id', 'nonexistent-id', 'test-user-id')
|
||||
|
||||
@@ -135,7 +135,7 @@ describe('OAuth Utils', () => {
|
||||
providerId: 'google',
|
||||
}
|
||||
|
||||
const { refreshTokenIfNeeded } = await import('./utils')
|
||||
const { refreshTokenIfNeeded } = await import('@/app/api/auth/oauth/utils')
|
||||
|
||||
const result = await refreshTokenIfNeeded('request-id', mockCredential, 'credential-id')
|
||||
|
||||
@@ -159,7 +159,7 @@ describe('OAuth Utils', () => {
|
||||
refreshToken: 'new-refresh-token',
|
||||
})
|
||||
|
||||
const { refreshTokenIfNeeded } = await import('./utils')
|
||||
const { refreshTokenIfNeeded } = await import('@/app/api/auth/oauth/utils')
|
||||
|
||||
const result = await refreshTokenIfNeeded('request-id', mockCredential, 'credential-id')
|
||||
|
||||
@@ -183,7 +183,7 @@ describe('OAuth Utils', () => {
|
||||
|
||||
mockRefreshOAuthToken.mockResolvedValueOnce(null)
|
||||
|
||||
const { refreshTokenIfNeeded } = await import('./utils')
|
||||
const { refreshTokenIfNeeded } = await import('@/app/api/auth/oauth/utils')
|
||||
|
||||
await expect(
|
||||
refreshTokenIfNeeded('request-id', mockCredential, 'credential-id')
|
||||
@@ -201,7 +201,7 @@ describe('OAuth Utils', () => {
|
||||
providerId: 'google',
|
||||
}
|
||||
|
||||
const { refreshTokenIfNeeded } = await import('./utils')
|
||||
const { refreshTokenIfNeeded } = await import('@/app/api/auth/oauth/utils')
|
||||
|
||||
const result = await refreshTokenIfNeeded('request-id', mockCredential, 'credential-id')
|
||||
|
||||
@@ -222,7 +222,7 @@ describe('OAuth Utils', () => {
|
||||
}
|
||||
mockDb.limit.mockReturnValueOnce([mockCredential])
|
||||
|
||||
const { refreshAccessTokenIfNeeded } = await import('./utils')
|
||||
const { refreshAccessTokenIfNeeded } = await import('@/app/api/auth/oauth/utils')
|
||||
|
||||
const token = await refreshAccessTokenIfNeeded('credential-id', 'test-user-id', 'request-id')
|
||||
|
||||
@@ -247,7 +247,7 @@ describe('OAuth Utils', () => {
|
||||
refreshToken: 'new-refresh-token',
|
||||
})
|
||||
|
||||
const { refreshAccessTokenIfNeeded } = await import('./utils')
|
||||
const { refreshAccessTokenIfNeeded } = await import('@/app/api/auth/oauth/utils')
|
||||
|
||||
const token = await refreshAccessTokenIfNeeded('credential-id', 'test-user-id', 'request-id')
|
||||
|
||||
@@ -260,7 +260,7 @@ describe('OAuth Utils', () => {
|
||||
it('should return null if credential not found', async () => {
|
||||
mockDb.limit.mockReturnValueOnce([])
|
||||
|
||||
const { refreshAccessTokenIfNeeded } = await import('./utils')
|
||||
const { refreshAccessTokenIfNeeded } = await import('@/app/api/auth/oauth/utils')
|
||||
|
||||
const token = await refreshAccessTokenIfNeeded('nonexistent-id', 'test-user-id', 'request-id')
|
||||
|
||||
@@ -281,7 +281,7 @@ describe('OAuth Utils', () => {
|
||||
|
||||
mockRefreshOAuthToken.mockResolvedValueOnce(null)
|
||||
|
||||
const { refreshAccessTokenIfNeeded } = await import('./utils')
|
||||
const { refreshAccessTokenIfNeeded } = await import('@/app/api/auth/oauth/utils')
|
||||
|
||||
const token = await refreshAccessTokenIfNeeded('credential-id', 'test-user-id', 'request-id')
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { refreshOAuthToken } from '@/lib/oauth/oauth'
|
||||
import { db } from '@/db'
|
||||
import { account, workflow } from '@/db/schema'
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
||||
import { db } from '@/db'
|
||||
import { account } from '@/db/schema'
|
||||
import { refreshAccessTokenIfNeeded } from '../../utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { eq } from 'drizzle-orm'
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { refreshAccessTokenIfNeeded } from '@/app/api/auth/oauth/utils'
|
||||
import { db } from '@/db'
|
||||
import { account } from '@/db/schema'
|
||||
import { refreshAccessTokenIfNeeded } from '../../utils'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
|
||||
@@ -27,7 +27,7 @@ describe('Reset Password API Route', () => {
|
||||
newPassword: 'newSecurePassword123',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/reset-password/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -52,7 +52,7 @@ describe('Reset Password API Route', () => {
|
||||
newPassword: 'newSecurePassword123',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/reset-password/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -91,7 +91,7 @@ describe('Reset Password API Route', () => {
|
||||
newPassword: 'newSecurePassword123',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/reset-password/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -111,7 +111,7 @@ describe('Reset Password API Route', () => {
|
||||
newPassword: '',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/reset-password/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -140,7 +140,7 @@ describe('Reset Password API Route', () => {
|
||||
newPassword: 'newSecurePassword123',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/reset-password/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -148,8 +148,8 @@ describe('Reset Password API Route', () => {
|
||||
expect(response.status).toBe(500)
|
||||
expect(data.message).toBe(errorMessage)
|
||||
|
||||
const logger = await import('@/lib/logs/console-logger')
|
||||
const mockLogger = logger.createLogger('PasswordReset')
|
||||
const logger = await import('@/lib/logs/console/logger')
|
||||
const mockLogger = logger.createLogger('PasswordResetAPI')
|
||||
expect(mockLogger.error).toHaveBeenCalledWith('Error during password reset:', {
|
||||
error: expect.any(Error),
|
||||
})
|
||||
@@ -171,7 +171,7 @@ describe('Reset Password API Route', () => {
|
||||
newPassword: 'newSecurePassword123',
|
||||
})
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/auth/reset-password/route')
|
||||
|
||||
const response = await POST(req)
|
||||
const data = await response.json()
|
||||
@@ -181,8 +181,8 @@ describe('Reset Password API Route', () => {
|
||||
'Failed to reset password. Please try again or request a new reset link.'
|
||||
)
|
||||
|
||||
const logger = await import('@/lib/logs/console-logger')
|
||||
const mockLogger = logger.createLogger('PasswordReset')
|
||||
const logger = await import('@/lib/logs/console/logger')
|
||||
const mockLogger = logger.createLogger('PasswordResetAPI')
|
||||
expect(mockLogger.error).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { auth } from '@/lib/auth'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
const logger = createLogger('PasswordReset')
|
||||
const logger = createLogger('PasswordResetAPI')
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { verifyCronAuth } from '@/lib/auth/internal'
|
||||
import { processDailyBillingCheck } from '@/lib/billing/core/billing'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const logger = createLogger('DailyBillingCron')
|
||||
|
||||
|
||||
@@ -3,12 +3,14 @@ import { type NextRequest, NextResponse } from 'next/server'
|
||||
import { getSession } from '@/lib/auth'
|
||||
import { getSimplifiedBillingSummary } from '@/lib/billing/core/billing'
|
||||
import { getOrganizationBillingData } from '@/lib/billing/core/organization-billing'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { db } from '@/db'
|
||||
import { member } from '@/db/schema'
|
||||
|
||||
const logger = createLogger('UnifiedBillingAPI')
|
||||
|
||||
export const dynamic = 'force-dynamic'
|
||||
|
||||
/**
|
||||
* Unified Billing Endpoint
|
||||
*/
|
||||
|
||||
@@ -4,7 +4,7 @@ import type Stripe from 'stripe'
|
||||
import { requireStripeClient } from '@/lib/billing/stripe-client'
|
||||
import { handleInvoiceWebhook } from '@/lib/billing/webhooks/stripe-invoice-webhooks'
|
||||
import { env } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
|
||||
const logger = createLogger('StripeInvoiceWebhook')
|
||||
|
||||
|
||||
@@ -3,12 +3,12 @@ import type { NextRequest } from 'next/server'
|
||||
import { z } from 'zod'
|
||||
import { renderOTPEmail } from '@/components/emails/render-email'
|
||||
import { sendEmail } from '@/lib/email/mailer'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import { getRedisClient, markMessageAsProcessed, releaseLock } from '@/lib/redis'
|
||||
import { addCorsHeaders, setChatAuthCookie } from '@/app/api/chat/utils'
|
||||
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
|
||||
import { db } from '@/db'
|
||||
import { chat } from '@/db/schema'
|
||||
import { addCorsHeaders, setChatAuthCookie } from '../../utils'
|
||||
|
||||
const logger = createLogger('ChatOtpAPI')
|
||||
|
||||
|
||||
@@ -67,7 +67,7 @@ describe('Chat Subdomain API Route', () => {
|
||||
beforeEach(() => {
|
||||
vi.resetModules()
|
||||
|
||||
vi.doMock('../utils', () => ({
|
||||
vi.doMock('@/app/api/chat/utils', () => ({
|
||||
addCorsHeaders: mockAddCorsHeaders,
|
||||
validateChatAuth: mockValidateChatAuth,
|
||||
setChatAuthCookie: mockSetChatAuthCookie,
|
||||
@@ -75,7 +75,7 @@ describe('Chat Subdomain API Route', () => {
|
||||
executeWorkflowForChat: mockExecuteWorkflowForChat,
|
||||
}))
|
||||
|
||||
vi.doMock('@/lib/logs/console-logger', () => ({
|
||||
vi.doMock('@/lib/logs/console/logger', () => ({
|
||||
createLogger: vi.fn().mockReturnValue({
|
||||
debug: vi.fn(),
|
||||
info: vi.fn(),
|
||||
@@ -138,7 +138,7 @@ describe('Chat Subdomain API Route', () => {
|
||||
const req = createMockRequest('GET')
|
||||
const params = Promise.resolve({ subdomain: 'test-chat' })
|
||||
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/chat/[subdomain]/route')
|
||||
|
||||
const response = await GET(req, { params })
|
||||
|
||||
@@ -169,7 +169,7 @@ describe('Chat Subdomain API Route', () => {
|
||||
const req = createMockRequest('GET')
|
||||
const params = Promise.resolve({ subdomain: 'nonexistent' })
|
||||
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/chat/[subdomain]/route')
|
||||
|
||||
const response = await GET(req, { params })
|
||||
|
||||
@@ -203,7 +203,7 @@ describe('Chat Subdomain API Route', () => {
|
||||
const req = createMockRequest('GET')
|
||||
const params = Promise.resolve({ subdomain: 'inactive-chat' })
|
||||
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/chat/[subdomain]/route')
|
||||
|
||||
const response = await GET(req, { params })
|
||||
|
||||
@@ -224,7 +224,7 @@ describe('Chat Subdomain API Route', () => {
|
||||
const req = createMockRequest('GET')
|
||||
const params = Promise.resolve({ subdomain: 'password-protected-chat' })
|
||||
|
||||
const { GET } = await import('./route')
|
||||
const { GET } = await import('@/app/api/chat/[subdomain]/route')
|
||||
|
||||
const response = await GET(req, { params })
|
||||
|
||||
@@ -245,7 +245,7 @@ describe('Chat Subdomain API Route', () => {
|
||||
const req = createMockRequest('POST', { password: 'test-password' })
|
||||
const params = Promise.resolve({ subdomain: 'password-protected-chat' })
|
||||
|
||||
const { POST } = await import('./route')
|
||||
const { POST } = await import('@/app/api/chat/[subdomain]/route')
|
||||
|
||||
const response = await POST(req, { params })
|
||||
|
||||
@@ -261,7 +261,7 @@ describe('Chat Subdomain API Route', () => {
     const req = createMockRequest('POST', {})
     const params = Promise.resolve({ subdomain: 'test-chat' })

-    const { POST } = await import('./route')
+    const { POST } = await import('@/app/api/chat/[subdomain]/route')

     const response = await POST(req, { params })

@@ -282,7 +282,7 @@ describe('Chat Subdomain API Route', () => {
     const req = createMockRequest('POST', { input: 'Hello' })
     const params = Promise.resolve({ subdomain: 'protected-chat' })

-    const { POST } = await import('./route')
+    const { POST } = await import('@/app/api/chat/[subdomain]/route')

     const response = await POST(req, { params })

@@ -345,7 +345,7 @@ describe('Chat Subdomain API Route', () => {
     const req = createMockRequest('POST', { input: 'Hello' })
     const params = Promise.resolve({ subdomain: 'test-chat' })

-    const { POST } = await import('./route')
+    const { POST } = await import('@/app/api/chat/[subdomain]/route')

     const response = await POST(req, { params })

@@ -360,7 +360,7 @@ describe('Chat Subdomain API Route', () => {
     const req = createMockRequest('POST', { input: 'Hello world', conversationId: 'conv-123' })
     const params = Promise.resolve({ subdomain: 'test-chat' })

-    const { POST } = await import('./route')
+    const { POST } = await import('@/app/api/chat/[subdomain]/route')

     const response = await POST(req, { params })

@@ -377,7 +377,7 @@ describe('Chat Subdomain API Route', () => {
     const req = createMockRequest('POST', { input: 'Hello world' })
     const params = Promise.resolve({ subdomain: 'test-chat' })

-    const { POST } = await import('./route')
+    const { POST } = await import('@/app/api/chat/[subdomain]/route')

     const response = await POST(req, { params })

@@ -407,7 +407,7 @@ describe('Chat Subdomain API Route', () => {
     const req = createMockRequest('POST', { input: 'Trigger error' })
     const params = Promise.resolve({ subdomain: 'test-chat' })

-    const { POST } = await import('./route')
+    const { POST } = await import('@/app/api/chat/[subdomain]/route')

     const response = await POST(req, { params })

@@ -426,12 +426,13 @@ describe('Chat Subdomain API Route', () => {
     // Create a request with invalid JSON
     const req = {
       method: 'POST',
       headers: new Headers(),
       json: vi.fn().mockRejectedValue(new Error('Invalid JSON')),
     } as any

     const params = Promise.resolve({ subdomain: 'test-chat' })

-    const { POST } = await import('./route')
+    const { POST } = await import('@/app/api/chat/[subdomain]/route')

     const response = await POST(req, { params })
@@ -449,7 +450,7 @@ describe('Chat Subdomain API Route', () => {
     })
     const params = Promise.resolve({ subdomain: 'test-chat' })

-    const { POST } = await import('./route')
+    const { POST } = await import('@/app/api/chat/[subdomain]/route')

     await POST(req, { params })

@@ -464,7 +465,7 @@ describe('Chat Subdomain API Route', () => {
     const req = createMockRequest('POST', { input: 'Hello world' })
     const params = Promise.resolve({ subdomain: 'test-chat' })

-    const { POST } = await import('./route')
+    const { POST } = await import('@/app/api/chat/[subdomain]/route')

     await POST(req, { params })

@@ -1,16 +1,16 @@
 import { eq } from 'drizzle-orm'
 import { type NextRequest, NextResponse } from 'next/server'
-import { createLogger } from '@/lib/logs/console-logger'
-import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
-import { db } from '@/db'
-import { chat, workflow } from '@/db/schema'
+import { createLogger } from '@/lib/logs/console/logger'
 import {
   addCorsHeaders,
   executeWorkflowForChat,
   setChatAuthCookie,
   validateAuthToken,
   validateChatAuth,
-} from '../utils'
+} from '@/app/api/chat/utils'
+import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'
+import { db } from '@/db'
+import { chat, workflow } from '@/db/schema'

 const logger = createLogger('ChatSubdomainAPI')

Some files were not shown because too many files have changed in this diff.