diff --git a/.devcontainer/.bashrc b/.devcontainer/.bashrc index 705d31fbb..823b2a980 100644 --- a/.devcontainer/.bashrc +++ b/.devcontainer/.bashrc @@ -18,14 +18,20 @@ alias pgc="PGPASSWORD=postgres psql -h db -U postgres -d simstudio" alias check-db="PGPASSWORD=postgres psql -h db -U postgres -c '\l'" # Sim Studio specific aliases -alias logs="cd /workspace/sim && tail -f logs/*.log 2>/dev/null || echo 'No log files found'" -alias sim-start="cd /workspace/sim && npm run dev" -alias sim-migrate="cd /workspace/sim && npx drizzle-kit push" -alias sim-generate="cd /workspace/sim && npx drizzle-kit generate" -alias sim-rebuild="cd /workspace/sim && npm run build && npm start" +alias logs="cd /workspace/apps/sim && tail -f logs/*.log 2>/dev/null || echo 'No log files found'" +alias sim-start="cd /workspace && npm run dev" +alias sim-migrate="cd /workspace/apps/sim && npx drizzle-kit push" +alias sim-generate="cd /workspace/apps/sim && npx drizzle-kit generate" +alias sim-rebuild="cd /workspace && npm run build && npm run dev" +alias docs-dev="cd /workspace/apps/docs && npm run dev" -# Default to sim directory -cd /workspace/sim 2>/dev/null || true +# Turbo related commands +alias turbo-build="cd /workspace && npx turbo run build" +alias turbo-dev="cd /workspace && npx turbo run dev" +alias turbo-test="cd /workspace && npx turbo run test" + +# Default to workspace directory +cd /workspace 2>/dev/null || true # Welcome message - only show once per session if [ -z "$SIM_WELCOME_SHOWN" ]; then @@ -36,10 +42,16 @@ if [ -z "$SIM_WELCOME_SHOWN" ]; then echo "πŸš€ Welcome to Sim Studio development environment!" 
echo "" echo "Available commands:" - echo " sim-start - Start the development server" - echo " sim-migrate - Push schema changes to the database" - echo " sim-generate - Generate new migrations" - echo " sim-rebuild - Build and start the production server" + echo " sim-start - Start all apps in development mode" + echo " sim-migrate - Push schema changes to the database for sim app" + echo " sim-generate - Generate new migrations for sim app" + echo " sim-rebuild - Build and start all apps" + echo " docs-dev - Start only the docs app in development mode" + echo "" + echo "Turbo commands:" + echo " turbo-build - Build all apps using Turborepo" + echo " turbo-dev - Start development mode for all apps" + echo " turbo-test - Run tests for all packages" echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" echo "" fi \ No newline at end of file diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index dd485c55c..26b56927b 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -23,7 +23,7 @@ RUN echo "$USERNAME ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/$USERNAME \ RUN npm install -g npm@latest # Install global packages -RUN npm install -g drizzle-kit +RUN npm install -g drizzle-kit turbo # Switch back to dialog for any ad-hoc use of apt-get ENV DEBIAN_FRONTEND=dialog @@ -31,4 +31,5 @@ ENV DEBIAN_FRONTEND=dialog WORKDIR /workspace # Expose the ports we're interested in -EXPOSE 3000 \ No newline at end of file +EXPOSE 3000 +EXPOSE 3001 \ No newline at end of file diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index feca73626..9e8bd2fe4 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -18,8 +18,9 @@ services: db: condition: service_healthy ports: - - "3000:3000" - working_dir: /workspace/sim + - '3000:3000' + - '3001:3001' + working_dir: /workspace db: image: postgres:16 @@ -31,12 +32,12 @@ services: - POSTGRES_PASSWORD=postgres - 
POSTGRES_DB=simstudio ports: - - "5432:5432" + - '5432:5432' healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ['CMD-SHELL', 'pg_isready -U postgres'] interval: 5s timeout: 5s retries: 5 volumes: - postgres-data: \ No newline at end of file + postgres-data: diff --git a/.devcontainer/post-create.sh b/.devcontainer/post-create.sh index 71c4d0a90..4bd0add3f 100755 --- a/.devcontainer/post-create.sh +++ b/.devcontainer/post-create.sh @@ -5,8 +5,8 @@ set -e echo "πŸ”§ Setting up Sim Studio development environment..." -# Change to the sim directory -cd /workspace/sim +# Change to the workspace root directory +cd /workspace # Setup .bashrc echo "πŸ“„ Setting up .bashrc with aliases..." @@ -19,6 +19,8 @@ echo "πŸ“¦ Cleaning and reinstalling npm dependencies..." if [ -d "node_modules" ]; then echo "Removing existing node_modules to ensure platform compatibility..." rm -rf node_modules + rm -rf apps/sim/node_modules + rm -rf apps/docs/node_modules fi # Install dependencies with platform-specific binaries @@ -26,16 +28,22 @@ npm install || { echo "⚠️ npm install had issues but continuing setup..." } -# Set up environment variables if .env doesn't exist -if [ ! -f ".env" ]; then +# Set up environment variables if .env doesn't exist for the sim app +if [ ! -f "apps/sim/.env" ]; then echo "πŸ“„ Creating .env file from template..." - cp .env.example .env 2>/dev/null || echo "DATABASE_URL=postgresql://postgres:postgres@db:5432/simstudio" > .env + if [ -f "apps/sim/.env.example" ]; then + cp apps/sim/.env.example apps/sim/.env + else + echo "DATABASE_URL=postgresql://postgres:postgres@db:5432/simstudio" > apps/sim/.env + fi fi # Generate schema and run database migrations echo "πŸ—ƒοΈ Running database schema generation and migrations..." echo "Generating schema..." +cd apps/sim npx drizzle-kit generate +cd ../.. echo "Waiting for database to be ready..." 
# Try to connect to the database, but don't fail the script if it doesn't work @@ -44,7 +52,9 @@ echo "Waiting for database to be ready..." while [ $timeout -gt 0 ]; do if PGPASSWORD=postgres psql -h db -U postgres -c '\q' 2>/dev/null; then echo "Database is ready!" + cd apps/sim DATABASE_URL=postgresql://postgres:postgres@db:5432/simstudio npx drizzle-kit push + cd ../.. break fi echo "Database is unavailable - sleeping (${timeout}s remaining)" @@ -61,13 +71,13 @@ echo "Waiting for database to be ready..." cat << EOF >> ~/.bashrc # Additional Sim Studio Development Aliases -alias migrate="cd /workspace/sim && DATABASE_URL=postgresql://postgres:postgres@db:5432/simstudio npx drizzle-kit push" -alias generate="cd /workspace/sim && npx drizzle-kit generate" -alias dev="cd /workspace/sim && npm run dev" -alias build="cd /workspace/sim && npm run build" -alias start="cd /workspace/sim && npm run start" -alias lint="cd /workspace/sim && npm run lint" -alias test="cd /workspace/sim && npm run test" +alias migrate="cd /workspace/apps/sim && DATABASE_URL=postgresql://postgres:postgres@db:5432/simstudio npx drizzle-kit push" +alias generate="cd /workspace/apps/sim && npx drizzle-kit generate" +alias dev="cd /workspace && npm run dev" +alias build="cd /workspace && npm run build" +alias start="cd /workspace && npm run dev" +alias lint="cd /workspace/apps/sim && npm run lint" +alias test="cd /workspace && npm run test" EOF # Source the .bashrc to make aliases available immediately diff --git a/sim/.env.example b/.env.example similarity index 100% rename from sim/.env.example rename to .env.example diff --git a/.github/CODE_OF_CONDUCT.md b/.github/CODE_OF_CONDUCT.md index 2694feb2a..af0865e10 100644 --- a/.github/CODE_OF_CONDUCT.md +++ b/.github/CODE_OF_CONDUCT.md @@ -14,22 +14,22 @@ appearance, race, religion, or sexual identity and orientation. 
Examples of behaviour that contributes to a positive environment for our community include: -* Demonstrating empathy and kindness toward other people -* Being respectful of differing opinions, viewpoints, and experiences -* Giving and gracefully accepting constructive feedback -* Accepting responsibility and apologising to those affected by our mistakes, +- Demonstrating empathy and kindness toward other people +- Being respectful of differing opinions, viewpoints, and experiences +- Giving and gracefully accepting constructive feedback +- Accepting responsibility and apologising to those affected by our mistakes, and learning from the experience -* Focusing on what is best not just for us as individuals, but for the +- Focusing on what is best not just for us as individuals, but for the overall community Examples of unacceptable behaviour include: -* The use of sexualised language or imagery, and sexual attention or advances -* Trolling, insulting or derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or email +- The use of sexualised language or imagery, and sexual attention or advances +- Trolling, insulting or derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or email address, without their explicit permission -* Other conduct which could reasonably be considered inappropriate in a +- Other conduct which could reasonably be considered inappropriate in a professional setting ## Our Responsibilities @@ -112,4 +112,4 @@ the community. 
This Code of Conduct is adapted from the [Contributor Covenant](https://contributor-covenant.org/), version [1.4](https://www.contributor-covenant.org/version/1/4/code-of-conduct/code_of_conduct.md) and [2.0](https://www.contributor-covenant.org/version/2/0/code_of_conduct/code_of_conduct.md), -and was generated by [contributing.md](https://contributing.md/generator). \ No newline at end of file +and was generated by [contributing.md](https://contributing.md/generator). diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index b9d859d2c..8b63aa60f 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -3,7 +3,7 @@ Thank you for your interest in contributing to Sim Studio! Our goal is to provide developers with a powerful, user-friendly platform for building, testing, and optimizing agentic workflows. We welcome contributions in all formsβ€”from bug fixes and design improvements to brand-new features. > **Project Overview:** -> Sim Studio is a monorepo containing the main application (`sim/`) and documentation (`docs/`). The main application is built with Next.js (app router), ReactFlow, Zustand, Shadcn, and Tailwind CSS. Please ensure your contributions follow our best practices for clarity, maintainability, and consistency. +> Sim Studio is a monorepo using Turborepo, containing the main application (`apps/sim/`), documentation (`apps/docs/`), and shared packages (`packages/`). The main application is built with Next.js (app router), ReactFlow, Zustand, Shadcn, and Tailwind CSS. Please ensure your contributions follow our best practices for clarity, maintainability, and consistency. --- @@ -269,26 +269,26 @@ Sim Studio is built in a modular fashion where blocks and tools extend the platf ### Where to Add Your Code -- **Blocks:** Create your new block file under the `/sim/blocks/blocks` directory. The name of the file should match the provider name (e.g., `pinecone.ts`). 
-- **Tools:** Create a new directory under `/sim/tools` with the same name as the provider (e.g., `/sim/tools/pinecone`). +- **Blocks:** Create your new block file under the `/apps/sim/blocks/blocks` directory. The name of the file should match the provider name (e.g., `pinecone.ts`). +- **Tools:** Create a new directory under `/apps/sim/tools` with the same name as the provider (e.g., `/apps/sim/tools/pinecone`). In addition, you will need to update the registries: -- **Block Registry:** Update the blocks index (`/sim/blocks/index.ts`) to include your new block. -- **Tool Registry:** Update the tools registry (`/sim/tools/index.ts`) to add your new tool. +- **Block Registry:** Update the blocks index (`/apps/sim/blocks/index.ts`) to include your new block. +- **Tool Registry:** Update the tools registry (`/apps/sim/tools/index.ts`) to add your new tool. ### How to Create a New Block 1. **Create a New File:** - Create a file for your block named after the provider (e.g., `pinecone.ts`) in the `/sim/blocks/blocks` directory. + Create a file for your block named after the provider (e.g., `pinecone.ts`) in the `/apps/sim/blocks/blocks` directory. 2. **Create a New Icon:** - Create a new icon for your block in the `/sim/components/icons.tsx` file. The icon should follow the same naming convention as the block (e.g., `PineconeIcon`). + Create a new icon for your block in the `/apps/sim/components/icons.tsx` file. The icon should follow the same naming convention as the block (e.g., `PineconeIcon`). 3. **Define the Block Configuration:** Your block should export a constant of type `BlockConfig`. For example: - ```typescript:/sim/blocks/blocks/pinecone.ts + ```typescript:/apps/sim/blocks/blocks/pinecone.ts import { PineconeIcon } from '@/components/icons' import { PineconeResponse } from '@/tools/pinecone/types' import { BlockConfig } from '../types' @@ -313,11 +313,11 @@ In addition, you will need to update the registries: ``` 4. 
**Register Your Block:** - Add your block to the blocks registry (`/sim/blocks/registry.ts`): + Add your block to the blocks registry (`/apps/sim/blocks/registry.ts`): - ```typescript:/sim/blocks/registry.ts + ```typescript:/apps/sim/blocks/registry.ts import { PineconeBlock } from './blocks/pinecone' - + // Registry of all available blocks export const registry: Record = { // ... existing blocks @@ -333,7 +333,7 @@ In addition, you will need to update the registries: ### How to Create a New Tool 1. **Create a New Directory:** - Create a directory under `/sim/tools` with the same name as the provider (e.g., `/sim/tools/pinecone`). + Create a directory under `/apps/sim/tools` with the same name as the provider (e.g., `/apps/sim/tools/pinecone`). 2. **Create Tool Files:** Create separate files for each tool functionality with descriptive names (e.g., `fetch.ts`, `generate_embeddings.ts`, `search_text.ts`) in your tool directory. @@ -344,7 +344,7 @@ In addition, you will need to update the registries: 4. **Create an Index File:** Create an `index.ts` file in your tool directory that imports and exports all tools: - ```typescript:/sim/tools/pinecone/index.ts + ```typescript:/apps/sim/tools/pinecone/index.ts import { fetchTool } from './fetch' import { generateEmbeddingsTool } from './generate_embeddings' import { searchTextTool } from './search_text' @@ -355,7 +355,7 @@ In addition, you will need to update the registries: 5. **Define the Tool Configuration:** Your tool should export a constant with a naming convention of `{toolName}Tool`. The tool ID should follow the format `{provider}_{tool_name}`. For example: - ```typescript:/sim/tools/pinecone/fetch.ts + ```typescript:/apps/sim/tools/pinecone/fetch.ts import { ToolConfig, ToolResponse } from '../types' import { PineconeParams, PineconeResponse } from './types' @@ -384,9 +384,9 @@ In addition, you will need to update the registries: ``` 6. 
**Register Your Tool:** - Update the tools registry in `/sim/tools/index.ts` to include your new tool: + Update the tools registry in `/apps/sim/tools/index.ts` to include your new tool: - ```typescript:/sim/tools/index.ts + ```typescript:/apps/sim/tools/index.ts import { fetchTool, generateEmbeddingsTool, searchTextTool } from './pinecone' // ... other imports diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 4743ebc74..7955b54bb 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,7 +1,7 @@ --- name: Bug report about: Create a report to help us improve -title: "[BUG]" +title: '[BUG]' labels: bug assignees: '' --- @@ -11,6 +11,7 @@ A clear and concise description of what the bug is. **To Reproduce** Steps to reproduce the behavior: + 1. Go to '...' 2. Click on '....' 3. Scroll down to '....' @@ -23,4 +24,4 @@ A clear and concise description of what you expected to happen. If applicable, add screenshots to help explain your problem. **Additional context** -Add any other context about the problem here. \ No newline at end of file +Add any other context about the problem here. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 97ba1a469..cac49aae9 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,7 +1,7 @@ --- name: Feature request about: Suggest an idea for this project -title: "[REQUEST]" +title: '[REQUEST]' labels: feature assignees: '' --- @@ -16,4 +16,4 @@ A clear and concise description of what you want to happen. A clear and concise description of any alternative solutions or features you've considered. **Additional context** -Add any other context or screenshots about the feature request here. \ No newline at end of file +Add any other context or screenshots about the feature request here. 
diff --git a/.github/SECURITY.md b/.github/SECURITY.md index 1429f0952..e09e1cbbc 100644 --- a/.github/SECURITY.md +++ b/.github/SECURITY.md @@ -6,7 +6,6 @@ | ------- | ------------------ | | 0.1.x | :white_check_mark: | - ## Reporting a Vulnerability We take the security of Sim Studio seriously. If you believe you've found a security vulnerability, please follow these steps: @@ -16,6 +15,7 @@ We take the security of Sim Studio seriously. If you believe you've found a secu 2. **Email us directly** at security@simstudio.ai with details of the vulnerability. 3. **Include the following information** in your report: + - Description of the vulnerability - Steps to reproduce - Potential impact @@ -23,4 +23,4 @@ We take the security of Sim Studio seriously. If you believe you've found a secu 4. We will acknowledge receipt of your vulnerability report within 48 hours and provide an estimated timeline for a fix. -5. Once the vulnerability is fixed, we will notify you and publicly acknowledge your contribution (unless you prefer to remain anonymous). \ No newline at end of file +5. Once the vulnerability is fixed, we will notify you and publicly acknowledge your contribution (unless you prefer to remain anonymous). 
diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 909f0023a..4d74eca96 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,98 +1,98 @@ version: 2 updates: - - package-ecosystem: "npm" - directory: "/sim" + - package-ecosystem: 'npm' + directory: '/sim' schedule: - interval: "weekly" - day: "monday" - time: "09:00" + interval: 'weekly' + day: 'monday' + time: '09:00' # Disable version updates open-pull-requests-limit: 0 labels: - - "dependencies" - - "security" + - 'dependencies' + - 'security' commit-message: - prefix: "fix(deps)" - prefix-development: "chore(deps)" - include: "scope" + prefix: 'fix(deps)' + prefix-development: 'chore(deps)' + include: 'scope' groups: dependencies: applies-to: security-updates patterns: - - "*" - + - '*' + # Documentation site dependencies (/docs) - - package-ecosystem: "npm" - directory: "/docs" + - package-ecosystem: 'npm' + directory: '/docs' schedule: - interval: "weekly" - day: "wednesday" + interval: 'weekly' + day: 'wednesday' # Disable version updates open-pull-requests-limit: 0 labels: - - "dependencies" - - "security" + - 'dependencies' + - 'security' commit-message: - prefix: "docs(deps)" - include: "scope" + prefix: 'docs(deps)' + include: 'scope' groups: docs-dependencies: applies-to: security-updates patterns: - - "*" - + - '*' + # Root-level dependencies (if any) - - package-ecosystem: "npm" - directory: "/" + - package-ecosystem: 'npm' + directory: '/' schedule: - interval: "weekly" - day: "friday" + interval: 'weekly' + day: 'friday' # Disable version updates open-pull-requests-limit: 0 labels: - - "dependencies" - - "security" + - 'dependencies' + - 'security' commit-message: - prefix: "chore(deps)" - include: "scope" + prefix: 'chore(deps)' + include: 'scope' groups: root-dependencies: applies-to: security-updates patterns: - - "*" - + - '*' + # GitHub Actions workflows - - package-ecosystem: "github-actions" - directory: "/" + - package-ecosystem: 'github-actions' + directory: 
'/' schedule: - interval: "monthly" + interval: 'monthly' # Disable version updates open-pull-requests-limit: 0 labels: - - "dependencies" - - "security" + - 'dependencies' + - 'security' commit-message: - prefix: "ci(deps)" + prefix: 'ci(deps)' groups: actions: applies-to: security-updates patterns: - - "*" - + - '*' + # Docker containers (if applicable) - - package-ecosystem: "docker" - directory: "/" + - package-ecosystem: 'docker' + directory: '/' schedule: - interval: "monthly" + interval: 'monthly' # Disable version updates open-pull-requests-limit: 0 labels: - - "dependencies" - - "security" + - 'dependencies' + - 'security' commit-message: - prefix: "docker(deps)" + prefix: 'docker(deps)' groups: docker: applies-to: security-updates patterns: - - "*" \ No newline at end of file + - '*' diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index dedb5282e..4a13b6387 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,51 +10,45 @@ jobs: test: name: Test and Build runs-on: ubuntu-latest - + steps: - name: Checkout code uses: actions/checkout@v4 - + - name: Setup Node.js uses: actions/setup-node@v4 with: node-version: '20' cache: 'npm' - cache-dependency-path: './sim/package-lock.json' - + cache-dependency-path: './package-lock.json' + - name: Install dependencies - working-directory: ./sim run: npm ci - - - name: Fix Rollup module issue - working-directory: ./sim - run: | - rm -rf node_modules package-lock.json - npm install - + + - name: Install Turbo globally + run: npm install -g turbo + - name: Run tests with coverage - working-directory: ./sim env: - NODE_OPTIONS: "--no-warnings" - run: npm run test:coverage - + NODE_OPTIONS: '--no-warnings' + run: npx turbo run test + - name: Build application - working-directory: ./sim env: - NODE_OPTIONS: "--no-warnings" - NEXT_PUBLIC_APP_URL: "https://www.simstudio.ai" - STRIPE_SECRET_KEY: "dummy_key_for_ci_only" - STRIPE_WEBHOOK_SECRET: "dummy_secret_for_ci_only" - RESEND_API_KEY: 
"dummy_key_for_ci_only" - AWS_REGION: "us-west-2" - run: npm run build - + NODE_OPTIONS: '--no-warnings' + NEXT_PUBLIC_APP_URL: 'https://www.simstudio.ai' + STRIPE_SECRET_KEY: 'dummy_key_for_ci_only' + STRIPE_WEBHOOK_SECRET: 'dummy_secret_for_ci_only' + RESEND_API_KEY: 'dummy_key_for_ci_only' + AWS_REGION: 'us-west-2' + run: npx turbo run build + - name: Upload coverage to Codecov uses: codecov/codecov-action@v3 with: - directory: ./sim/coverage + directory: ./apps/sim/coverage fail_ci_if_error: false - verbose: true + verbose: true migrations: name: Apply Database Migrations @@ -64,20 +58,19 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v4 - + - name: Setup Node.js uses: actions/setup-node@v4 with: node-version: '20' cache: 'npm' - cache-dependency-path: './sim/package-lock.json' - + cache-dependency-path: './package-lock.json' + - name: Install dependencies - working-directory: ./sim run: npm ci - + - name: Apply migrations - working-directory: ./sim + working-directory: ./apps/sim env: DATABASE_URL: ${{ secrets.DATABASE_URL }} - run: npx drizzle-kit push \ No newline at end of file + run: npx drizzle-kit push diff --git a/.gitignore b/.gitignore index 6f833f1e4..f6f4a540d 100644 --- a/.gitignore +++ b/.gitignore @@ -1,9 +1,10 @@ # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
# dependencies -*/node_modules +/node_modules docs/node_modules /packages/**/node_modules +scripts/node_modules /.pnp .pnp.* .yarn/* @@ -67,10 +68,10 @@ docs/.content-collections # database instantiation **/postgres_data/ -# file uploads -uploads/ - # collector configuration collector-config.yaml docker-compose.collector.yml start-collector.sh + +# Turborepo +.turbo \ No newline at end of file diff --git a/sim/.prettierignore b/.prettierignore similarity index 100% rename from sim/.prettierignore rename to .prettierignore diff --git a/sim/.prettierrc b/.prettierrc similarity index 100% rename from sim/.prettierrc rename to .prettierrc diff --git a/Dockerfile b/Dockerfile index 272f7f971..136ff87b2 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,19 +3,22 @@ FROM node:20-alpine # Set working directory WORKDIR /app -# Copy the entire sim directory -COPY sim/ ./ +# Copy the entire monorepo +COPY . ./ # Create the .env file if it doesn't exist -RUN touch .env +RUN touch apps/sim/.env -# Install dependencies +# Install dependencies for the monorepo RUN npm install -# Generate database schema -RUN npx drizzle-kit generate +# Install Turbo globally +RUN npm install -g turbo + +# Generate database schema for sim app +RUN cd apps/sim && npx drizzle-kit generate EXPOSE 3000 # Run migrations and start the app -CMD npx drizzle-kit push && npm run dev \ No newline at end of file +CMD cd apps/sim && npx drizzle-kit push && cd ../.. && npm run dev \ No newline at end of file diff --git a/README.md b/README.md index 33b66c40d..a9a7e634b 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@

- Sim Studio Logo + Sim Studio Logo

@@ -31,7 +31,7 @@ git clone https://github.com/YOUR_USERNAME/sim.git cd sim # Create environment file and update with required environment variables (BETTER_AUTH_SECRET) -cp sim/.env.example sim/.env +cp .env.example .env # Start Sim Studio using the provided script docker compose up -d --build @@ -72,7 +72,7 @@ To use local models with Sim Studio, follow these steps: ```bash # Run the ollama_docker.sh script to pull the required models - ./sim/scripts/ollama_docker.sh pull + ./apps/sim/scripts/ollama_docker.sh pull ``` 2. **Start Sim Studio with Local Models** @@ -111,7 +111,7 @@ services: simstudio: # ... existing configuration ... extra_hosts: - - "host.docker.internal:host-gateway" + - 'host.docker.internal:host-gateway' environment: - OLLAMA_HOST=http://host.docker.internal:11434 ``` @@ -122,7 +122,7 @@ services: 2. Install the [Remote - Containers extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) 3. Open the project in your editor 4. Click "Reopen in Container" when prompted -5. The environment will automatically be set up in the `sim` directory +5. The environment will automatically be set up 6. Run `npm run dev` in the terminal or use the `sim-start` alias ### Option 3: Manual Setup @@ -132,7 +132,7 @@ services: ```bash # Clone the repository git clone https://github.com/YOUR_USERNAME/sim.git -cd sim/sim +cd sim # Install dependencies npm install @@ -141,8 +141,7 @@ npm install 2. **Set Up Environment** ```bash -# Copy .env.example to .env -cp .env.example .env +cp .env.example .env # or create a new .env file # Configure your .env file with the required environment variables: # - Database connection (PostgreSQL) @@ -150,6 +149,7 @@ cp .env.example .env ``` ⚠️ **Important Notes:** + - If `RESEND_API_KEY` is not set, verification codes for login/signup will be logged to the console. - You can use these logged codes for testing authentication locally. 
- For production environments, you should set up a proper email provider. @@ -158,6 +158,7 @@ cp .env.example .env ```bash # Push the database schema +cd apps/sim npx drizzle-kit push ``` @@ -165,6 +166,7 @@ npx drizzle-kit push ```bash # Start the development server +cd ../.. npm run dev ``` @@ -179,6 +181,7 @@ npm run dev - **State Management**: [Zustand](https://zustand-demo.pmnd.rs/) - **Flow Editor**: [ReactFlow](https://reactflow.dev/) - **Docs**: [Fumadocs](https://fumadocs.vercel.app/) +- **Monorepo**: [Turborepo](https://turborepo.org/) ## Contributing diff --git a/apps/docs/.gitignore b/apps/docs/.gitignore new file mode 100644 index 000000000..424c9fafc --- /dev/null +++ b/apps/docs/.gitignore @@ -0,0 +1,47 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. + +# dependencies +/node_modules +/.pnp +.pnp.* +.yarn/* +!.yarn/patches +!.yarn/plugins +!.yarn/releases +!.yarn/versions + +# testing +/coverage + +# next.js +/.next/ +/out/ +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* +.pnpm-debug.log* + +# env files +.env +*.env +.env.local +.env.development +.env.test +.env.production + +# vercel +.vercel + +# typescript +*.tsbuildinfo +next-env.d.ts + +# Fumadocs +/.source/ \ No newline at end of file diff --git a/docs/README.md b/apps/docs/README.md similarity index 100% rename from docs/README.md rename to apps/docs/README.md diff --git a/docs/app/(docs)/[[...slug]]/layout.tsx b/apps/docs/app/(docs)/[[...slug]]/layout.tsx similarity index 87% rename from docs/app/(docs)/[[...slug]]/layout.tsx rename to apps/docs/app/(docs)/[[...slug]]/layout.tsx index 111918ba9..8f9568286 100644 --- a/docs/app/(docs)/[[...slug]]/layout.tsx +++ b/apps/docs/app/(docs)/[[...slug]]/layout.tsx @@ -1,7 +1,7 @@ import type { ReactNode } from 'react' import Link from 'next/link' import { DocsLayout } from 'fumadocs-ui/layouts/docs' -import { GithubIcon, ExternalLink } from 'lucide-react' +import 
{ ExternalLink, GithubIcon } from 'lucide-react' import { source } from '@/lib/source' const GitHubLink = () => ( @@ -23,9 +23,7 @@ export default function Layout({ children }: { children: ReactNode }) { Sim Studio - ), + title:

Sim Studio
, }} links={[ { diff --git a/docs/app/(docs)/[[...slug]]/page.tsx b/apps/docs/app/(docs)/[[...slug]]/page.tsx similarity index 82% rename from docs/app/(docs)/[[...slug]]/page.tsx rename to apps/docs/app/(docs)/[[...slug]]/page.tsx index 981a150c1..2abc62f49 100644 --- a/docs/app/(docs)/[[...slug]]/page.tsx +++ b/apps/docs/app/(docs)/[[...slug]]/page.tsx @@ -1,18 +1,11 @@ import { notFound } from 'next/navigation' -import { - DocsBody, - DocsDescription, - DocsPage, - DocsTitle, -} from 'fumadocs-ui/page' -import { source } from '@/lib/source' +import { DocsBody, DocsDescription, DocsPage, DocsTitle } from 'fumadocs-ui/page' import mdxComponents from '@/components/mdx-components' +import { source } from '@/lib/source' export const dynamic = 'force-static' -export default async function Page(props: { - params: Promise<{ slug?: string[] }> -}) { +export default async function Page(props: { params: Promise<{ slug?: string[] }> }) { const params = await props.params const page = source.getPage(params.slug) if (!page) notFound() @@ -53,9 +46,7 @@ export async function generateStaticParams() { return source.generateParams() } -export async function generateMetadata(props: { - params: Promise<{ slug?: string[] }> -}) { +export async function generateMetadata(props: { params: Promise<{ slug?: string[] }> }) { const params = await props.params const page = source.getPage(params.slug) if (!page) notFound() diff --git a/docs/app/api/search/route.ts b/apps/docs/app/api/search/route.ts similarity index 100% rename from docs/app/api/search/route.ts rename to apps/docs/app/api/search/route.ts diff --git a/docs/app/global.css b/apps/docs/app/global.css similarity index 67% rename from docs/app/global.css rename to apps/docs/app/global.css index 6636dcc70..e2ac000dc 100644 --- a/docs/app/global.css +++ b/apps/docs/app/global.css @@ -2,17 +2,17 @@ @import 'fumadocs-ui/css/neutral.css'; @import 'fumadocs-ui/css/preset.css'; :root { - --color-fd-primary: #802FFF; /* Purple from 
control-bar component */ + --color-fd-primary: #802fff; /* Purple from control-bar component */ } /* Custom text highlighting styles */ .text-highlight { - color: var(--color-fd-primary); + color: var(--color-fd-primary); } /* Override marker color for highlighted lists */ .highlight-markers li::marker { - color: var(--color-fd-primary); + color: var(--color-fd-primary); } @source '../node_modules/fumadocs-ui/dist/**/*.js'; diff --git a/docs/app/layout.config.tsx b/apps/docs/app/layout.config.tsx similarity index 99% rename from docs/app/layout.config.tsx rename to apps/docs/app/layout.config.tsx index ba9d2d7dd..b9150a83b 100644 --- a/docs/app/layout.config.tsx +++ b/apps/docs/app/layout.config.tsx @@ -18,4 +18,4 @@ export const baseOptions: BaseLayoutProps = { ), }, -} \ No newline at end of file +} diff --git a/docs/app/layout.tsx b/apps/docs/app/layout.tsx similarity index 100% rename from docs/app/layout.tsx rename to apps/docs/app/layout.tsx diff --git a/docs/components/icons.tsx b/apps/docs/components/icons.tsx similarity index 100% rename from docs/components/icons.tsx rename to apps/docs/components/icons.tsx diff --git a/docs/components/mdx-components.tsx b/apps/docs/components/mdx-components.tsx similarity index 83% rename from docs/components/mdx-components.tsx rename to apps/docs/components/mdx-components.tsx index a19f5c268..cdd241bea 100644 --- a/docs/components/mdx-components.tsx +++ b/apps/docs/components/mdx-components.tsx @@ -4,7 +4,7 @@ import { ThemeImage } from './ui/theme-image' // Extend the default MDX components with our custom components const mdxComponents = { ...defaultMdxComponents, - ThemeImage + ThemeImage, } -export default mdxComponents \ No newline at end of file +export default mdxComponents diff --git a/docs/components/ui/block-info-card.tsx b/apps/docs/components/ui/block-info-card.tsx similarity index 86% rename from docs/components/ui/block-info-card.tsx rename to apps/docs/components/ui/block-info-card.tsx index 
7120984e2..4ba816d50 100644 --- a/docs/components/ui/block-info-card.tsx +++ b/apps/docs/components/ui/block-info-card.tsx @@ -3,22 +3,22 @@ import * as React from 'react' interface BlockInfoCardProps { - type: string; - color: string; - icon?: boolean; - iconSvg?: string; + type: string + color: string + icon?: boolean + iconSvg?: string } -export function BlockInfoCard({ - type, - color, +export function BlockInfoCard({ + type, + color, icon = false, - iconSvg + iconSvg, }: BlockInfoCardProps): React.ReactNode { return (
-
@@ -41,4 +41,4 @@ export function BlockInfoCard({ )}
) -} \ No newline at end of file +} diff --git a/docs/components/ui/block-types.tsx b/apps/docs/components/ui/block-types.tsx similarity index 81% rename from docs/components/ui/block-types.tsx rename to apps/docs/components/ui/block-types.tsx index e1484da44..0205aadd3 100644 --- a/docs/components/ui/block-types.tsx +++ b/apps/docs/components/ui/block-types.tsx @@ -20,14 +20,22 @@ const BlockFeature = ({ itemsPerRow: number }) => { const blockColor = { - '--block-color': title === 'Agent' ? '#8b5cf6' : - title === 'API' ? '#3b82f6' : - title === 'Condition' ? '#f59e0b' : - title === 'Function' ? '#10b981' : - title === 'Router' ? '#6366f1' : - title === 'Evaluator' ? '#ef4444' : '#8b5cf6' + '--block-color': + title === 'Agent' + ? '#8b5cf6' + : title === 'API' + ? '#3b82f6' + : title === 'Condition' + ? '#f59e0b' + : title === 'Function' + ? '#10b981' + : title === 'Router' + ? '#6366f1' + : title === 'Evaluator' + ? '#ef4444' + : '#8b5cf6', } as React.CSSProperties - + const content = ( <> {index < itemsPerRow && ( @@ -36,13 +44,17 @@ const BlockFeature = ({ {index >= itemsPerRow && (
)} -
{icon}
-
+
{title} diff --git a/docs/components/ui/features.tsx b/apps/docs/components/ui/features.tsx similarity index 95% rename from docs/components/ui/features.tsx rename to apps/docs/components/ui/features.tsx index 54ecd32dd..6b142da9c 100644 --- a/docs/components/ui/features.tsx +++ b/apps/docs/components/ui/features.tsx @@ -1,10 +1,10 @@ import { IconAdjustmentsBolt, IconCloud, - IconHistory, IconEaseInOut, IconHeart, IconHelp, + IconHistory, IconRouteAltLeft, IconTerminal2, } from '@tabler/icons-react' @@ -14,8 +14,7 @@ export function Features() { const features = [ { title: 'Multi-LLM Support', - description: - 'Connect to any LLM provider including OpenAI, Anthropic, and more', + description: 'Connect to any LLM provider including OpenAI, Anthropic, and more', icon: , }, { @@ -88,9 +87,7 @@ export const Feature = ({ {index >= 4 && (
)} -
- {icon} -
+
{icon}
diff --git a/docs/components/ui/theme-image.tsx b/apps/docs/components/ui/theme-image.tsx similarity index 74% rename from docs/components/ui/theme-image.tsx rename to apps/docs/components/ui/theme-image.tsx index 27d4fbcc4..5b8ab3ff3 100644 --- a/docs/components/ui/theme-image.tsx +++ b/apps/docs/components/ui/theme-image.tsx @@ -1,8 +1,8 @@ 'use client' -import { useTheme } from 'next-themes' -import Image from 'next/image' import { useEffect, useState } from 'react' +import Image from 'next/image' +import { useTheme } from 'next-themes' interface ThemeImageProps { lightSrc: string @@ -13,13 +13,13 @@ interface ThemeImageProps { className?: string } -export function ThemeImage({ - lightSrc, - darkSrc, - alt, - width = 600, +export function ThemeImage({ + lightSrc, + darkSrc, + alt, + width = 600, height = 400, - className = 'rounded-lg border border-border my-6' + className = 'rounded-lg border border-border my-6', }: ThemeImageProps) { const { resolvedTheme } = useTheme() const [imageSrc, setImageSrc] = useState(lightSrc) @@ -42,13 +42,7 @@ export function ThemeImage({ return (
- {alt} + {alt}
) -} \ No newline at end of file +} diff --git a/docs/content/docs/blocks/agent.mdx b/apps/docs/content/docs/blocks/agent.mdx similarity index 66% rename from docs/content/docs/blocks/agent.mdx rename to apps/docs/content/docs/blocks/agent.mdx index 887386ef2..a5ea6eb4c 100644 --- a/docs/content/docs/blocks/agent.mdx +++ b/apps/docs/content/docs/blocks/agent.mdx @@ -4,22 +4,23 @@ description: Create powerful AI agents using any LLM provider --- import { Callout } from 'fumadocs-ui/components/callout' -import { Tabs, Tab } from 'fumadocs-ui/components/tabs' -import { Steps, Step } from 'fumadocs-ui/components/steps' +import { Step, Steps } from 'fumadocs-ui/components/steps' +import { Tab, Tabs } from 'fumadocs-ui/components/tabs' import { ThemeImage } from '@/components/ui/theme-image' The Agent block is a fundamental component in Sim Studio that allows you to create powerful AI agents using various LLM providers. These agents can process inputs based on customizable system prompts and utilize integrated tools to enhance their capabilities. - - Agent blocks serve as interfaces to Large Language Models, enabling your workflow to leverage state-of-the-art AI capabilities. + Agent blocks serve as interfaces to Large Language Models, enabling your workflow to leverage + state-of-the-art AI capabilities. ## Overview @@ -28,10 +29,12 @@ The Agent block serves as an interface to Large Language Models (LLMs), enabling - Respond to user inputs: Generate natural language responses based on provided inputs + Respond to user inputs: Generate natural language responses based on provided + inputs - Follow instructions: Adhere to specific instructions defined in the system prompt + Follow instructions: Adhere to specific instructions defined in the system + prompt Use specialized tools: Interact with integrated tools to extend capabilities @@ -78,17 +81,28 @@ Control the creativity and randomness of responses: -

More deterministic, focused responses. Best for factual tasks, customer support, and situations where accuracy is critical.

+

+ More deterministic, focused responses. Best for factual tasks, customer support, and + situations where accuracy is critical. +

-

Balanced creativity and focus. Suitable for general purpose applications that require both accuracy and some creativity.

+

+ Balanced creativity and focus. Suitable for general purpose applications that require both + accuracy and some creativity. +

-

More creative, varied responses. Ideal for creative writing, brainstorming, and generating diverse ideas.

+

+ More creative, varied responses. Ideal for creative writing, brainstorming, and generating + diverse ideas. +

-

The temperature range (0-1 or 0-2) varies depending on the selected model.

+

+ The temperature range (0-1 or 0-2) varies depending on the selected model. +

### API Key @@ -102,12 +116,12 @@ Integrate specialized tools to enhance the agent's capabilities. You can add too 2. Selecting from the tools dropdown menu 3. Choosing an existing tool or creating a new one - Available tools include: @@ -122,7 +136,8 @@ Available tools include: You can also create custom tools to meet specific requirements for your agent's capabilities. - Tools significantly expand what your agent can do, allowing it to access external systems, retrieve information, and take actions beyond simple text generation. + Tools significantly expand what your agent can do, allowing it to access external systems, + retrieve information, and take actions beyond simple text generation. ### Response Format @@ -133,20 +148,38 @@ Define a structured format for the agent's response when needed, using JSON or o -
    -
  • User Prompt: The user's query or context for the agent
  • -
  • System Prompt: Instructions for the agent (optional)
  • -
  • Tools: Optional tool connections that the agent can use
  • +
      +
    • + User Prompt: The user's query or context for the agent +
    • +
    • + System Prompt: Instructions for the agent (optional) +
    • +
    • + Tools: Optional tool connections that the agent can use +
    -
      -
    • Content: The agent's response text
    • -
    • Model: The model used for generation
    • -
    • Tokens: Usage statistics (prompt, completion, total)
    • -
    • Tool Calls: Details of any tools used during processing
    • -
    • Cost: Cost of the response
    • -
    • Usage: Usage statistics (prompt, completion, total)
    • +
        +
      • + Content: The agent's response text +
      • +
      • + Model: The model used for generation +
      • +
      • + Tokens: Usage statistics (prompt, completion, total) +
      • +
      • + Tool Calls: Details of any tools used during processing +
      • +
      • + Cost: Cost of the response +
      • +
      • + Usage: Usage statistics (prompt, completion, total) +
      @@ -176,4 +209,4 @@ tools: - **Be specific in system prompts**: Clearly define the agent's role, tone, and limitations. The more specific your instructions are, the better the agent will be able to fulfill its intended purpose. - **Choose the right temperature setting**: Use lower temperature settings (0-0.3) when accuracy is important, or increase temperature (0.7-2.0) for more creative or varied responses - **Combine with Evaluator blocks**: Use Evaluator blocks to assess agent responses and ensure quality. This allows you to create feedback loops and implement quality control measures. -- **Leverage tools effectively**: Integrate tools that complement the agent's purpose and enhance its capabilities. Be selective about which tools you provide to avoid overwhelming the agent. \ No newline at end of file +- **Leverage tools effectively**: Integrate tools that complement the agent's purpose and enhance its capabilities. Be selective about which tools you provide to avoid overwhelming the agent. diff --git a/docs/content/docs/blocks/api.mdx b/apps/docs/content/docs/blocks/api.mdx similarity index 91% rename from docs/content/docs/blocks/api.mdx rename to apps/docs/content/docs/blocks/api.mdx index d7b780ee0..086a92a07 100644 --- a/docs/content/docs/blocks/api.mdx +++ b/apps/docs/content/docs/blocks/api.mdx @@ -4,18 +4,18 @@ description: Connect to external services through API endpoints --- import { Callout } from 'fumadocs-ui/components/callout' -import { Tabs, Tab } from 'fumadocs-ui/components/tabs' -import { Steps, Step } from 'fumadocs-ui/components/steps' +import { Step, Steps } from 'fumadocs-ui/components/steps' +import { Tab, Tabs } from 'fumadocs-ui/components/tabs' import { ThemeImage } from '@/components/ui/theme-image' The API block enables you to connect your workflow to external services through HTTP requests. It supports various methods like GET, POST, PUT, DELETE, and PATCH, allowing you to interact with virtually any API endpoint. 
- ## Overview @@ -125,4 +125,4 @@ headers: - **Handle errors gracefully**: Connect error handling logic for failed requests - **Validate responses**: Check status codes and response formats before processing data - **Respect rate limits**: Be mindful of API rate limits and implement appropriate throttling -- **Cache responses when appropriate**: For frequently accessed data that doesn't change often \ No newline at end of file +- **Cache responses when appropriate**: For frequently accessed data that doesn't change often diff --git a/docs/content/docs/blocks/condition.mdx b/apps/docs/content/docs/blocks/condition.mdx similarity index 64% rename from docs/content/docs/blocks/condition.mdx rename to apps/docs/content/docs/blocks/condition.mdx index 39c2755cf..4946492c7 100644 --- a/docs/content/docs/blocks/condition.mdx +++ b/apps/docs/content/docs/blocks/condition.mdx @@ -4,53 +4,54 @@ description: Create conditional logic and branching in your workflows --- import { Callout } from 'fumadocs-ui/components/callout' -import { Tabs, Tab } from 'fumadocs-ui/components/tabs' -import { Steps, Step } from 'fumadocs-ui/components/steps' -import { Files, Folder, File } from 'fumadocs-ui/components/files' +import { File, Files, Folder } from 'fumadocs-ui/components/files' +import { Step, Steps } from 'fumadocs-ui/components/steps' +import { Tab, Tabs } from 'fumadocs-ui/components/tabs' import { ThemeImage } from '@/components/ui/theme-image' The Condition block allows you to branch your workflow execution path based on boolean expressions. It evaluates conditions and routes the workflow accordingly, enabling you to create dynamic, responsive workflows with different execution paths. - - Condition blocks enable deterministic decision-making without requiring an LLM, making them ideal for straightforward branching logic. + Condition blocks enable deterministic decision-making without requiring an LLM, making them ideal + for straightforward branching logic. 
## Overview The Condition block serves as a decision point in your workflow, enabling: -
      -
      -

      Branching Logic

      +
      +
      +

      Branching Logic

      Create different execution paths based on specific conditions
      - -
      -

      Rule-Based Routing

      + +
      +

      Rule-Based Routing

      Route workflows deterministically without needing an LLM
      - -
      -

      Data-Driven Decisions

      + +
      +

      Data-Driven Decisions

      Create workflow paths based on structured data values
      - -
      -

      If-Then-Else Logic

      + +
      +

      If-Then-Else Logic

      Implement conditional programming paradigms in your workflows
      @@ -117,17 +118,29 @@ Conditions use JavaScript syntax and can reference input values from previous bl -
        -
      • Variables: Values from previous blocks that can be referenced in conditions
      • -
      • Conditions: Boolean expressions to evaluate
      • +
          +
        • + Variables: Values from previous blocks that can be referenced in conditions +
        • +
        • + Conditions: Boolean expressions to evaluate +
        -
          -
        • Content: A description of the evaluation result
        • -
        • Condition Result: The boolean result of the condition evaluation
        • -
        • Selected Path: Details of the chosen routing destination
        • -
        • Selected Condition ID: Identifier of the condition that was matched
        • +
            +
          • + Content: A description of the evaluation result +
          • +
          • + Condition Result: The boolean result of the condition evaluation +
          • +
          • + Selected Path: Details of the chosen routing destination +
          • +
          • + Selected Condition ID: Identifier of the condition that was matched +
          @@ -139,20 +152,20 @@ Here's an example of how a Condition block might be used in a customer satisfact ```yaml # Example Condition Configuration conditions: - - id: "high_satisfaction" - expression: "input.satisfactionScore >= 8" - description: "Customer is highly satisfied" - path: "positive_feedback_block" - - - id: "medium_satisfaction" - expression: "input.satisfactionScore >= 5" - description: "Customer is moderately satisfied" - path: "neutral_feedback_block" - - - id: "default" - expression: "true" - description: "Customer is not satisfied" - path: "improvement_feedback_block" + - id: 'high_satisfaction' + expression: 'input.satisfactionScore >= 8' + description: 'Customer is highly satisfied' + path: 'positive_feedback_block' + + - id: 'medium_satisfaction' + expression: 'input.satisfactionScore >= 5' + description: 'Customer is moderately satisfied' + path: 'neutral_feedback_block' + + - id: 'default' + expression: 'true' + description: 'Customer is not satisfied' + path: 'improvement_feedback_block' ``` ## Best Practices @@ -175,4 +188,4 @@ Add descriptions to explain the purpose of each condition. This helps other team ### Test edge cases -Ensure your conditions handle boundary values correctly. Test with values at the edges of your condition ranges to verify correct behavior. \ No newline at end of file +Ensure your conditions handle boundary values correctly. Test with values at the edges of your condition ranges to verify correct behavior. 
diff --git a/docs/content/docs/blocks/evaluator.mdx b/apps/docs/content/docs/blocks/evaluator.mdx similarity index 92% rename from docs/content/docs/blocks/evaluator.mdx rename to apps/docs/content/docs/blocks/evaluator.mdx index 35c2d5a19..dd398345a 100644 --- a/docs/content/docs/blocks/evaluator.mdx +++ b/apps/docs/content/docs/blocks/evaluator.mdx @@ -4,18 +4,18 @@ description: Assess content quality using customizable evaluation metrics --- import { Callout } from 'fumadocs-ui/components/callout' -import { Tabs, Tab } from 'fumadocs-ui/components/tabs' -import { Steps, Step } from 'fumadocs-ui/components/steps' +import { Step, Steps } from 'fumadocs-ui/components/steps' +import { Tab, Tabs } from 'fumadocs-ui/components/tabs' import { ThemeImage } from '@/components/ui/theme-image' The Evaluator block allows you to assess the quality of content using customizable evaluation metrics. This is particularly useful for evaluating AI-generated text, ensuring outputs meet specific criteria, and building quality-control mechanisms into your workflows. - ## Overview @@ -102,7 +102,7 @@ Here's an example of how an Evaluator block might be configured for assessing cu metrics: - name: Empathy description: How well does the response acknowledge and address the customer's emotional state? 
- range: + range: min: 1 max: 5 - name: Solution @@ -125,4 +125,4 @@ model: Anthropic/claude-3-opus - **Choose appropriate ranges**: Select scoring ranges that provide enough granularity without being overly complex - **Connect with Agent blocks**: Use Evaluator blocks to assess Agent block outputs and create feedback loops - **Use consistent metrics**: For comparative analysis, maintain consistent metrics across similar evaluations -- **Combine multiple metrics**: Use several metrics to get a comprehensive evaluation \ No newline at end of file +- **Combine multiple metrics**: Use several metrics to get a comprehensive evaluation diff --git a/docs/content/docs/blocks/function.mdx b/apps/docs/content/docs/blocks/function.mdx similarity index 93% rename from docs/content/docs/blocks/function.mdx rename to apps/docs/content/docs/blocks/function.mdx index 9340f9cde..ab1098835 100644 --- a/docs/content/docs/blocks/function.mdx +++ b/apps/docs/content/docs/blocks/function.mdx @@ -4,18 +4,18 @@ description: Execute custom JavaScript or TypeScript code in your workflows --- import { Callout } from 'fumadocs-ui/components/callout' -import { Tabs, Tab } from 'fumadocs-ui/components/tabs' -import { Steps, Step } from 'fumadocs-ui/components/steps' +import { Step, Steps } from 'fumadocs-ui/components/steps' +import { Tab, Tabs } from 'fumadocs-ui/components/tabs' import { ThemeImage } from '@/components/ui/theme-image' The Function block allows you to write and execute custom JavaScript or TypeScript code directly within your workflow. This powerful feature enables you to implement complex logic, data transformations, and integration with external libraries. 
- ## Overview @@ -134,4 +134,4 @@ return { - **Handle errors gracefully**: Use try/catch blocks to handle potential errors - **Document your code**: Add comments to explain complex logic - **Test edge cases**: Ensure your code handles unusual inputs correctly -- **Optimize for performance**: Be mindful of computational complexity for large datasets \ No newline at end of file +- **Optimize for performance**: Be mindful of computational complexity for large datasets diff --git a/docs/content/docs/blocks/index.mdx b/apps/docs/content/docs/blocks/index.mdx similarity index 86% rename from docs/content/docs/blocks/index.mdx rename to apps/docs/content/docs/blocks/index.mdx index f190861ab..85df573da 100644 --- a/docs/content/docs/blocks/index.mdx +++ b/apps/docs/content/docs/blocks/index.mdx @@ -4,8 +4,8 @@ description: Building blocks for your agentic workflows --- import { Card, Cards } from 'fumadocs-ui/components/card' -import { Tabs, Tab } from 'fumadocs-ui/components/tabs' -import { Steps, Step } from 'fumadocs-ui/components/steps' +import { Step, Steps } from 'fumadocs-ui/components/steps' +import { Tab, Tabs } from 'fumadocs-ui/components/tabs' import { BlockTypes } from '@/components/ui/block-types' Blocks are the fundamental building components of Sim Studio workflows. Each block has a specific purpose and can be connected to other blocks to create sophisticated workflows. @@ -26,13 +26,16 @@ Blocks can be connected to form a directed graph representing your workflow. Eac - Outputs to Inputs: A block's outputs can be connected to another block's inputs. + Outputs to Inputs: A block's outputs can be connected to another block's + inputs. - Multiple Connections: A block can have multiple incoming and outgoing connections. + Multiple Connections: A block can have multiple incoming and outgoing + connections. - Conditional Flows: Some blocks (like Router and Condition) can have multiple output paths based on conditions. 
+ Conditional Flows: Some blocks (like Router and Condition) can have multiple + output paths based on conditions. @@ -48,4 +51,4 @@ Each block type has its own configuration options allowing you to customize its - **Retry policies**: Configure how the block handles failures - **Error handling behavior**: Define how errors are managed and reported -See the specific documentation for each block type to learn about its configuration options. \ No newline at end of file +See the specific documentation for each block type to learn about its configuration options. diff --git a/docs/content/docs/blocks/meta.json b/apps/docs/content/docs/blocks/meta.json similarity index 100% rename from docs/content/docs/blocks/meta.json rename to apps/docs/content/docs/blocks/meta.json diff --git a/docs/content/docs/blocks/router.mdx b/apps/docs/content/docs/blocks/router.mdx similarity index 92% rename from docs/content/docs/blocks/router.mdx rename to apps/docs/content/docs/blocks/router.mdx index 3d709772e..574ac4686 100644 --- a/docs/content/docs/blocks/router.mdx +++ b/apps/docs/content/docs/blocks/router.mdx @@ -4,19 +4,18 @@ description: Route workflow execution based on specific conditions or logic --- import { Callout } from 'fumadocs-ui/components/callout' -import { Tabs, Tab } from 'fumadocs-ui/components/tabs' -import { Steps, Step } from 'fumadocs-ui/components/steps' +import { Step, Steps } from 'fumadocs-ui/components/steps' +import { Tab, Tabs } from 'fumadocs-ui/components/tabs' import { ThemeImage } from '@/components/ui/theme-image' The Router block is a powerful component in Sim Studio that intelligently routes workflow execution based on content analysis, user input, or predefined conditions. It acts as a decision-making junction in your workflow, directing the flow to different paths based on various criteria. 
- - ## Overview @@ -104,6 +103,7 @@ model: OpenAI/gpt-4 ``` In this example, the Router might be connected to: + - A product support block - A billing inquiries block - A technical support block @@ -117,4 +117,4 @@ Based on the user's query, the Router would analyze the content and direct it to - **Use specific routing criteria**: Define clear conditions for selecting each path - **Consider fallback paths**: Connect a default destination for when no specific path is appropriate - **Test with diverse inputs**: Ensure the Router handles various input types correctly -- **Review routing decisions**: Monitor the Router's performance and refine as needed \ No newline at end of file +- **Review routing decisions**: Monitor the Router's performance and refine as needed diff --git a/docs/content/docs/connections/accessing-data.mdx b/apps/docs/content/docs/connections/accessing-data.mdx similarity index 86% rename from docs/content/docs/connections/accessing-data.mdx rename to apps/docs/content/docs/connections/accessing-data.mdx index fda7fec93..db5a058bd 100644 --- a/docs/content/docs/connections/accessing-data.mdx +++ b/apps/docs/content/docs/connections/accessing-data.mdx @@ -4,7 +4,7 @@ description: Techniques for accessing and manipulating data from connected block --- import { Callout } from 'fumadocs-ui/components/callout' -import { Files, Folder, File } from 'fumadocs-ui/components/files' +import { File, Files, Folder } from 'fumadocs-ui/components/files' Once blocks are connected, you can access data from source blocks in destination blocks using connection tags and various data access techniques. @@ -130,7 +130,8 @@ return { message }; It's important to handle cases where connected data might be missing or null: - Always validate connected data before using it, especially when accessing nested properties or array elements. + Always validate connected data before using it, especially when accessing nested properties or + array elements. 
### Default Values @@ -138,9 +139,9 @@ It's important to handle cases where connected data might be missing or null: In Function blocks, you can provide default values for missing data: ```javascript -const userName = input.userBlock?.name || 'Guest'; -const items = input.orderBlock?.items || []; -const total = input.orderBlock?.total ?? 0; +const userName = input.userBlock?.name || 'Guest' +const items = input.orderBlock?.items || [] +const total = input.orderBlock?.total ?? 0 ``` ### Conditional Checks @@ -148,9 +149,9 @@ const total = input.orderBlock?.total ?? 0; Check if data exists before accessing nested properties: ```javascript -let userEmail = 'No email provided'; +let userEmail = 'No email provided' if (input.userBlock && input.userBlock.contact && input.userBlock.contact.email) { - userEmail = input.userBlock.contact.email; + userEmail = input.userBlock.contact.email } ``` @@ -159,8 +160,8 @@ if (input.userBlock && input.userBlock.contact && input.userBlock.contact.email) In Function blocks, use optional chaining to safely access nested properties: ```javascript -const userCity = input.userBlock?.address?.city; -const firstItemName = input.orderBlock?.items?.[0]?.name; +const userCity = input.userBlock?.address?.city +const firstItemName = input.orderBlock?.items?.[0]?.name ``` ## Debugging Connection Data @@ -175,15 +176,15 @@ When troubleshooting connection issues, these techniques can help: ```javascript // Example debugging function function debugConnections() { - console.log('All inputs:', input); - console.log('User data type:', typeof input.userBlock); - console.log('Order items:', input.orderBlock?.items); - + console.log('All inputs:', input) + console.log('User data type:', typeof input.userBlock) + console.log('Order items:', input.orderBlock?.items) + return { debug: true, allInputs: input, userExists: !!input.userBlock, - orderItemCount: input.orderBlock?.items?.length || 0 - }; + orderItemCount: input.orderBlock?.items?.length || 0, + } } 
-``` \ No newline at end of file +``` diff --git a/docs/content/docs/connections/basics.mdx b/apps/docs/content/docs/connections/basics.mdx similarity index 91% rename from docs/content/docs/connections/basics.mdx rename to apps/docs/content/docs/connections/basics.mdx index 1eac77806..de57b6aa2 100644 --- a/docs/content/docs/connections/basics.mdx +++ b/apps/docs/content/docs/connections/basics.mdx @@ -4,21 +4,23 @@ description: Learn how connections work in Sim Studio --- import { Callout } from 'fumadocs-ui/components/callout' -import { Steps, Step } from 'fumadocs-ui/components/steps' +import { Step, Steps } from 'fumadocs-ui/components/steps' ## How Connections Work Connections are the pathways that allow data to flow between blocks in your workflow. When you connect two blocks in Sim Studio, you're establishing a data flow relationship that defines how information passes from one block to another. - Each connection represents a directed relationship where data flows from a source block's output to a destination block's input. + Each connection represents a directed relationship where data flows from a source block's output + to a destination block's input. ### Creating Connections - Select Source Block: Click on the output port of the block you want to connect from + Select Source Block: Click on the output port of the block you want to connect + from Draw Connection: Drag to the input port of the destination block @@ -59,7 +61,8 @@ You can manage your connections in several ways: - **Disable**: Temporarily disable a connection without deleting it - Deleting a connection will immediately stop data flow between the blocks. Make sure this is intended before removing connections. + Deleting a connection will immediately stop data flow between the blocks. Make sure this is + intended before removing connections. ## Connection Compatibility @@ -70,4 +73,4 @@ Not all blocks can be connected to each other. Compatibility depends on: 2. 
**Block Restrictions**: Some blocks may have restrictions on what they can connect to 3. **Workflow Logic**: Connections must make logical sense in the context of your workflow -The editor will indicate when connections are invalid or incompatible. \ No newline at end of file +The editor will indicate when connections are invalid or incompatible. diff --git a/docs/content/docs/connections/best-practices.mdx b/apps/docs/content/docs/connections/best-practices.mdx similarity index 80% rename from docs/content/docs/connections/best-practices.mdx rename to apps/docs/content/docs/connections/best-practices.mdx index 9d77cdd65..a178ea791 100644 --- a/docs/content/docs/connections/best-practices.mdx +++ b/apps/docs/content/docs/connections/best-practices.mdx @@ -4,7 +4,7 @@ description: Recommended patterns for effective connection management --- import { Callout } from 'fumadocs-ui/components/callout' -import { Steps, Step } from 'fumadocs-ui/components/steps' +import { Step, Steps } from 'fumadocs-ui/components/steps' ## Workflow Organization @@ -18,7 +18,8 @@ Keep your workflow clean and understandable by organizing connections logically: - **Label complex connections** with descriptive names - A well-organized workflow is easier to understand, debug, and maintain. Take time to arrange your blocks and connections in a logical manner. + A well-organized workflow is easier to understand, debug, and maintain. Take time to arrange your + blocks and connections in a logical manner. 
### Connection Naming Conventions @@ -27,13 +28,16 @@ When working with multiple connections, consistent naming helps maintain clarity - Use descriptive block names: Name blocks based on their function (e.g., "UserDataFetcher", "ResponseGenerator") + Use descriptive block names: Name blocks based on their function (e.g., + "UserDataFetcher", "ResponseGenerator") - Be specific with connection references: Use clear variable names when referencing connections in code + Be specific with connection references: Use clear variable names when + referencing connections in code - Document complex connections: Add comments explaining non-obvious data transformations + Document complex connections: Add comments explaining non-obvious data + transformations @@ -53,23 +57,23 @@ Ensure that the data being passed between blocks is compatible: function processUserData() { // Validate required fields if (!input.userBlock || !input.userBlock.id) { - return { error: "Missing user data", valid: false }; + return { error: 'Missing user data', valid: false } } - + // Transform and validate data types - const userId = String(input.userBlock.id); - const userName = input.userBlock.name || "Unknown User"; - const userScore = Number(input.userBlock.score) || 0; - + const userId = String(input.userBlock.id) + const userName = input.userBlock.name || 'Unknown User' + const userScore = Number(input.userBlock.score) || 0 + return { valid: true, user: { id: userId, name: userName, score: userScore, - isHighScore: userScore > 100 - } - }; + isHighScore: userScore > 100, + }, + } } ``` @@ -89,11 +93,11 @@ Add comments or descriptions to clarify the purpose of connections, especially i * This function processes user data from the UserFetcher block * and order history from the OrderHistory block to generate * personalized product recommendations. 
- * + * * Input: * - userBlock: User profile data (id, preferences, history) * - orderBlock: Recent order history (items, dates, amounts) - * + * * Output: * - recommendations: Array of recommended product IDs * - userSegment: Calculated user segment for marketing @@ -118,28 +122,28 @@ Verify that connection references work as expected: ```javascript // Example: Testing connection references with edge cases function testConnections() { - console.log("Testing connections..."); - + console.log('Testing connections...') + // Log all inputs for debugging - console.log("All inputs:", JSON.stringify(input, null, 2)); - + console.log('All inputs:', JSON.stringify(input, null, 2)) + // Test for missing data - const hasUserData = !!input.userBlock; - console.log("Has user data:", hasUserData); - + const hasUserData = !!input.userBlock + console.log('Has user data:', hasUserData) + // Test edge cases - const items = input.orderBlock?.items || []; - console.log("Item count:", items.length); - console.log("Empty items test:", items.length === 0 ? "Passed" : "Failed"); - + const items = input.orderBlock?.items || [] + console.log('Item count:', items.length) + console.log('Empty items test:', items.length === 0 ? 'Passed' : 'Failed') + // Return test results return { tests: { hasUserData, hasItems: items.length > 0, - hasLargeOrder: items.length > 10 - } - }; + hasLargeOrder: items.length > 10, + }, + } } ``` @@ -157,15 +161,15 @@ Keep your workflows efficient by optimizing how data flows through connections: ```javascript // Example: Optimizing data flow by filtering function optimizeUserData() { - const userData = input.userBlock; - + const userData = input.userBlock + // Only pass necessary fields to downstream blocks return { id: userData.id, name: userData.name, email: userData.email, // Filter out unnecessary profile data, history, etc. 
- }; + } } ``` @@ -181,7 +185,8 @@ Protect sensitive information when using connections: - **Use secure connections** for external API calls - Be careful when logging connection data that might contain sensitive information. Always redact or mask sensitive fields like passwords, API keys, or personal information. + Be careful when logging connection data that might contain sensitive information. Always redact or + mask sensitive fields like passwords, API keys, or personal information. ## Advanced Patterns @@ -200,4 +205,4 @@ Create more sophisticated workflows with feedback connections: - **Implement iterative processing** by connecting later blocks back to earlier ones - **Use Memory blocks** to store state between iterations -- **Set termination conditions** to prevent infinite loops \ No newline at end of file +- **Set termination conditions** to prevent infinite loops diff --git a/docs/content/docs/connections/data-structure.mdx b/apps/docs/content/docs/connections/data-structure.mdx similarity index 95% rename from docs/content/docs/connections/data-structure.mdx rename to apps/docs/content/docs/connections/data-structure.mdx index ac0658fda..94e3fdb90 100644 --- a/docs/content/docs/connections/data-structure.mdx +++ b/apps/docs/content/docs/connections/data-structure.mdx @@ -3,13 +3,14 @@ title: Connection Data Structure description: Understanding the data structure of different block outputs --- -import { Tabs, Tab } from 'fumadocs-ui/components/tabs' import { Callout } from 'fumadocs-ui/components/callout' +import { Tab, Tabs } from 'fumadocs-ui/components/tabs' When you connect blocks, the output data structure from the source block determines what values are available in the destination block. Each block type produces a specific output structure that you can reference in downstream blocks. - Understanding these data structures is essential for effectively using connection tags and accessing the right data in your workflows. 
+ Understanding these data structures is essential for effectively using connection tags and + accessing the right data in your workflows. ## Block Output Structures @@ -44,6 +45,7 @@ Different block types produce different output structures. Here's what you can e - **toolCalls**: Array of tool calls made by the agent (if any) - **cost**: Array of cost objects for each tool call (if any) - **usage**: Token usage statistics for the entire response + ```json @@ -62,6 +64,7 @@ Different block types produce different output structures. Here's what you can e - **data**: The response data from the API (can be any type) - **status**: HTTP status code of the response - **headers**: HTTP headers returned by the API + ```json @@ -77,6 +80,7 @@ Different block types produce different output structures. Here's what you can e - **result**: The return value of the function (can be any type) - **stdout**: Console output captured during function execution - **executionTime**: Time taken to execute the function (in milliseconds) + ```json @@ -100,6 +104,7 @@ Different block types produce different output structures. Here's what you can e - **model**: The AI model used for evaluation - **tokens**: Token usage statistics - **[metricName]**: Score for each metric defined in the evaluator (dynamic fields) + ```json @@ -124,6 +129,7 @@ Different block types produce different output structures. Here's what you can e - **blockType**: Type of the next block - **blockTitle**: Title of the next block - **selectedConditionId**: ID of the selected condition + ```json @@ -152,6 +158,7 @@ Different block types produce different output structures. Here's what you can e - **blockId**: ID of the selected destination block - **blockType**: Type of the selected block - **blockTitle**: Title of the selected block + @@ -166,7 +173,8 @@ Some blocks may produce custom output structures based on their configuration: 3. 
**API Blocks**: The `data` field will contain whatever the API returns, which could be any valid JSON structure. - Always check the actual output structure of your blocks during development to ensure you're referencing the correct fields in your connections. + Always check the actual output structure of your blocks during development to ensure you're + referencing the correct fields in your connections. ## Nested Data Structures @@ -178,6 +186,7 @@ Many block outputs contain nested data structures. You can access these using do ``` For example: + - `` - Access the total tokens from an Agent block - `` - Access the ID of the first result from an API response -- `` - Access a nested field in a Function block's result \ No newline at end of file +- `` - Access a nested field in a Function block's result diff --git a/docs/content/docs/connections/index.mdx b/apps/docs/content/docs/connections/index.mdx similarity index 85% rename from docs/content/docs/connections/index.mdx rename to apps/docs/content/docs/connections/index.mdx index 50b08b900..5af22a22e 100644 --- a/docs/content/docs/connections/index.mdx +++ b/apps/docs/content/docs/connections/index.mdx @@ -3,26 +3,19 @@ title: Connections description: Connect your blocks to one another. --- -import { Card, Cards } from 'fumadocs-ui/components/card' import { Callout } from 'fumadocs-ui/components/callout' +import { Card, Cards } from 'fumadocs-ui/components/card' import { ConnectIcon } from '@/components/icons' Connections are the pathways that allow data to flow between blocks in your workflow. They define how information is passed from one block to another, enabling you to create sophisticated, multi-step processes. - Properly configured connections are essential for creating effective workflows. They determine how data moves through your system and how blocks interact with each other. + Properly configured connections are essential for creating effective workflows. 
They determine how + data moves through your system and how blocks interact with each other.
          - +
          ## Connection Types @@ -46,4 +39,3 @@ Sim Studio supports different types of connections that enable various workflow Follow recommended patterns for effective connection management - diff --git a/docs/content/docs/connections/meta.json b/apps/docs/content/docs/connections/meta.json similarity index 98% rename from docs/content/docs/connections/meta.json rename to apps/docs/content/docs/connections/meta.json index eb4351176..81197105c 100644 --- a/docs/content/docs/connections/meta.json +++ b/apps/docs/content/docs/connections/meta.json @@ -1,4 +1,4 @@ { "title": "Connections", "pages": ["basics", "tags", "data-structure", "accessing-data", "best-practices"] -} \ No newline at end of file +} diff --git a/docs/content/docs/connections/tags.mdx b/apps/docs/content/docs/connections/tags.mdx similarity index 72% rename from docs/content/docs/connections/tags.mdx rename to apps/docs/content/docs/connections/tags.mdx index d66799c68..c64c83c4e 100644 --- a/docs/content/docs/connections/tags.mdx +++ b/apps/docs/content/docs/connections/tags.mdx @@ -8,15 +8,7 @@ import { Callout } from 'fumadocs-ui/components/callout' Connection tags are visual representations of the data available from connected blocks. They provide an easy way to reference outputs from previous blocks in your workflow.
          - +
          ### What Are Connection Tags? @@ -28,35 +20,40 @@ Connection tags are interactive elements that appear when blocks are connected. - Create dynamic data flows between blocks - Connection tags make it easy to see what data is available from previous blocks and use it in your current block without having to remember complex data structures. + Connection tags make it easy to see what data is available from previous blocks and use it in your + current block without having to remember complex data structures. ## Using Connection Tags There are two primary ways to use connection tags in your workflows: -
          -
          -

          Drag and Drop

          +
          +
          +

          Drag and Drop

          - Click on a connection tag and drag it into input fields of destination blocks. A dropdown will appear showing available values. + Click on a connection tag and drag it into input fields of destination blocks. A dropdown will + appear showing available values.
          -
            +
            1. Hover over a connection tag to see available data
            2. Click and drag the tag to an input field
            3. Select the specific data field from the dropdown
            4. The reference is inserted automatically
          - -
          -

          Angle Bracket Syntax

          + +
          +

          Angle Bracket Syntax

          - Type <> in input fields to see a dropdown of available connection values from previous blocks. + Type <> in input fields to see a dropdown of available connection values + from previous blocks.
          -
            +
            1. Click in any input field where you want to use connected data
            2. -
            3. Type <> to trigger the connection dropdown
            4. +
            5. + Type <> to trigger the connection dropdown +
            6. Browse and select the data you want to reference
            7. Continue typing or select from the dropdown to complete the reference
            @@ -72,10 +69,12 @@ Connection tags use a simple syntax to reference data: ``` Where: + - `blockId` is the identifier of the source block - `path.to.data` is the path to the specific data field For example: + - `` - References the content field from a block with ID "agent1" - `` - References the name of the first user in the users array from the data field of a block with ID "api2" @@ -105,5 +104,6 @@ const total = * 1.1; // Add 10% tax ``` - When using connection tags in numeric contexts, make sure the referenced data is actually a number to avoid type conversion issues. - \ No newline at end of file + When using connection tags in numeric contexts, make sure the referenced data is actually a number + to avoid type conversion issues. + diff --git a/docs/content/docs/execution/advanced.mdx b/apps/docs/content/docs/execution/advanced.mdx similarity index 70% rename from docs/content/docs/execution/advanced.mdx rename to apps/docs/content/docs/execution/advanced.mdx index 9b2a8c4cf..5f750820d 100644 --- a/docs/content/docs/execution/advanced.mdx +++ b/apps/docs/content/docs/execution/advanced.mdx @@ -3,9 +3,9 @@ title: Advanced Execution Features description: Master advanced execution capabilities in Sim Studio --- -import { Callout } from 'fumadocs-ui/components/callout' -import { Tabs, Tab } from 'fumadocs-ui/components/tabs' import { Accordion, Accordions } from 'fumadocs-ui/components/accordion' +import { Callout } from 'fumadocs-ui/components/callout' +import { Tab, Tabs } from 'fumadocs-ui/components/tabs' Sim Studio provides several advanced features that give you more control over workflow execution, error handling, and performance optimization. 
@@ -26,8 +26,8 @@ try { } catch (error) { // Handle the error gracefully console.error("Failed to parse JSON:", error.message); - return { - success: false, + return { + success: false, error: error.message, fallbackData: { status: "error", message: "Could not process data" } }; @@ -44,7 +44,8 @@ Comprehensive error information is captured in the execution logs: - **Timestamps**: When the error occurred - Error logs are invaluable for debugging workflows. Always check the logs first when troubleshooting execution issues. + Error logs are invaluable for debugging workflows. Always check the logs first when + troubleshooting execution issues. ### Fallback Mechanisms @@ -72,40 +73,21 @@ Environment variables provide a secure way to store and access configuration val - Store API credentials securely: - - ``` - OPENAI_API_KEY=sk-... - ANTHROPIC_API_KEY=sk-... - GOOGLE_API_KEY=AIza... - ``` - - These are automatically available to blocks that need them, without hardcoding sensitive values in your workflow. + Store API credentials securely: ``` OPENAI_API_KEY=sk-... ANTHROPIC_API_KEY=sk-... + GOOGLE_API_KEY=AIza... ``` These are automatically available to blocks that need them, without + hardcoding sensitive values in your workflow. - + + + Manage environment-specific configuration: ``` MAX_RETRIES=3 DEFAULT_MODEL=gpt-4o LOG_LEVEL=info + BASE_URL=https://api.example.com ``` These values can be referenced in blocks to control behavior + without modifying the workflow itself. + + - Manage environment-specific configuration: - - ``` - MAX_RETRIES=3 - DEFAULT_MODEL=gpt-4o - LOG_LEVEL=info - BASE_URL=https://api.example.com - ``` - - These values can be referenced in blocks to control behavior without modifying the workflow itself. - - - - Store sensitive information securely: - - ``` - DATABASE_PASSWORD=... - JWT_SECRET=... - ENCRYPTION_KEY=... - ``` - - These values are encrypted at rest and only decrypted during execution, providing an extra layer of security. 
+ Store sensitive information securely: ``` DATABASE_PASSWORD=... JWT_SECRET=... + ENCRYPTION_KEY=... ``` These values are encrypted at rest and only decrypted during execution, + providing an extra layer of security. @@ -115,8 +97,8 @@ Environment variables can be accessed in different ways depending on the block t ```javascript // In Function blocks -const apiKey = process.env.MY_API_KEY; -const maxRetries = parseInt(process.env.MAX_RETRIES || "3"); +const apiKey = process.env.MY_API_KEY +const maxRetries = parseInt(process.env.MAX_RETRIES || '3') // In API blocks (via connection tags) // URL: https://api.example.com?key= @@ -126,7 +108,8 @@ const maxRetries = parseInt(process.env.MAX_RETRIES || "3"); ``` - Never hardcode sensitive information like API keys directly in your workflows. Always use environment variables instead. + Never hardcode sensitive information like API keys directly in your workflows. Always use + environment variables instead. ## Real-Time Monitoring @@ -135,19 +118,28 @@ Sim Studio provides powerful real-time monitoring capabilities: - The currently executing block is highlighted in the workflow editor, making it easy to follow the execution flow in real-time. This visual indicator helps you understand exactly where in your workflow the execution is currently happening. + The currently executing block is highlighted in the workflow editor, making it easy to follow + the execution flow in real-time. This visual indicator helps you understand exactly where in + your workflow the execution is currently happening. - - - Execution logs appear in real-time in the logs panel on the right side. These logs include detailed information about each block's execution, including inputs, outputs, execution time, and any errors that occur. You can use these logs to debug your workflow and understand how data flows between blocks. - - - - Each block's state (pending, executing, completed, or error) is visually indicated in the workflow editor. 
This helps you quickly identify which blocks have executed successfully and which may have encountered issues. - - + + + Execution logs appear in real-time in the logs panel on the right side. These logs include + detailed information about each block's execution, including inputs, outputs, execution time, and + any errors that occur. You can use these logs to debug your workflow and understand how data flows + between blocks. + + + + Each block's state (pending, executing, completed, or error) is visually indicated in the workflow + editor. This helps you quickly identify which blocks have executed successfully and which may have + encountered issues. + + - Detailed timing information shows how long each block takes to execute, helping you identify performance bottlenecks in your workflow. The execution engine tracks start time, end time, and total duration for both individual blocks and the entire workflow. + Detailed timing information shows how long each block takes to execute, helping you identify + performance bottlenecks in your workflow. The execution engine tracks start time, end time, and + total duration for both individual blocks and the entire workflow. @@ -177,7 +169,8 @@ Optimize your workflows for better performance: - **Monitor Resource Usage**: Keep an eye on memory and CPU usage - Performance optimization is especially important for workflows that run frequently or process large amounts of data. + Performance optimization is especially important for workflows that run frequently or process + large amounts of data. ## Advanced Execution Context @@ -193,22 +186,22 @@ The execution context maintains detailed information about the workflow executio "block-2": { output: { data: { ... 
} }, status: "completed" }, "block-3": { status: "pending" } }, - + // Active execution path activeExecutionPath: Set(["block-1", "block-2", "block-5"]), - + // Routing decisions decisions: { router: Map(["router-1" => "block-5"]), condition: Map(["condition-1" => "condition-true"]) }, - + // Loop iterations loopIterations: Map(["loop-1" => 2]), - + // Environment variables env: { "API_KEY": "...", "MAX_RETRIES": "3" }, - + // Execution logs logs: [ { blockId: "block-1", timestamp: "...", status: "completed", duration: 120 }, @@ -228,9 +221,9 @@ Advanced techniques for debugging workflow execution: Add strategic console.log statements in Function blocks: ```javascript -console.log("Input to processData:", JSON.stringify(input, null, 2)); -console.log("Processing step 1 complete:", intermediateResult); -console.log("Final result:", finalResult); +console.log('Input to processData:', JSON.stringify(input, null, 2)) +console.log('Processing step 1 complete:', intermediateResult) +console.log('Final result:', finalResult) ``` ### State Inspection @@ -240,18 +233,18 @@ Use Function blocks to inspect the current state: ```javascript function debugState() { // Log all inputs - console.log("All inputs:", input); - + console.log('All inputs:', input) + // Return a debug object with relevant information return { debug: true, inputSummary: { hasUserData: !!input.userBlock, apiStatus: input.apiBlock?.status, - itemCount: input.dataBlock?.items?.length || 0 + itemCount: input.dataBlock?.items?.length || 0, }, - timestamp: new Date().toISOString() - }; + timestamp: new Date().toISOString(), + } } ``` @@ -269,19 +262,19 @@ Identify performance bottlenecks: ```javascript function profileOperation() { - const start = performance.now(); - + const start = performance.now() + // Perform the operation - const result = performExpensiveOperation(); - - const end = performance.now(); - console.log(`Operation took ${end - start}ms`); - + const result = performExpensiveOperation() + + const 
end = performance.now() + console.log(`Operation took ${end - start}ms`) + return { result, - executionTime: end - start - }; + executionTime: end - start, + } } ``` -By mastering these advanced execution features, you can create more robust, efficient, and sophisticated workflows in Sim Studio. \ No newline at end of file +By mastering these advanced execution features, you can create more robust, efficient, and sophisticated workflows in Sim Studio. diff --git a/apps/docs/content/docs/execution/basics.mdx b/apps/docs/content/docs/execution/basics.mdx new file mode 100644 index 000000000..80cce5bf0 --- /dev/null +++ b/apps/docs/content/docs/execution/basics.mdx @@ -0,0 +1,200 @@ +--- +title: Execution Basics +description: Understanding the fundamental execution flow in Sim Studio +--- + +import { Callout } from 'fumadocs-ui/components/callout' +import { File, Files, Folder } from 'fumadocs-ui/components/files' +import { Step, Steps } from 'fumadocs-ui/components/steps' +import { Tab, Tabs } from 'fumadocs-ui/components/tabs' +import { + AgentIcon, + ApiIcon, + ChartBarIcon, + CodeIcon, + ConditionalIcon, + ConnectIcon, +} from '@/components/icons' + +When you run a workflow in Sim Studio, the execution engine follows a systematic process to ensure blocks are executed in the correct order and data flows properly between them. 
+ +## Execution Flow + +The execution of a workflow follows these key steps: + + + + ### Validation Before execution begins, the workflow is validated to ensure it has: - An enabled + starter block with no incoming connections - Properly connected blocks with valid configurations + - No circular dependencies (except in intentional loops) - Valid input and output types between + connected blocks + + + + ### Initialization The execution context is created, which includes: - Environment variables for + the workflow - Input values from the starter block - Initial state for all blocks - Execution path + tracking - Loop iteration counters + + + + ### Block Execution Blocks are executed in topological order (based on dependencies): - The system + identifies which blocks can be executed next - Inputs for each block are resolved from previous + block outputs - Each block is executed by its specialized handler - Outputs are stored in the + execution context + + + + ### Path Determination As execution progresses, the system determines which paths to follow: - + Router and conditional blocks make decisions about execution paths - Only blocks on active paths + are executed - The path tracker maintains the current execution state + + + + ### Result Collection After all blocks have executed: - Final outputs are collected - Execution + logs are compiled - Performance metrics are calculated - Results are presented in the UI + + + +## Block Types and Execution + +Different block types have different execution behaviors: + + + + + Orchestration blocks control the flow of execution through your workflow. + + } + annotation="Initiates workflow execution and provides initial input values. Every workflow must have exactly one starter block." + /> + } + annotation="Directs execution along specific paths based on dynamic decisions. Uses an AI model to select one of multiple possible paths." + /> + } + annotation="Executes different paths based on conditional logic. 
Evaluates JavaScript expressions to determine which path to follow." + /> + + + + + + + Processing blocks transform data and generate new outputs. + + } + annotation="Interacts with AI models to generate content. Executes prompts against various LLM providers." + /> + } + annotation="Executes custom JavaScript/TypeScript code. Runs in a secure sandbox environment with access to connected block outputs." + /> + } + annotation="Assesses outputs against defined criteria. Uses AI to evaluate content based on custom metrics." + /> + + + + + + + Integration blocks connect with external systems. + + } + annotation="Makes HTTP requests to external services. Configurable with headers, body, and authentication." + /> + } + annotation="Specialized blocks for specific services (Gmail, Slack, GitHub, etc.). Each has its own execution logic for the specific service." + /> + + + + + +## Execution Methods + +Sim Studio offers multiple ways to trigger workflow execution: + +### Manual Execution + +Run workflows on-demand through the Sim Studio interface by clicking the "Run" button. This is perfect for: + +- Testing during development +- One-off tasks +- Workflows that need human supervision + +### Scheduled Execution + +Configure workflows to run automatically on a specified schedule: + +- Set up recurring executions using cron expressions +- Define start times and frequency +- Configure timezone settings +- Set minimum and maximum execution intervals + +### API Endpoints + +Each workflow can be exposed as an API endpoint: + +- Get a unique URL for your workflow +- Configure authentication requirements +- Send custom inputs via POST requests +- Receive execution results as JSON responses + +### Webhooks + +Configure workflows to execute in response to external events: + +- Set up webhook triggers from third-party services +- Process incoming webhook data as workflow input +- Configure webhook security settings +- Support for specialized webhooks (GitHub, Stripe, etc.) 
+ + + The execution method you choose depends on your workflow's purpose. Manual execution is great for + development, while scheduled execution, API endpoints, and webhooks are better for production use + cases. + + +## Execution Context + +Each workflow execution maintains a detailed context that includes: + +- **Block States**: Outputs and execution status of each block +- **Execution Path**: The active path through the workflow +- **Routing Decisions**: Records of which paths were selected +- **Environment Variables**: Configuration values for the workflow +- **Execution Logs**: Detailed records of each step in the execution + +This context is maintained throughout the execution and is used to: + +- Resolve inputs for blocks +- Determine which blocks to execute next +- Track the progress of execution +- Provide debugging information +- Store intermediate results + +## Real-Time Execution Monitoring + +As your workflow executes, you can monitor its progress in real-time: + +- **Active Block Highlighting**: The currently executing block is highlighted +- **Live Logs**: Execution logs appear in real-time in the logs panel +- **Block States**: Visual indicators show each block's execution state +- **Performance Metrics**: Timing information for each block's execution + +These monitoring features help you understand how your workflow is executing and identify any issues that arise. 
diff --git a/docs/content/docs/execution/index.mdx b/apps/docs/content/docs/execution/index.mdx similarity index 53% rename from docs/content/docs/execution/index.mdx rename to apps/docs/content/docs/execution/index.mdx index 99acd2543..ce7f14e87 100644 --- a/docs/content/docs/execution/index.mdx +++ b/apps/docs/content/docs/execution/index.mdx @@ -3,45 +3,53 @@ title: Execution description: Understand how workflows are executed in Sim Studio --- -import { Card, Cards } from 'fumadocs-ui/components/card' -import { Files, Folder, File } from 'fumadocs-ui/components/files' -import { Step, Steps } from 'fumadocs-ui/components/steps' import { Accordion, Accordions } from 'fumadocs-ui/components/accordion' -import { Tab, Tabs } from 'fumadocs-ui/components/tabs' -import { AgentIcon, ApiIcon, ConditionalIcon, CodeIcon, ChartBarIcon, ConnectIcon, GmailIcon, PerplexityIcon, NotionIcon, ExaAIIcon, FirecrawlIcon, SlackIcon } from '@/components/icons' import { Callout } from 'fumadocs-ui/components/callout' +import { Card, Cards } from 'fumadocs-ui/components/card' +import { File, Files, Folder } from 'fumadocs-ui/components/files' +import { Step, Steps } from 'fumadocs-ui/components/steps' +import { Tab, Tabs } from 'fumadocs-ui/components/tabs' +import { + AgentIcon, + ApiIcon, + ChartBarIcon, + CodeIcon, + ConditionalIcon, + ConnectIcon, + ExaAIIcon, + FirecrawlIcon, + GmailIcon, + NotionIcon, + PerplexityIcon, + SlackIcon, +} from '@/components/icons' Sim Studio provides a powerful execution engine that brings your workflows to life. Understanding how execution works will help you design more effective workflows and troubleshoot any issues that arise.
            - +
            - The execution engine handles everything from block execution order to data flow, error handling, and loop management. It ensures your workflows run efficiently and predictably. + The execution engine handles everything from block execution order to data flow, error handling, + and loop management. It ensures your workflows run efficiently and predictably. ## Execution Documentation - Learn about the fundamental execution flow, block types, and how data flows through your workflow + Learn about the fundamental execution flow, block types, and how data flows through your + workflow - - - Master the powerful loop functionality to create iterative processes and feedback mechanisms - - + + + Master the powerful loop functionality to create iterative processes and feedback mechanisms + + - Discover advanced capabilities like error handling, environment variables, and performance optimization + Discover advanced capabilities like error handling, environment variables, and performance + optimization @@ -61,45 +69,36 @@ When you execute a workflow in Sim Studio, the system follows a predictable patt - ### Validation - The workflow is validated to ensure it has an enabled starter block and proper connections. This includes checking that: - - The starter block has no incoming connections - - All required blocks are present and properly connected - - Loop configurations are valid with appropriate iteration limits + ### Validation The workflow is validated to ensure it has an enabled starter block and proper + connections. This includes checking that: - The starter block has no incoming connections - All + required blocks are present and properly connected - Loop configurations are valid with + appropriate iteration limits - + + + ### Initialization The execution context is created with environment variables and input values. 
+ This context maintains the state of the workflow throughout execution, including: - Block outputs + and states - Execution path tracking - Routing decisions - Loop iteration counters + + + + ### Block Execution Blocks are executed in topological order, with each block's outputs feeding + into subsequent blocks. The executor: - Determines the next layer of blocks to execute based on + dependencies - Resolves inputs for each block from previous outputs - Dispatches execution to + specialized handlers for each block type + + + + ### Path Determination Router and conditional blocks make routing decisions that determine which + execution paths to follow. The path tracker: - Updates the active execution path based on these + decisions - Ensures that only blocks on active paths are executed - Handles complex branching + logic in your workflow + + - ### Initialization - The execution context is created with environment variables and input values. This context maintains the state of the workflow throughout execution, including: - - Block outputs and states - - Execution path tracking - - Routing decisions - - Loop iteration counters - - - - ### Block Execution - Blocks are executed in topological order, with each block's outputs feeding into subsequent blocks. The executor: - - Determines the next layer of blocks to execute based on dependencies - - Resolves inputs for each block from previous outputs - - Dispatches execution to specialized handlers for each block type - - - - ### Path Determination - Router and conditional blocks make routing decisions that determine which execution paths to follow. The path tracker: - - Updates the active execution path based on these decisions - - Ensures that only blocks on active paths are executed - - Handles complex branching logic in your workflow - - - - ### Result Collection - The final output and execution logs are collected and presented in the UI. 
You'll see: - - Complete execution logs for each block - - Performance metrics and timing information - - Any errors that occurred during execution - - The final workflow output + ### Result Collection The final output and execution logs are collected and presented in the UI. + You'll see: - Complete execution logs for each block - Performance metrics and timing + information - Any errors that occurred during execution - The final workflow output @@ -111,23 +110,45 @@ Sim Studio has two main categories of blocks in workflows: Orchestration blocks control the flow of execution through your workflow. - - } annotation="Direct the workflow along specific paths based on dynamic decisions. The router evaluates inputs and selects one of multiple possible paths." /> - } annotation="Execute different paths based on conditional logic. Conditions are evaluated to true or false, determining which path to follow." /> + } + annotation="Direct the workflow along specific paths based on dynamic decisions. The router evaluates inputs and selects one of multiple possible paths." + /> + } + annotation="Execute different paths based on conditional logic. Conditions are evaluated to true or false, determining which path to follow." + /> - + Output blocks perform operations and generate results that can be used by downstream blocks. - - } annotation="Interact with AI models to generate content. Supports various LLM providers with optional tool calling capabilities." /> - } annotation="Execute custom JavaScript/TypeScript code to process data. Runs in a secure sandbox environment with appropriate timeout limits." /> - } annotation="Make HTTP requests to external services. Configure headers, body, and authentication for REST API interactions."/> - } annotation="Assess outputs against defined criteria with customizable scoring logic."/> + } + annotation="Interact with AI models to generate content. Supports various LLM providers with optional tool calling capabilities." 
+ /> + } + annotation="Execute custom JavaScript/TypeScript code to process data. Runs in a secure sandbox environment with appropriate timeout limits." + /> + } + annotation="Make HTTP requests to external services. Configure headers, body, and authentication for REST API interactions." + /> + } + annotation="Assess outputs against defined criteria with customizable scoring logic." + /> @@ -139,19 +160,28 @@ As your workflow executes, Sim Studio provides powerful real-time monitoring cap - The currently executing block is highlighted in the workflow editor, making it easy to follow the execution flow in real-time. This visual indicator helps you understand exactly where in your workflow the execution is currently happening. + The currently executing block is highlighted in the workflow editor, making it easy to follow + the execution flow in real-time. This visual indicator helps you understand exactly where in + your workflow the execution is currently happening. - - - Execution logs appear in real-time in the logs panel on the right side. These logs include detailed information about each block's execution, including inputs, outputs, execution time, and any errors that occur. You can use these logs to debug your workflow and understand how data flows between blocks. - - - - Each block's state (pending, executing, completed, or error) is visually indicated in the workflow editor. This helps you quickly identify which blocks have executed successfully and which may have encountered issues. - - + + + Execution logs appear in real-time in the logs panel on the right side. These logs include + detailed information about each block's execution, including inputs, outputs, execution time, and + any errors that occur. You can use these logs to debug your workflow and understand how data flows + between blocks. + + + + Each block's state (pending, executing, completed, or error) is visually indicated in the workflow + editor. 
This helps you quickly identify which blocks have executed successfully and which may have + encountered issues. + + - Detailed timing information shows how long each block takes to execute, helping you identify performance bottlenecks in your workflow. The execution engine tracks start time, end time, and total duration for both individual blocks and the entire workflow. + Detailed timing information shows how long each block takes to execute, helping you identify + performance bottlenecks in your workflow. The execution engine tracks start time, end time, and + total duration for both individual blocks and the entire workflow. @@ -161,19 +191,26 @@ Sim Studio offers multiple ways to trigger workflow execution: - Run workflows on-demand through the Sim Studio interface. This is perfect for testing and development, allowing you to iteratively refine your workflow with immediate feedback. + Run workflows on-demand through the Sim Studio interface. This is perfect for testing and + development, allowing you to iteratively refine your workflow with immediate feedback. - - - Configure workflows to run automatically on a specified schedule using cron expressions. Ideal for regular data processing, reporting tasks, or any workflow that needs to run periodically without manual intervention. - - - - Each workflow can be exposed as an API endpoint with authentication, allowing external systems to trigger execution with custom inputs. This enables seamless integration with your existing applications and services. - - + + + Configure workflows to run automatically on a specified schedule using cron expressions. Ideal for + regular data processing, reporting tasks, or any workflow that needs to run periodically without + manual intervention. + + + + Each workflow can be exposed as an API endpoint with authentication, allowing external systems to + trigger execution with custom inputs. This enables seamless integration with your existing + applications and services. 
+ + - Configure workflows to execute in response to external events via webhook triggers. This allows your workflows to react to events from third-party services like GitHub, Stripe, or any platform that supports webhooks. + Configure workflows to execute in response to external events via webhook triggers. This allows + your workflows to react to events from third-party services like GitHub, Stripe, or any platform + that supports webhooks. @@ -226,4 +263,3 @@ Each workflow execution maintains a detailed context that includes: - **Timeout Limits** - Function blocks have configurable timeout limits to prevent long-running operations from blocking execution. By understanding these execution principles, you can design more efficient and effective workflows in Sim Studio. - diff --git a/docs/content/docs/execution/loops.mdx b/apps/docs/content/docs/execution/loops.mdx similarity index 85% rename from docs/content/docs/execution/loops.mdx rename to apps/docs/content/docs/execution/loops.mdx index 8c1483267..328cec442 100644 --- a/docs/content/docs/execution/loops.mdx +++ b/apps/docs/content/docs/execution/loops.mdx @@ -4,21 +4,13 @@ description: Creating iterative processes with loops in Sim Studio --- import { Callout } from 'fumadocs-ui/components/callout' -import { Steps, Step } from 'fumadocs-ui/components/steps' -import { Tabs, Tab } from 'fumadocs-ui/components/tabs' +import { Step, Steps } from 'fumadocs-ui/components/steps' +import { Tab, Tabs } from 'fumadocs-ui/components/tabs' Loops are a powerful feature in Sim Studio that allow you to create iterative processes, implement feedback mechanisms, and build more sophisticated workflows.
            - +
            ## What Are Loops? @@ -31,7 +23,8 @@ Loops in Sim Studio allow a group of blocks to execute repeatedly, with each ite - **Conditional Processing**: Continue execution until specific criteria are met - Loops are particularly powerful for AI workflows, allowing you to implement techniques like chain-of-thought reasoning, recursive refinement, and multi-step problem solving. + Loops are particularly powerful for AI workflows, allowing you to implement techniques like + chain-of-thought reasoning, recursive refinement, and multi-step problem solving. ## Creating Loops @@ -49,7 +42,8 @@ To create a loop in your workflow: Configure Loop Settings: Set iteration limits and conditions - Create Feedback Connections: Connect outputs from later blocks back to earlier blocks + Create Feedback Connections: Connect outputs from later blocks back to earlier + blocks @@ -63,7 +57,8 @@ When configuring a loop, you can set several important parameters: - **Minimum Iterations**: The minimum number of times the loop must execute before checking conditions - Always set a reasonable maximum iteration limit to prevent infinite loops. The default limit of 5 iterations is a good starting point for most workflows. + Always set a reasonable maximum iteration limit to prevent infinite loops. The default limit of 5 + iterations is a good starting point for most workflows. ### Loop Conditions @@ -146,12 +141,13 @@ Loops enable powerful workflow patterns: ### Iterative Refinement -
            +

            Example: Content Refinement

            -
            - Create a loop where an Agent block generates content, an Evaluator block assesses it, and a Function block decides whether to continue refining. +
            + Create a loop where an Agent block generates content, an Evaluator block assesses it, and a + Function block decides whether to continue refining.
            -
              +
              1. Agent generates initial content
              2. Evaluator scores the content
              3. Function analyzes score and provides feedback
              4. @@ -162,12 +158,12 @@ Loops enable powerful workflow patterns: ### Batch Processing -
                +

                Example: Data Processing Pipeline

                -
                +
                Process a collection of items one at a time through a series of blocks.
                -
                  +
                  1. Function block extracts the next item from a collection
                  2. Processing blocks operate on the single item
                  3. Results are accumulated in a Memory block
                  4. @@ -177,12 +173,12 @@ Loops enable powerful workflow patterns: ### Recursive Problem Solving -
                    +

                    Example: Multi-step Reasoning

                    -
                    +
                    Implement a recursive approach to complex problem solving.
                    -
                      +
                      1. Agent analyzes the current problem state
                      2. Function block implements a step in the solution
                      3. Condition block checks if the problem is solved
                      4. @@ -201,7 +197,8 @@ To use loops effectively in your workflows: - **Test Thoroughly**: Verify that loops terminate as expected in all scenarios - Loops with many blocks or complex operations can impact performance. Consider optimizing individual blocks if your loops need many iterations. + Loops with many blocks or complex operations can impact performance. Consider optimizing + individual blocks if your loops need many iterations. ## Loop Debugging @@ -214,4 +211,4 @@ When debugging loops in your workflows: - **Use Console Logging**: Add console.log statements in Function blocks to track loop progress - **Monitor Memory Usage**: Watch for growing data structures that might cause performance issues -By mastering loops, you can create much more sophisticated and powerful workflows in Sim Studio. \ No newline at end of file +By mastering loops, you can create much more sophisticated and powerful workflows in Sim Studio. diff --git a/docs/content/docs/execution/meta.json b/apps/docs/content/docs/execution/meta.json similarity index 97% rename from docs/content/docs/execution/meta.json rename to apps/docs/content/docs/execution/meta.json index 4d3a531c3..b7738fded 100644 --- a/docs/content/docs/execution/meta.json +++ b/apps/docs/content/docs/execution/meta.json @@ -1,4 +1,4 @@ { "title": "Execution", "pages": ["basics", "loops", "advanced"] -} \ No newline at end of file +} diff --git a/apps/docs/content/docs/getting-started/index.mdx b/apps/docs/content/docs/getting-started/index.mdx new file mode 100644 index 000000000..08f9615a9 --- /dev/null +++ b/apps/docs/content/docs/getting-started/index.mdx @@ -0,0 +1,101 @@ +--- +title: Getting Started +description: Build, test, and optimize your agentic workflows +--- + +import { Callout } from 'fumadocs-ui/components/callout' +import { Card, Cards } from 'fumadocs-ui/components/card' +import { File, Files, Folder } from 'fumadocs-ui/components/files' +import { Step, Steps } from 
'fumadocs-ui/components/steps' +import { Tab, Tabs } from 'fumadocs-ui/components/tabs' +import { + AgentIcon, + ApiIcon, + ChartBarIcon, + CodeIcon, + ConditionalIcon, + ConnectIcon, + ExaAIIcon, + FirecrawlIcon, + GmailIcon, + NotionIcon, + PerplexityIcon, + SlackIcon, +} from '@/components/icons' + +Sim Studio is a powerful, user-friendly platform for building, testing, and optimizing your agentic workflows. This documentation will help you understand how to use the various components of Sim Studio to create sophisticated agent-based applications. + + + This guide will walk you through the essential concepts and help you get started building your + first workflow. + + +## Core Components + +Sim Studio is built around two primary components: + +### Blocks + +Blocks are the fundamental building elements of your workflows. Each block serves a specific purpose: + + + } + annotation="Create AI agents using any LLM provider" + /> + } + annotation="Connect to external services and APIs" + /> + } + annotation="Add conditional branching to your workflows" + /> + } + annotation="Execute custom JavaScript/TypeScript code" + /> + } + annotation="Assess responses against defined criteria" + /> + } + annotation="Direct workflow execution based on input analysis" + /> + + +### Tools + +Tools extend the capabilities of agents. They provide additional functionality for agents by enabling you to interface with your favorite data sources and take action (e.g posting on X, sending an email) + + + } /> + } /> + } /> + } /> + } /> + } /> + + +## Getting Started + + + + Start by creating a new workflow in the Sim Studio dashboard. + + Drag and drop a block from the sidebar onto the canvas. + + Set up the block's parameters and inputs according to your needs. + + + Create connections between blocks to define the flow of data and execution. + + Run your workflow with test inputs to verify its behavior. 
+ diff --git a/docs/content/docs/introduction/index.mdx b/apps/docs/content/docs/introduction/index.mdx similarity index 91% rename from docs/content/docs/introduction/index.mdx rename to apps/docs/content/docs/introduction/index.mdx index 0e0d1398b..a86d00612 100644 --- a/docs/content/docs/introduction/index.mdx +++ b/apps/docs/content/docs/introduction/index.mdx @@ -4,7 +4,7 @@ description: The UI for agents --- import { Card, Cards } from 'fumadocs-ui/components/card' -import { Files, Folder, File } from 'fumadocs-ui/components/files' +import { File, Files, Folder } from 'fumadocs-ui/components/files' import { Features } from '@/components/ui/features' Sim Studio is a powerful platform for building, testing, and optimizing agentic workflows. It provides developers with intuitive tools to design sophisticated agent-based applications through a visual interface. Whether you're prototyping a simple AI assistant or building complex multi-agent systems, Sim Studio offers the flexibility and performance needed for modern AI applications. @@ -29,16 +29,12 @@ Existing frameworks abstract away provider-specific features, forcing developers Sim Studio stays close to provider definitions, directly exposing the parameters that matter: -
                          +
                          • System prompts and instructions with native formatting
                          • -
                          • - Tool definitions and access patterns that match provider implementations -
                          • +
                          • Tool definitions and access patterns that match provider implementations
                          • Temperature and sampling parameters with their full range of options
                          • Structured output formatting that aligns with provider capabilities
                          • -
                          • - Model selection and configuration with provider-specific optimizations -
                          • +
                          • Model selection and configuration with provider-specific optimizations
                          This approach gives you full control over agent behavior without unnecessary complexity. You leverage each provider's full capabilities without sacrificing the convenience of a unified platform. @@ -67,14 +63,9 @@ Existing solutions provide limited visibility into agent performance, making it Sim Studio provides full visibility into agent performance with integrated observability: -
                            -
                          • - Detailed execution logs capturing every interaction between agents and - models -
                          • -
                          • - Latency tracing with span visualization to identify performance bottlenecks -
                          • +
                              +
                            • Detailed execution logs capturing every interaction between agents and models
                            • +
                            • Latency tracing with span visualization to identify performance bottlenecks
                            • Cost tracking and optimization to prevent budget overruns
                            • Error analysis and debugging tools for complex workflows
                            • Performance comparisons across different model configurations
                            • diff --git a/docs/content/docs/introduction/meta.json b/apps/docs/content/docs/introduction/meta.json similarity index 96% rename from docs/content/docs/introduction/meta.json rename to apps/docs/content/docs/introduction/meta.json index 064d490e3..3963f661d 100644 --- a/docs/content/docs/introduction/meta.json +++ b/apps/docs/content/docs/introduction/meta.json @@ -1,4 +1,4 @@ { "title": "Introduction", "pages": ["index"] -} \ No newline at end of file +} diff --git a/docs/content/docs/meta.json b/apps/docs/content/docs/meta.json similarity index 100% rename from docs/content/docs/meta.json rename to apps/docs/content/docs/meta.json diff --git a/docs/content/docs/tools/airtable.mdx b/apps/docs/content/docs/tools/airtable.mdx similarity index 56% rename from docs/content/docs/tools/airtable.mdx rename to apps/docs/content/docs/tools/airtable.mdx index 6778eeb29..7e864890b 100644 --- a/docs/content/docs/tools/airtable.mdx +++ b/apps/docs/content/docs/tools/airtable.mdx @@ -3,9 +3,9 @@ title: Airtable description: Read, create, and update Airtable --- -import { BlockInfoCard } from "@/components/ui/block-info-card" +import { BlockInfoCard } from '@/components/ui/block-info-card' - + iconSvg={` - - + + C195.835999,149.785568 198.051407,147.877594 200.266830,145.969620 z" + /> `} /> @@ -194,13 +193,10 @@ With Clay, you can: In Sim Studio, the Clay integration allows your agents to push structured data into Clay tables via webhooks. This makes it easy to collect, enrich, and manage dynamic outputs such as leads, research summaries, or action itemsβ€”all in a collaborative, spreadsheet-like interface. Your agents can populate rows in real time, enabling asynchronous workflows where AI-generated insights are captured, reviewed, and used by your team. Whether you're automating research, enriching CRM data, or tracking operational outcomes, Clay becomes a living data layer that interacts intelligently with your agents. 
By connecting Sim Studio with Clay, you gain a powerful way to operationalize agent results, loop over datasets with precision, and maintain a clean, auditable record of AI-driven work. {/* MANUAL-CONTENT-END */} - ## Usage Instructions Populate Clay workbook with data using a JSON or plain text. Enables direct communication and notifications with channel confirmation. - - ## Tools ### `clay_populate` @@ -209,37 +205,32 @@ Populate Clay with data from a JSON file. Enables direct communication and notif #### Input -| Parameter | Type | Required | Description | -| --------- | ---- | -------- | ----------- | -| `webhookURL` | string | Yes | The webhook URL to populate | -| `data` | json | Yes | The data to populate | -| `authToken` | string | No | Optional auth token for WebhookURL | +| Parameter | Type | Required | Description | +| ------------ | ------ | -------- | ---------------------------------- | +| `webhookURL` | string | Yes | The webhook URL to populate | +| `data` | json | Yes | The data to populate | +| `authToken` | string | No | Optional auth token for WebhookURL | #### Output -| Parameter | Type | -| --------- | ---- | -| `data` | string | - - +| Parameter | Type | +| --------- | ------ | +| `data` | string | ## Block Configuration ### Input -| Parameter | Type | Required | Description | -| --------- | ---- | -------- | ----------- | -| `authToken` | string | Yes | Auth Token - Enter your Clay Auth token | - - +| Parameter | Type | Required | Description | +| ----------- | ------ | -------- | --------------------------------------- | +| `authToken` | string | Yes | Auth Token - Enter your Clay Auth token | ### Outputs -| Output | Type | Description | -| ------ | ---- | ----------- | +| Output | Type | Description | +| ---------- | ------ | -------------------- | | `response` | object | Output from response | -| ↳ `data` | any | data of the response | - +| ↳ `data` | any | data of the response | ## Notes diff --git 
a/docs/content/docs/tools/confluence.mdx b/apps/docs/content/docs/tools/confluence.mdx similarity index 53% rename from docs/content/docs/tools/confluence.mdx rename to apps/docs/content/docs/tools/confluence.mdx index ccc0ccf8b..fe766ba0d 100644 --- a/docs/content/docs/tools/confluence.mdx +++ b/apps/docs/content/docs/tools/confluence.mdx @@ -3,9 +3,9 @@ title: Confluence description: Interact with Confluence --- -import { BlockInfoCard } from "@/components/ui/block-info-card" +import { BlockInfoCard } from '@/components/ui/block-info-card' - - - + iconSvg={` + + `} /> @@ -29,13 +36,10 @@ With ElevenLabs, you can: In Sim Studio, the ElevenLabs integration enables your agents to convert text to lifelike speech, enhancing the interactivity and engagement of your applications. This is particularly valuable for creating voice assistants, generating audio content, developing accessible applications, or building conversational interfaces that feel more human. The integration allows you to seamlessly incorporate ElevenLabs' advanced speech synthesis capabilities into your agent workflows, bridging the gap between text-based AI and natural human communication. {/* MANUAL-CONTENT-END */} - ## Usage Instructions Generate realistic speech from text using ElevenLabs voices. 
- - ## Tools ### `elevenlabs_tts` @@ -44,39 +48,34 @@ Convert TTS using ElevenLabs voices #### Input -| Parameter | Type | Required | Description | -| --------- | ---- | -------- | ----------- | -| `apiKey` | string | Yes | Your ElevenLabs API key | -| `text` | string | Yes | The text to convert to speech | -| `voiceId` | string | Yes | The ID of the voice to use | -| `modelId` | string | No | The ID of the model to use \(defaults to eleven_monolingual_v1\) | +| Parameter | Type | Required | Description | +| --------- | ------ | -------- | ---------------------------------------------------------------- | +| `apiKey` | string | Yes | Your ElevenLabs API key | +| `text` | string | Yes | The text to convert to speech | +| `voiceId` | string | Yes | The ID of the voice to use | +| `modelId` | string | No | The ID of the model to use \(defaults to eleven_monolingual_v1\) | #### Output -| Parameter | Type | -| --------- | ---- | +| Parameter | Type | +| ---------- | ------ | | `audioUrl` | string | - - ## Block Configuration ### Input -| Parameter | Type | Required | Description | -| --------- | ---- | -------- | ----------- | -| `text` | string | No | Text - Enter the text to convert to speech | - - +| Parameter | Type | Required | Description | +| --------- | ------ | -------- | ------------------------------------------ | +| `text` | string | No | Text - Enter the text to convert to speech | ### Outputs -| Output | Type | Description | -| ------ | ---- | ----------- | -| `response` | object | Output from response | +| Output | Type | Description | +| ------------ | ------ | ------------------------ | +| `response` | object | Output from response | | ↳ `audioUrl` | string | audioUrl of the response | - ## Notes - Category: `tools` diff --git a/docs/content/docs/tools/exa.mdx b/apps/docs/content/docs/tools/exa.mdx similarity index 72% rename from docs/content/docs/tools/exa.mdx rename to apps/docs/content/docs/tools/exa.mdx index c2615a199..32267ea5d 100644 --- 
a/docs/content/docs/tools/exa.mdx +++ b/apps/docs/content/docs/tools/exa.mdx @@ -3,9 +3,9 @@ title: Exa description: Search with Exa AI --- -import { BlockInfoCard } from "@/components/ui/block-info-card" +import { BlockInfoCard } from '@/components/ui/block-info-card' - + + `} +/> + +{/* MANUAL-CONTENT-START:intro */} +[GitHub](https://github.com/) is the world's leading platform for software development and version control using Git. It provides a collaborative environment where developers can host and review code, manage projects, and build software together. + +With GitHub, you can: + +- **Host repositories**: Store your code in public or private repositories with version control +- **Collaborate on code**: Use pull requests to propose changes, review code, and merge contributions +- **Track issues**: Create, assign, and manage issues to organize work and track bugs +- **Automate workflows**: Use GitHub Actions to build, test, and deploy code automatically +- **Manage projects**: Organize work with project boards, milestones, and task tracking +- **Document code**: Create and maintain documentation with GitHub Pages and wikis + +In Sim Studio, the GitHub integration enables your agents to interact directly with GitHub repositories and workflows. This allows for powerful automation scenarios such as code review assistance, pull request management, issue tracking, and repository exploration. Your agents can fetch repository data, analyze code changes, post comments on pull requests, and perform other GitHub operations programmatically. This integration bridges the gap between your AI workflows and your development processes, enabling seamless collaboration between your agents and your development team. +{/* MANUAL-CONTENT-END */} + +## Usage Instructions + +Access GitHub repositories, pull requests, and comments through the GitHub API. Automate code reviews, PR management, and repository interactions within your workflow. 
+ +## Tools + +### `github_pr` + +Fetch PR details including diff and files changed + +#### Input + +| Parameter | Type | Required | Description | +| ------------ | ------ | -------- | ------------------- | +| `owner` | string | Yes | Repository owner | +| `repo` | string | Yes | Repository name | +| `pullNumber` | number | Yes | Pull request number | +| `apiKey` | string | Yes | GitHub API token | + +#### Output + +| Parameter | Type | +| ------------ | ------ | +| `metadata` | string | +| `title` | string | +| `state` | string | +| `html_url` | string | +| `diff_url` | string | +| `created_at` | string | +| `updated_at` | string | +| `files` | string | +| `additions` | string | +| `deletions` | string | +| `changes` | string | +| `patch` | string | +| `blob_url` | string | +| `raw_url` | string | +| `status` | string | + +### `github_comment` + +Create comments on GitHub PRs + +#### Input + +| Parameter | Type | Required | Description | +| ------------- | ------ | -------- | ---------------------------------------------- | +| `owner` | string | Yes | Repository owner | +| `repo` | string | Yes | Repository name | +| `pullNumber` | number | Yes | Pull request number | +| `body` | string | Yes | Comment content | +| `path` | string | No | File path for review comment | +| `position` | number | No | Line number for review comment | +| `apiKey` | string | Yes | GitHub API token | +| `commentType` | string | No | Type of comment \(pr_comment or file_comment\) | +| `line` | number | No | Line number for review comment | +| `side` | string | No | Side of the diff \(LEFT or RIGHT\) | +| `commitId` | string | No | The SHA of the commit to comment on | + +#### Output + +| Parameter | Type | +| ------------ | ------ | +| `metadata` | string | +| `html_url` | string | +| `created_at` | string | +| `updated_at` | string | +| `path` | string | +| `line` | string | +| `side` | string | +| `commit_id` | string | + +### `github_repo_info` + +Retrieve comprehensive GitHub 
repository metadata including stars, forks, issues, and primary language. Supports both public and private repositories with optional authentication. + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ------ | -------- | ----------------------------------------- | +| `owner` | string | Yes | Repository owner \(user or organization\) | +| `repo` | string | Yes | Repository name | +| `apiKey` | string | Yes | GitHub Personal Access Token | + +#### Output + +| Parameter | Type | +| ------------- | ------ | +| `metadata` | string | +| `description` | string | +| `stars` | string | +| `forks` | string | +| `openIssues` | string | +| `language` | string | + +### `github_latest_commit` + +Retrieve the latest commit from a GitHub repository + +#### Input + +| Parameter | Type | Required | Description | +| --------- | ------ | -------- | ----------------------------------------- | +| `owner` | string | Yes | Repository owner \(user or organization\) | +| `repo` | string | Yes | Repository name | +| `branch` | string | No | Branch name \(defaults to the repository | +| `apiKey` | string | Yes | GitHub API token | + +#### Output + +| Parameter | Type | +| ---------------- | ------ | +| `metadata` | string | +| `html_url` | string | +| `commit_message` | string | +| `author` | string | +| `login` | string | +| `avatar_url` | string | + +## Block Configuration + +### Input + +| Parameter | Type | Required | Description | +| ----------- | ------ | -------- | ----------- | +| `operation` | string | Yes | Operation | + +### Outputs + +| Output | Type | Description | +| ------------ | ------ | ------------------------ | +| `response` | object | Output from response | +| ↳ `content` | string | content of the response | +| ↳ `metadata` | json | metadata of the response | + +## Notes + +- Category: `tools` +- Type: `github` diff --git a/docs/content/docs/tools/gmail.mdx b/apps/docs/content/docs/tools/gmail.mdx similarity index 61% rename from 
docs/content/docs/tools/gmail.mdx rename to apps/docs/content/docs/tools/gmail.mdx index 803bd3016..50b19ab16 100644 --- a/docs/content/docs/tools/gmail.mdx +++ b/apps/docs/content/docs/tools/gmail.mdx @@ -3,9 +3,9 @@ title: Gmail description: Send Gmail --- -import { BlockInfoCard } from "@/components/ui/block-info-card" +import { BlockInfoCard } from '@/components/ui/block-info-card' - - - - + d="M43.611,20.083H42V20H24v8h11.303c-1.649,4.657-6.08,8-11.303,8c-6.627,0-12-5.373-12-12 s5.373-12,12-12c3.059,0,5.842,1.154,7.961,3.039l5.657-5.657C34.046,6.053,29.268,4,24,4C12.955,4,4,12.955,4,24s8.955,20,20,20 s20-8.955,20-20C44,22.659,43.862,21.35,43.611,20.083z" + /> + + + `} /> @@ -43,13 +43,10 @@ With Google Search, you can: In Sim Studio, the Google Search integration enables your agents to search the web programmatically and incorporate search results into their workflows. This allows for powerful automation scenarios such as research, fact-checking, data gathering, and information synthesis. Your agents can formulate search queries, retrieve relevant results, and extract information from those results to make decisions or generate insights. This integration bridges the gap between your AI workflows and the vast information available on the web, enabling your agents to access up-to-date information from across the internet. By connecting Sim Studio with Google Search, you can create agents that stay informed with the latest information, verify facts, conduct research, and provide users with relevant web content - all without leaving your workflow. {/* MANUAL-CONTENT-END */} - ## Usage Instructions Searches the web using the Google Custom Search API, which provides high-quality search results from the entire internet or a specific site defined by a custom search engine ID. 
- - ## Tools ### `google_search` @@ -58,34 +55,30 @@ Search the web with the Custom Search API #### Input -| Parameter | Type | Required | Description | -| --------- | ---- | -------- | ----------- | -| `query` | string | Yes | The search query to execute | -| `apiKey` | string | Yes | Google API key | -| `searchEngineId` | string | Yes | Custom Search Engine ID | -| `num` | string | No | Number of results to return \(default: 10, max: 10\) | +| Parameter | Type | Required | Description | +| ---------------- | ------ | -------- | ---------------------------------------------------- | +| `query` | string | Yes | The search query to execute | +| `apiKey` | string | Yes | Google API key | +| `searchEngineId` | string | Yes | Custom Search Engine ID | +| `num` | string | No | Number of results to return \(default: 10, max: 10\) | #### Output -| Parameter | Type | -| --------- | ---- | -| `items` | string | -| `searchInformation` | string | -| `searchTime` | string | -| `formattedSearchTime` | string | +| Parameter | Type | +| ----------------------- | ------ | +| `items` | string | +| `searchInformation` | string | +| `searchTime` | string | +| `formattedSearchTime` | string | | `formattedTotalResults` | string | - - ## Block Configuration ### Input -| Parameter | Type | Required | Description | -| --------- | ---- | -------- | ----------- | -| `query` | string | No | Search Query - Enter your search query | - - +| Parameter | Type | Required | Description | +| --------- | ------ | -------- | -------------------------------------- | +| `query` | string | No | Search Query - Enter your search query | ### Outputs diff --git a/apps/docs/content/docs/tools/google_sheets.mdx b/apps/docs/content/docs/tools/google_sheets.mdx new file mode 100644 index 000000000..f8c3e7e1e --- /dev/null +++ b/apps/docs/content/docs/tools/google_sheets.mdx @@ -0,0 +1,172 @@ +--- +title: Google Sheets +description: Read, write, and update data +--- + +import { BlockInfoCard } from 
'@/components/ui/block-info-card' + + + + + + + `} +/> + +{/* MANUAL-CONTENT-START:intro */} +[Google Sheets](https://sheets.google.com) is a powerful cloud-based spreadsheet application that allows users to create, edit, and collaborate on spreadsheets in real-time. As part of Google's productivity suite, Google Sheets offers a versatile platform for data organization, analysis, and visualization with robust formatting, formula, and sharing capabilities. + +With Google Sheets, you can: + +- **Create and edit spreadsheets**: Develop data-driven documents with comprehensive formatting and calculation options +- **Collaborate in real-time**: Work simultaneously with multiple users on the same spreadsheet +- **Analyze data**: Use formulas, functions, and pivot tables to process and understand your data +- **Visualize information**: Create charts, graphs, and conditional formatting to represent data visually +- **Access anywhere**: Use Google Sheets across devices with automatic cloud synchronization +- **Work offline**: Continue working without internet connection with changes syncing when back online +- **Integrate with other services**: Connect with Google Drive, Forms, and third-party applications + +In Sim Studio, the Google Sheets integration enables your agents to interact directly with spreadsheet data programmatically. This allows for powerful automation scenarios such as data extraction, analysis, reporting, and management. Your agents can read existing spreadsheets to extract information, write to spreadsheets to update data, and create new spreadsheets from scratch. This integration bridges the gap between your AI workflows and data management, enabling seamless interaction with structured data. By connecting Sim Studio with Google Sheets, you can automate data workflows, generate reports, extract insights from data, and maintain up-to-date information - all through your intelligent agents. 
The integration supports various data formats and range specifications, making it flexible enough to handle diverse data management needs while maintaining the collaborative and accessible nature of Google Sheets. +{/* MANUAL-CONTENT-END */} + +## Usage Instructions + +Integrate Google Sheets functionality to manage spreadsheet data. Read data from specific ranges, write new data, update existing cells, and append data to the end of sheets using OAuth authentication. Supports various input and output formats for flexible data handling. + +## Tools + +### `google_sheets_read` + +Read data from a Google Sheets spreadsheet + +#### Input + +| Parameter | Type | Required | Description | +| --------------- | ------ | -------- | ------------------------------------------ | +| `accessToken` | string | Yes | The access token for the Google Sheets API | +| `spreadsheetId` | string | Yes | The ID of the spreadsheet to read from | +| `range` | string | No | The range of cells to read from | + +#### Output + +| Parameter | Type | +| --------- | ---- | +| `data` | json | + +### `google_sheets_write` + +Write data to a Google Sheets spreadsheet + +#### Input + +| Parameter | Type | Required | Description | +| ------------------------- | ------- | -------- | ----------------------------------------------------- | +| `accessToken` | string | Yes | The access token for the Google Sheets API | +| `spreadsheetId` | string | Yes | The ID of the spreadsheet to write to | +| `range` | string | No | The range of cells to write to | +| `values` | array | Yes | The data to write to the spreadsheet | +| `valueInputOption` | string | No | The format of the data to write | +| `includeValuesInResponse` | boolean | No | Whether to include the written values in the response | + +#### Output + +| Parameter | Type | +| ---------------- | ------ | +| `updatedRange` | string | +| `updatedRows` | string | +| `updatedColumns` | string | +| `updatedCells` | string | +| `metadata` | string | +| 
`spreadsheetId` | string | +| `spreadsheetUrl` | string | + +### `google_sheets_update` + +Update data in a Google Sheets spreadsheet + +#### Input + +| Parameter | Type | Required | Description | +| ------------------------- | ------- | -------- | ----------------------------------------------------- | +| `accessToken` | string | Yes | The access token for the Google Sheets API | +| `spreadsheetId` | string | Yes | The ID of the spreadsheet to update | +| `range` | string | No | The range of cells to update | +| `values` | array | Yes | The data to update in the spreadsheet | +| `valueInputOption` | string | No | The format of the data to update | +| `includeValuesInResponse` | boolean | No | Whether to include the updated values in the response | + +#### Output + +| Parameter | Type | +| ---------------- | ------ | +| `updatedRange` | string | +| `updatedRows` | string | +| `updatedColumns` | string | +| `updatedCells` | string | +| `metadata` | string | +| `spreadsheetId` | string | +| `spreadsheetUrl` | string | + +### `google_sheets_append` + +Append data to the end of a Google Sheets spreadsheet + +#### Input + +| Parameter | Type | Required | Description | +| ------------------------- | ------- | -------- | ------------------------------------------------------ | +| `accessToken` | string | Yes | The access token for the Google Sheets API | +| `spreadsheetId` | string | Yes | The ID of the spreadsheet to append to | +| `range` | string | No | The range of cells to append after | +| `values` | array | Yes | The data to append to the spreadsheet | +| `valueInputOption` | string | No | The format of the data to append | +| `insertDataOption` | string | No | How to insert the data \(OVERWRITE or INSERT_ROWS\) | +| `includeValuesInResponse` | boolean | No | Whether to include the appended values in the response | + +#### Output + +| Parameter | Type | +| --------- | ---- | +| `data` | json | + +## Block Configuration + +### Input + +| Parameter | Type | Required 
| Description | +| ----------- | ------ | -------- | ----------- | +| `operation` | string | Yes | Operation | + +### Outputs + +| Output | Type | Description | +| ------------------ | ------ | ------------------------------ | +| `response` | object | Output from response | +| ↳ `data` | json | data of the response | +| ↳ `metadata` | json | metadata of the response | +| ↳ `updatedRange` | string | updatedRange of the response | +| ↳ `updatedRows` | number | updatedRows of the response | +| ↳ `updatedColumns` | number | updatedColumns of the response | +| ↳ `updatedCells` | number | updatedCells of the response | +| ↳ `tableRange` | string | tableRange of the response | + +## Notes + +- Category: `tools` +- Type: `google_sheets` diff --git a/docs/content/docs/tools/guesty.mdx b/apps/docs/content/docs/tools/guesty.mdx similarity index 70% rename from docs/content/docs/tools/guesty.mdx rename to apps/docs/content/docs/tools/guesty.mdx index ef7910e94..6d3504569 100644 --- a/docs/content/docs/tools/guesty.mdx +++ b/apps/docs/content/docs/tools/guesty.mdx @@ -3,9 +3,9 @@ title: Guesty description: Interact with Guesty property management system --- -import { BlockInfoCard } from "@/components/ui/block-info-card" +import { BlockInfoCard } from '@/components/ui/block-info-card' - - As Standalone Blocks: Tools can be added as individual blocks on the canvas when you need deterministic, direct access to their functionality. This gives you precise control over when and how the tool is called. + As Standalone Blocks: Tools can be added as individual blocks on the canvas + when you need deterministic, direct access to their functionality. This gives you precise + control over when and how the tool is called. - As Agent Tools: Tools can be added to Agent blocks by clicking "Add tools" and configuring the required parameters. This allows agents to dynamically choose which tools to use based on the context and requirements of the task. 
+ As Agent Tools: Tools can be added to Agent blocks by clicking "Add tools" and + configuring the required parameters. This allows agents to dynamically choose which tools to use + based on the context and requirements of the task. @@ -57,4 +61,4 @@ Tools typically return structured data that can be processed by subsequent block - Metadata about the operation - Status information -Refer to each tool's specific documentation to understand its exact output format. \ No newline at end of file +Refer to each tool's specific documentation to understand its exact output format. diff --git a/docs/content/docs/tools/jina.mdx b/apps/docs/content/docs/tools/jina.mdx similarity index 94% rename from docs/content/docs/tools/jina.mdx rename to apps/docs/content/docs/tools/jina.mdx index 6c994ac5c..51ff42676 100644 --- a/docs/content/docs/tools/jina.mdx +++ b/apps/docs/content/docs/tools/jina.mdx @@ -3,9 +3,9 @@ title: Jina description: Convert website content into text --- -import { BlockInfoCard } from "@/components/ui/block-info-card" +import { BlockInfoCard } from '@/components/ui/block-info-card' -
                            @@ -200,19 +210,20 @@ export default function TermsOfService() {

                            7. Disclaimer

                            - Your use of the Service is at your sole risk. The Service is provided on an "AS IS" - and "AS AVAILABLE" basis. The Service is provided without warranties of any kind, - whether express or implied, including, but not limited to, implied warranties of - merchantability, fitness for a particular purpose, non-infringement or course of - performance. + Your use of the Service is at your sole risk. The Service is provided on an "AS + IS" and "AS AVAILABLE" basis. The Service is provided without warranties of any + kind, whether express or implied, including, but not limited to, implied + warranties of merchantability, fitness for a particular purpose, non-infringement + or course of performance.

                            - Sim Studio, Inc, its subsidiaries, affiliates, and its licensors do not warrant that: + Sim Studio, Inc, its subsidiaries, affiliates, and its licensors do not warrant + that:

                            • - The Service will function uninterrupted, secure or available at any particular time - or location; + The Service will function uninterrupted, secure or available at any particular + time or location;
                            • Any errors or defects will be corrected;
                            • The Service is free of viruses or other harmful components; or
                            • @@ -223,14 +234,14 @@ export default function TermsOfService() {

                              8. Governing Law

                              - These Terms shall be governed and construed in accordance with the laws of the United - States, without regard to its conflict of law provisions. + These Terms shall be governed and construed in accordance with the laws of the + United States, without regard to its conflict of law provisions.

                              - Our failure to enforce any right or provision of these Terms will not be considered a - waiver of those rights. If any provision of these Terms is held to be invalid or - unenforceable by a court, the remaining provisions of these Terms will remain in - effect. + Our failure to enforce any right or provision of these Terms will not be + considered a waiver of those rights. If any provision of these Terms is held to be + invalid or unenforceable by a court, the remaining provisions of these Terms will + remain in effect.

                              @@ -239,18 +250,19 @@ export default function TermsOfService() {

                              Please read the following arbitration agreement carefully. It requires you to arbitrate disputes with Sim Studio, Inc, its parent companies, subsidiaries, - affiliates, successors and assigns and all of their respective officers, directors, - employees, agents, and representatives (collectively, the "Company Parties") and - limits the manner in which you can seek relief from the Company Parties. + affiliates, successors and assigns and all of their respective officers, + directors, employees, agents, and representatives (collectively, the{' '} + "Company Parties") and limits the manner + in which you can seek relief from the Company Parties.

                              - You agree that any dispute between you and any of the Company Parties relating to the - Site, the Service or these Terms will be resolved by binding arbitration, rather than - in court, except that (1) you and the Company Parties may assert individualized claims - in small claims court if the claims qualify, remain in such court and advance solely - on an individual, non-class basis; and (2) you or the Company Parties may seek - equitable relief in court for infringement or other misuse of intellectual property - rights. + You agree that any dispute between you and any of the Company Parties relating to + the Site, the Service or these Terms will be resolved by binding arbitration, + rather than in court, except that (1) you and the Company Parties may assert + individualized claims in small claims court if the claims qualify, remain in such + court and advance solely on an individual, non-class basis; and (2) you or the + Company Parties may seek equitable relief in court for infringement or other + misuse of intellectual property rights.

                              The Federal Arbitration Act governs the interpretation and enforcement of this @@ -258,14 +270,20 @@ export default function TermsOfService() { alternative dispute resolution provider.

                              - YOU AND COMPANY AGREE THAT EACH OF US MAY BRING CLAIMS AGAINST THE OTHER ONLY ON AN - INDIVIDUAL BASIS AND NOT ON A CLASS, REPRESENTATIVE, OR COLLECTIVE BASIS. ONLY - INDIVIDUAL RELIEF IS AVAILABLE, AND DISPUTES OF MORE THAN ONE CUSTOMER OR USER CANNOT - BE ARBITRATED OR CONSOLIDATED WITH THOSE OF ANY OTHER CUSTOMER OR USER. + YOU AND COMPANY AGREE THAT EACH OF US MAY BRING CLAIMS AGAINST THE OTHER ONLY ON + AN INDIVIDUAL BASIS AND NOT ON A CLASS, REPRESENTATIVE, OR COLLECTIVE BASIS. ONLY + INDIVIDUAL RELIEF IS AVAILABLE, AND DISPUTES OF MORE THAN ONE CUSTOMER OR USER + CANNOT BE ARBITRATED OR CONSOLIDATED WITH THOSE OF ANY OTHER CUSTOMER OR USER.

                              You have the right to opt out of the provisions of this Arbitration Agreement by - sending a timely written notice of your decision to opt out to: legal@simstudio.ai + sending a timely written notice of your decision to opt out to:{' '} + + legal@simstudio.ai{' '} + within 30 days after first becoming subject to this Arbitration Agreement.

                            @@ -273,45 +291,53 @@ export default function TermsOfService() {

                            10. Changes to Terms

                            - We reserve the right, at our sole discretion, to modify or replace these Terms at any - time. If a revision is material, we will try to provide at least 30 days' notice prior - to any new terms taking effect. What constitutes a material change will be determined - at our sole discretion. + We reserve the right, at our sole discretion, to modify or replace these Terms at + any time. If a revision is material, we will try to provide at least 30 days' + notice prior to any new terms taking effect. What constitutes a material change + will be determined at our sole discretion.

                            - By continuing to access or use our Service after those revisions become effective, you - agree to be bound by the revised terms. If you do not agree to the new terms, please - stop using the Service. + By continuing to access or use our Service after those revisions become effective, + you agree to be bound by the revised terms. If you do not agree to the new terms, + please stop using the Service.

                            11. Copyright Policy

                            - We respect the intellectual property of others and ask that users of our Service do - the same. If you believe that one of our users is, through the use of our Service, - unlawfully infringing the copyright(s) in a work, please send a notice to our - designated Copyright Agent, including the following information: + We respect the intellectual property of others and ask that users of our Service + do the same. If you believe that one of our users is, through the use of our + Service, unlawfully infringing the copyright(s) in a work, please send a notice to + our designated Copyright Agent, including the following information:

                            • Your physical or electronic signature;
                            • Identification of the copyrighted work(s) that you claim to have been infringed;
                            • -
                            • Identification of the material on our services that you claim is infringing;
                            • +
                            • + Identification of the material on our services that you claim is infringing; +
                            • Your address, telephone number, and e-mail address;
                            • A statement that you have a good-faith belief that the disputed use is not authorized by the copyright owner, its agent, or the law; and
                            • - A statement, made under the penalty of perjury, that the above information in your - notice is accurate and that you are the copyright owner or authorized to act on the - copyright owner's behalf. + A statement, made under the penalty of perjury, that the above information in + your notice is accurate and that you are the copyright owner or authorized to + act on the copyright owner's behalf.

                            - Our Copyright Agent can be reached at: copyright@simstudio.ai + Our Copyright Agent can be reached at:{' '} + + copyright@simstudio.ai +

                            @@ -319,7 +345,10 @@ export default function TermsOfService() {

                            12. Contact Us

                            If you have any questions about these Terms, please contact us at:{' '} - + legal@simstudio.ai

                            @@ -328,10 +357,10 @@ export default function TermsOfService() {
                    - + {/* Footer */}
                    -
                    +
                    ) diff --git a/sim/app/admin/page.tsx b/apps/sim/app/admin/page.tsx similarity index 94% rename from sim/app/admin/page.tsx rename to apps/sim/app/admin/page.tsx index 97f3d7de2..b38c7ef6f 100644 --- a/sim/app/admin/page.tsx +++ b/apps/sim/app/admin/page.tsx @@ -10,10 +10,10 @@ export default function AdminPage() { Manage Sim Studio platform settings and users.

                - +
                -

                Waitlist Management

                diff --git a/sim/app/admin/password-auth.tsx b/apps/sim/app/admin/password-auth.tsx similarity index 100% rename from sim/app/admin/password-auth.tsx rename to apps/sim/app/admin/password-auth.tsx diff --git a/sim/app/admin/waitlist/components/batch-actions/batch-actions.tsx b/apps/sim/app/admin/waitlist/components/batch-actions/batch-actions.tsx similarity index 87% rename from sim/app/admin/waitlist/components/batch-actions/batch-actions.tsx rename to apps/sim/app/admin/waitlist/components/batch-actions/batch-actions.tsx index 445700913..f80257978 100644 --- a/sim/app/admin/waitlist/components/batch-actions/batch-actions.tsx +++ b/apps/sim/app/admin/waitlist/components/batch-actions/batch-actions.tsx @@ -1,5 +1,5 @@ -import { Button } from '@/components/ui/button' import { CheckSquareIcon, SquareIcon, UserCheckIcon, XIcon } from 'lucide-react' +import { Button } from '@/components/ui/button' interface BatchActionsProps { hasSelectedEmails: boolean @@ -22,13 +22,13 @@ export function BatchActions({ entriesExist, someSelected, }: BatchActionsProps) { - if (!entriesExist) return null; - + if (!entriesExist) return null + return (
                - + {hasSelectedEmails && ( <> - + )}
                ) -} \ No newline at end of file +} diff --git a/sim/app/admin/waitlist/components/batch-results-modal/batch-results-modal.tsx b/apps/sim/app/admin/waitlist/components/batch-results-modal/batch-results-modal.tsx similarity index 76% rename from sim/app/admin/waitlist/components/batch-results-modal/batch-results-modal.tsx rename to apps/sim/app/admin/waitlist/components/batch-results-modal/batch-results-modal.tsx index 7d3aad84a..8369f5139 100644 --- a/sim/app/admin/waitlist/components/batch-results-modal/batch-results-modal.tsx +++ b/apps/sim/app/admin/waitlist/components/batch-results-modal/batch-results-modal.tsx @@ -33,9 +33,7 @@ export function BatchResultsModal({ Batch Approval Results - - Results of the batch approval operation. - + Results of the batch approval operation.
                {results && results.length > 0 ? ( @@ -43,21 +41,25 @@ export function BatchResultsModal({
                Total: {results.length} - Success: {results.filter(r => r.success).length} / - Failed: {results.filter(r => !r.success).length} + Success: {results.filter((r) => r.success).length} / Failed:{' '} + {results.filter((r) => !r.success).length}
                {results.map((result, idx) => ( -
                - {result.success ? : } + {result.success ? ( + + ) : ( + + )} {result.email}
                {result.message}
                @@ -69,11 +71,9 @@ export function BatchResultsModal({ )}
                - + ) -} \ No newline at end of file +} diff --git a/sim/app/admin/waitlist/components/filter-bar/components/filter-button.tsx b/apps/sim/app/admin/waitlist/components/filter-bar/components/filter-button.tsx similarity index 99% rename from sim/app/admin/waitlist/components/filter-bar/components/filter-button.tsx rename to apps/sim/app/admin/waitlist/components/filter-bar/components/filter-button.tsx index 601a9aad4..647dc1744 100644 --- a/sim/app/admin/waitlist/components/filter-bar/components/filter-button.tsx +++ b/apps/sim/app/admin/waitlist/components/filter-bar/components/filter-button.tsx @@ -1,5 +1,5 @@ -import { Button } from '@/components/ui/button' import { ReactNode } from 'react' +import { Button } from '@/components/ui/button' interface FilterButtonProps { active: boolean @@ -21,4 +21,4 @@ export function FilterButton({ active, onClick, icon, label, className }: Filter {label} ) -} \ No newline at end of file +} diff --git a/sim/app/admin/waitlist/components/filter-bar/filter-bar.tsx b/apps/sim/app/admin/waitlist/components/filter-bar/filter-bar.tsx similarity index 96% rename from sim/app/admin/waitlist/components/filter-bar/filter-bar.tsx rename to apps/sim/app/admin/waitlist/components/filter-bar/filter-bar.tsx index 70a16b843..cfde1c120 100644 --- a/sim/app/admin/waitlist/components/filter-bar/filter-bar.tsx +++ b/apps/sim/app/admin/waitlist/components/filter-bar/filter-bar.tsx @@ -1,9 +1,4 @@ -import { - UserIcon, - UserCheckIcon, - UserXIcon, - CheckIcon -} from 'lucide-react' +import { CheckIcon, UserCheckIcon, UserIcon, UserXIcon } from 'lucide-react' import { FilterButton } from './components/filter-button' interface FilterBarProps { @@ -71,4 +66,4 @@ export function FilterBar({ currentStatus, onStatusChange }: FilterBarProps) { />
                ) -} \ No newline at end of file +} diff --git a/sim/app/admin/waitlist/components/pagination/pagination.tsx b/apps/sim/app/admin/waitlist/components/pagination/pagination.tsx similarity index 99% rename from sim/app/admin/waitlist/components/pagination/pagination.tsx rename to apps/sim/app/admin/waitlist/components/pagination/pagination.tsx index 9307251e9..ef0032b8b 100644 --- a/sim/app/admin/waitlist/components/pagination/pagination.tsx +++ b/apps/sim/app/admin/waitlist/components/pagination/pagination.tsx @@ -1,10 +1,10 @@ -import { Button } from '@/components/ui/button' import { ChevronLeftIcon, ChevronRightIcon, ChevronsLeftIcon, ChevronsRightIcon, } from 'lucide-react' +import { Button } from '@/components/ui/button' interface PaginationProps { page: number @@ -28,7 +28,7 @@ export function Pagination({ onLastPage, }: PaginationProps) { const totalPages = Math.max(1, Math.ceil(totalItems / itemsPerPage)) - + return (
                @@ -84,4 +84,4 @@ export function Pagination({
                ) -} \ No newline at end of file +} diff --git a/sim/app/admin/waitlist/components/search-bar/search-bar.tsx b/apps/sim/app/admin/waitlist/components/search-bar/search-bar.tsx similarity index 97% rename from sim/app/admin/waitlist/components/search-bar/search-bar.tsx rename to apps/sim/app/admin/waitlist/components/search-bar/search-bar.tsx index 772b2dbde..fe6ca4f06 100644 --- a/sim/app/admin/waitlist/components/search-bar/search-bar.tsx +++ b/apps/sim/app/admin/waitlist/components/search-bar/search-bar.tsx @@ -1,6 +1,6 @@ import { useRef, useState } from 'react' -import { Input } from '@/components/ui/input' import { SearchIcon } from 'lucide-react' +import { Input } from '@/components/ui/input' interface SearchBarProps { initialValue: string @@ -13,7 +13,7 @@ export function SearchBar({ initialValue = '', onSearch, disabled = false, - placeholder = 'Search by email...' + placeholder = 'Search by email...', }: SearchBarProps) { const [searchInputValue, setSearchInputValue] = useState(initialValue) const searchTimeoutRef = useRef(null) @@ -46,4 +46,4 @@ export function SearchBar({ />
                ) -} \ No newline at end of file +} diff --git a/sim/app/admin/waitlist/components/waitlist-alert/waitlist-alert.tsx b/apps/sim/app/admin/waitlist/components/waitlist-alert/waitlist-alert.tsx similarity index 72% rename from sim/app/admin/waitlist/components/waitlist-alert/waitlist-alert.tsx rename to apps/sim/app/admin/waitlist/components/waitlist-alert/waitlist-alert.tsx index 84f2a186d..6169e0703 100644 --- a/sim/app/admin/waitlist/components/waitlist-alert/waitlist-alert.tsx +++ b/apps/sim/app/admin/waitlist/components/waitlist-alert/waitlist-alert.tsx @@ -1,6 +1,6 @@ -import { Button } from '@/components/ui/button' -import { Alert, AlertDescription, AlertTitle } from '@/components/ui/alert' import { AlertCircleIcon } from 'lucide-react' +import { Alert, AlertDescription, AlertTitle } from '@/components/ui/alert' +import { Button } from '@/components/ui/button' type AlertType = 'error' | 'email-error' | 'rate-limit' | null @@ -13,14 +13,10 @@ interface WaitlistAlertProps { export function WaitlistAlert({ type, message, onDismiss, onRefresh }: WaitlistAlertProps) { if (!type) return null - + return ( @@ -35,25 +31,15 @@ export function WaitlistAlert({ type, message, onDismiss, onRefresh }: WaitlistA {message}
                {onRefresh && ( - )} -
                ) -} \ No newline at end of file +} diff --git a/sim/app/admin/waitlist/components/waitlist-table/waitlist-table.tsx b/apps/sim/app/admin/waitlist/components/waitlist-table/waitlist-table.tsx similarity index 96% rename from sim/app/admin/waitlist/components/waitlist-table/waitlist-table.tsx rename to apps/sim/app/admin/waitlist/components/waitlist-table/waitlist-table.tsx index 2819fd96a..cf0d4895b 100644 --- a/sim/app/admin/waitlist/components/waitlist-table/waitlist-table.tsx +++ b/apps/sim/app/admin/waitlist/components/waitlist-table/waitlist-table.tsx @@ -1,3 +1,14 @@ +import { + CheckIcon, + CheckSquareIcon, + InfoIcon, + MailIcon, + RotateCcwIcon, + SquareIcon, + UserCheckIcon, + UserXIcon, + XIcon, +} from 'lucide-react' import { Badge } from '@/components/ui/badge' import { Button } from '@/components/ui/button' import { @@ -9,17 +20,6 @@ import { TableRow, } from '@/components/ui/table' import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip' -import { - CheckIcon, - InfoIcon, - MailIcon, - RotateCcwIcon, - UserCheckIcon, - UserXIcon, - XIcon, - CheckSquareIcon, - SquareIcon, -} from 'lucide-react' interface WaitlistEntry { id: string @@ -70,10 +70,7 @@ export function WaitlistTable({ {entries.map((entry) => ( - + {/* Add selection checkbox */} {status !== 'approved' && ( @@ -91,7 +88,7 @@ export function WaitlistTable({ )} - + {entry.email} {/* Status badge */} @@ -167,9 +164,7 @@ export function WaitlistTable({ )} - - Resend approval email with sign-up link - + Resend approval email with sign-up link )} @@ -224,4 +219,4 @@ export function WaitlistTable({
                ) -} \ No newline at end of file +} diff --git a/sim/app/admin/waitlist/page.tsx b/apps/sim/app/admin/waitlist/page.tsx similarity index 100% rename from sim/app/admin/waitlist/page.tsx rename to apps/sim/app/admin/waitlist/page.tsx diff --git a/sim/app/admin/waitlist/stores/store.ts b/apps/sim/app/admin/waitlist/stores/store.ts similarity index 99% rename from sim/app/admin/waitlist/stores/store.ts rename to apps/sim/app/admin/waitlist/stores/store.ts index 1a865fcf7..1508d6798 100644 --- a/sim/app/admin/waitlist/stores/store.ts +++ b/apps/sim/app/admin/waitlist/stores/store.ts @@ -119,7 +119,7 @@ export const useWaitlistStore = create((set, get) => ({ if (!response.ok) { let errorMessage = `Error ${response.status}: ${response.statusText}` - + // Try to parse the error message from the response body try { const errorData = await response.json() @@ -129,7 +129,7 @@ export const useWaitlistStore = create((set, get) => ({ } catch (parseError) { console.error('Failed to parse error response:', parseError) } - + throw new Error(errorMessage) } diff --git a/sim/app/admin/waitlist/waitlist.tsx b/apps/sim/app/admin/waitlist/waitlist.tsx similarity index 95% rename from sim/app/admin/waitlist/waitlist.tsx rename to apps/sim/app/admin/waitlist/waitlist.tsx index a557d89f6..1a414280f 100644 --- a/sim/app/admin/waitlist/waitlist.tsx +++ b/apps/sim/app/admin/waitlist/waitlist.tsx @@ -2,23 +2,19 @@ import { useCallback, useEffect, useState } from 'react' import { useRouter, useSearchParams } from 'next/navigation' -import { - AlertCircleIcon, - InfoIcon, - RotateCcwIcon, -} from 'lucide-react' +import { AlertCircleIcon, InfoIcon, RotateCcwIcon } from 'lucide-react' import { Alert, AlertDescription } from '@/components/ui/alert' import { Button } from '@/components/ui/button' import { Skeleton } from '@/components/ui/skeleton' import { Logger } from '@/lib/logs/console-logger' -import { useWaitlistStore } from './stores/store' -import { FilterBar } from 
'./components/filter-bar/filter-bar' -import { SearchBar } from './components/search-bar/search-bar' -import { WaitlistAlert } from './components/waitlist-alert/waitlist-alert' -import { Pagination } from './components/pagination/pagination' import { BatchActions } from './components/batch-actions/batch-actions' import { BatchResultsModal } from './components/batch-results-modal/batch-results-modal' +import { FilterBar } from './components/filter-bar/filter-bar' +import { Pagination } from './components/pagination/pagination' +import { SearchBar } from './components/search-bar/search-bar' +import { WaitlistAlert } from './components/waitlist-alert/waitlist-alert' import { WaitlistTable as WaitlistDataTable } from './components/waitlist-table/waitlist-table' +import { useWaitlistStore } from './stores/store' const logger = new Logger('WaitlistTable') @@ -345,19 +341,23 @@ export function WaitlistTable() { const [selectedEmails, setSelectedEmails] = useState>({}) const [showBatchDialog, setShowBatchDialog] = useState(false) const [batchActionLoading, setBatchActionLoading] = useState(false) - const [batchResults, setBatchResults] = useState | null>(null) + const [batchResults, setBatchResults] = useState | null>(null) // Helper to check if any emails are selected const hasSelectedEmails = Object.values(selectedEmails).some(Boolean) - + // Count of selected emails const selectedEmailsCount = Object.values(selectedEmails).filter(Boolean).length // Toggle selection of a single email const toggleEmailSelection = (email: string) => { - setSelectedEmails(prev => ({ + setSelectedEmails((prev) => ({ ...prev, - [email]: !prev[email] + [email]: !prev[email], })) } @@ -368,17 +368,17 @@ export function WaitlistTable() { // Select/deselect all visible emails const toggleSelectAll = () => { - if (filteredEntries.some(entry => selectedEmails[entry.email])) { + if (filteredEntries.some((entry) => selectedEmails[entry.email])) { // If any are selected, deselect all const 
newSelection = { ...selectedEmails } - filteredEntries.forEach(entry => { + filteredEntries.forEach((entry) => { newSelection[entry.email] = false }) setSelectedEmails(newSelection) } else { // Select all visible entries const newSelection = { ...selectedEmails } - filteredEntries.forEach(entry => { + filteredEntries.forEach((entry) => { newSelection[entry.email] = true }) setSelectedEmails(newSelection) @@ -454,20 +454,20 @@ export function WaitlistTable() { // Success setShowBatchDialog(true) setBatchResults(data.results || []) - + // Clear selections for successfully approved emails if (data.results && Array.isArray(data.results)) { const successfulEmails = data.results .filter((result: { success: boolean }) => result.success) .map((result: { email: string }) => result.email) - + if (successfulEmails.length > 0) { const newSelection = { ...selectedEmails } successfulEmails.forEach((email: string) => { newSelection[email] = false }) setSelectedEmails(newSelection) - + // Refresh the entries to show updated statuses fetchEntries() } @@ -498,22 +498,15 @@ export function WaitlistTable() { {/* Top bar with filters, search and refresh */}
                {/* Filter buttons in a single row */} - + {/* Search and refresh aligned to the right */}
                - - - - - diff --git a/sim/app/layout.tsx b/apps/sim/app/layout.tsx similarity index 100% rename from sim/app/layout.tsx rename to apps/sim/app/layout.tsx diff --git a/sim/app/page.tsx b/apps/sim/app/page.tsx similarity index 100% rename from sim/app/page.tsx rename to apps/sim/app/page.tsx diff --git a/sim/app/telemetry-consent-dialog.tsx b/apps/sim/app/telemetry-consent-dialog.tsx similarity index 100% rename from sim/app/telemetry-consent-dialog.tsx rename to apps/sim/app/telemetry-consent-dialog.tsx diff --git a/sim/app/w/[id]/components/code-prompt-bar/code-prompt-bar.tsx b/apps/sim/app/w/[id]/components/code-prompt-bar/code-prompt-bar.tsx similarity index 96% rename from sim/app/w/[id]/components/code-prompt-bar/code-prompt-bar.tsx rename to apps/sim/app/w/[id]/components/code-prompt-bar/code-prompt-bar.tsx index f4d28b49d..f48322368 100644 --- a/sim/app/w/[id]/components/code-prompt-bar/code-prompt-bar.tsx +++ b/apps/sim/app/w/[id]/components/code-prompt-bar/code-prompt-bar.tsx @@ -1,8 +1,8 @@ +import { useEffect, useRef, useState } from 'react' import { SendIcon, XIcon } from 'lucide-react' import { Button } from '@/components/ui/button' import { Input } from '@/components/ui/input' import { cn } from '@/lib/utils' -import { useEffect, useRef, useState } from 'react' interface CodePromptBarProps { isVisible: boolean @@ -46,11 +46,11 @@ export function CodePromptBar({ // Handle click outside const handleClickOutside = (event: MouseEvent) => { if ( - promptBarRef.current && - !promptBarRef.current.contains(event.target as Node) && - isVisible && - !isStreaming && - !isLoading && + promptBarRef.current && + !promptBarRef.current.contains(event.target as Node) && + isVisible && + !isStreaming && + !isLoading && !isExiting ) { handleCancel() @@ -59,7 +59,7 @@ export function CodePromptBar({ // Add event listener document.addEventListener('mousedown', handleClickOutside) - + // Cleanup event listener return () => { 
document.removeEventListener('mousedown', handleClickOutside) @@ -73,7 +73,7 @@ export function CodePromptBar({ } }, [isVisible]) - if ((!isVisible && !isStreaming) && !isExiting) { + if (!isVisible && !isStreaming && !isExiting) { return null } diff --git a/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/chat-deploy/chat-deploy.tsx b/apps/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/chat-deploy/chat-deploy.tsx similarity index 97% rename from sim/app/w/[id]/components/control-bar/components/deploy-modal/components/chat-deploy/chat-deploy.tsx rename to apps/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/chat-deploy/chat-deploy.tsx index fb95ee9d3..d3f055be3 100644 --- a/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/chat-deploy/chat-deploy.tsx +++ b/apps/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/chat-deploy/chat-deploy.tsx @@ -31,11 +31,11 @@ import { Label } from '@/components/ui/label' import { Skeleton } from '@/components/ui/skeleton' import { Textarea } from '@/components/ui/textarea' import { createLogger } from '@/lib/logs/console-logger' -import { cn } from '@/lib/utils' import { getBaseDomain } from '@/lib/urls/utils' +import { cn } from '@/lib/utils' import { useNotificationStore } from '@/stores/notifications/store' -import { OutputSelect } from '@/app/w/[id]/components/panel/components/chat/components/output-select/output-select' import { OutputConfig } from '@/stores/panel/chat/types' +import { OutputSelect } from '@/app/w/[id]/components/panel/components/chat/components/output-select/output-select' const logger = createLogger('ChatDeploy') @@ -196,7 +196,9 @@ export function ChatDeploy({ const subdomainChanged = subdomain !== originalValues.subdomain const titleChanged = title !== originalValues.title const descriptionChanged = description !== originalValues.description - const outputBlockChanged = 
selectedOutputBlocks.some((blockId) => !originalValues.selectedOutputIds.includes(blockId)) + const outputBlockChanged = selectedOutputBlocks.some( + (blockId) => !originalValues.selectedOutputIds.includes(blockId) + ) const welcomeMessageChanged = welcomeMessage !== (existingChat.customizations?.welcomeMessage || 'Hi there! How can I help you today?') @@ -268,8 +270,10 @@ export function ChatDeploy({ description: chatDetail.description || '', authType: chatDetail.authType || 'public', emails: Array.isArray(chatDetail.allowedEmails) ? [...chatDetail.allowedEmails] : [], - selectedOutputIds: Array.isArray(chatDetail.outputConfigs) - ? chatDetail.outputConfigs.map((config: OutputConfig) => `${config.blockId}_${config.path}`) + selectedOutputIds: Array.isArray(chatDetail.outputConfigs) + ? chatDetail.outputConfigs.map( + (config: OutputConfig) => `${config.blockId}_${config.path}` + ) : [], }) @@ -282,10 +286,10 @@ export function ChatDeploy({ // Inside the fetchExistingChat function - update how we load output configs if (chatDetail.outputConfigs) { - const configs = Array.isArray(chatDetail.outputConfigs) ? chatDetail.outputConfigs as OutputConfig[] : [] - const combinedOutputIds = configs.map(config => - `${config.blockId}_${config.path}` - ) + const configs = Array.isArray(chatDetail.outputConfigs) + ? 
(chatDetail.outputConfigs as OutputConfig[]) + : [] + const combinedOutputIds = configs.map((config) => `${config.blockId}_${config.path}`) setSelectedOutputBlocks(combinedOutputIds) } @@ -605,13 +609,13 @@ export function ChatDeploy({ // Add output block configuration if selected if (selectedOutputBlocks && selectedOutputBlocks.length > 0) { const outputConfigs = selectedOutputBlocks - .map(outputId => { + .map((outputId) => { const firstUnderscoreIndex = outputId.indexOf('_') // Only process IDs that have the correct blockId_path format if (firstUnderscoreIndex !== -1) { const blockId = outputId.substring(0, firstUnderscoreIndex) const path = outputId.substring(firstUnderscoreIndex + 1) - + // Additional validation to ensure both parts are non-empty if (blockId && path) { return { blockId, path } as OutputConfig @@ -619,18 +623,20 @@ export function ChatDeploy({ logger.warn(`Invalid output format: ${outputId}, missing blockId or path`) return null } - logger.warn(`Invalid output ID format: ${outputId}, missing required format blockId_path`) + logger.warn( + `Invalid output ID format: ${outputId}, missing required format blockId_path` + ) return null }) .filter(Boolean) as OutputConfig[] // Remove any null values - + // Only include output configurations if we have valid ones if (outputConfigs.length > 0) { payload.outputConfigs = outputConfigs - + logger.info('Added output configuration to payload:', { outputConfigsCount: outputConfigs.length, - outputConfigs: outputConfigs + outputConfigs: outputConfigs, }) } else { logger.warn('No valid output configurations found in selection') @@ -730,11 +736,11 @@ export function ChatDeploy({ if (chatUrl) { logger.info(`Chat ${existingChat ? 
'updated' : 'deployed'} successfully:`, chatUrl) setDeployedChatUrl(chatUrl) - + if (onDeploymentComplete) { onDeploymentComplete() } - + if (onChatExistsChange) { onChatExistsChange(true) } @@ -819,7 +825,7 @@ export function ChatDeploy({ const url = new URL(deployedChatUrl) const hostname = url.hostname const isDevelopmentUrl = hostname.includes('localhost') - + let domainSuffix if (isDevelopmentUrl) { const baseDomain = getBaseDomain() @@ -829,7 +835,7 @@ export function ChatDeploy({ } else { domainSuffix = '.simstudio.ai' } - + const subdomainPart = isDevelopmentUrl ? hostname.split('.')[0] : hostname.split('.simstudio.ai')[0] @@ -854,7 +860,17 @@ export function ChatDeploy({ {domainSuffix}
                -

                Your chat is now live at this URL

                +

                + Your chat is now live at{' '} + + this URL + +

            ) diff --git a/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deploy-form/deploy-form.tsx b/apps/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deploy-form/deploy-form.tsx similarity index 100% rename from sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deploy-form/deploy-form.tsx rename to apps/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deploy-form/deploy-form.tsx diff --git a/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/api-endpoint/api-endpoint.tsx b/apps/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/api-endpoint/api-endpoint.tsx similarity index 100% rename from sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/api-endpoint/api-endpoint.tsx rename to apps/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/api-endpoint/api-endpoint.tsx diff --git a/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/api-key/api-key.tsx b/apps/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/api-key/api-key.tsx similarity index 100% rename from sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/api-key/api-key.tsx rename to apps/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/api-key/api-key.tsx diff --git a/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/deploy-status/deploy-status.tsx b/apps/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/deploy-status/deploy-status.tsx similarity index 100% rename from 
sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/deploy-status/deploy-status.tsx rename to apps/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/deploy-status/deploy-status.tsx diff --git a/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/example-command/example-command.tsx b/apps/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/example-command/example-command.tsx similarity index 100% rename from sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/example-command/example-command.tsx rename to apps/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/example-command/example-command.tsx diff --git a/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/deployment-info.tsx b/apps/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/deployment-info.tsx similarity index 92% rename from sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/deployment-info.tsx rename to apps/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/deployment-info.tsx index af0c279f6..b1482d6fe 100644 --- a/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/deployment-info.tsx +++ b/apps/sim/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/deployment-info.tsx @@ -15,12 +15,12 @@ import { } from '@/components/ui/alert-dialog' import { Button } from '@/components/ui/button' import { Skeleton } from '@/components/ui/skeleton' +import { useNotificationStore } from '@/stores/notifications/store' import { ApiEndpoint } from 
'@/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/api-endpoint/api-endpoint' import { ApiKey } from '@/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/api-key/api-key' import { DeployStatus } from '@/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/deploy-status/deploy-status' import { ExampleCommand } from '@/app/w/[id]/components/control-bar/components/deploy-modal/components/deployment-info/components/example-command/example-command' import { DeployedWorkflowModal } from '../../../deployment-controls/components/deployed-workflow-modal' -import { useNotificationStore } from '@/stores/notifications/store' interface DeploymentInfoProps { isLoading: boolean @@ -54,32 +54,24 @@ export function DeploymentInfo({ const handleViewDeployed = async () => { if (!workflowId) { - addNotification( - 'error', - 'Cannot view deployment: Workflow ID is missing', - null - ) + addNotification('error', 'Cannot view deployment: Workflow ID is missing', null) return } try { const response = await fetch(`/api/workflows/${workflowId}/deployed`) - + if (!response.ok) { throw new Error('Failed to fetch deployed workflow') } const data = await response.json() - + if (data && data.deployedState) { setDeployedWorkflowState(data.deployedState) setIsViewingDeployed(true) } else { - addNotification( - 'error', - 'Failed to view deployment: No deployment state found', - workflowId - ) + addNotification('error', 'Failed to view deployment: No deployment state found', workflowId) } } catch (error) { console.error('Error fetching deployed workflow:', error) @@ -137,11 +129,7 @@ export function DeploymentInfo({
            - {deploymentInfo.needsRedeployment && ( @@ -161,8 +149,8 @@ export function DeploymentInfo({ Undeploy API - Are you sure you want to undeploy this workflow? This will remove the API endpoint - and make it unavailable to external users. + Are you sure you want to undeploy this workflow? This will remove the API + endpoint and make it unavailable to external users. diff --git a/sim/app/w/[id]/components/control-bar/components/deploy-modal/deploy-modal.tsx b/apps/sim/app/w/[id]/components/control-bar/components/deploy-modal/deploy-modal.tsx similarity index 99% rename from sim/app/w/[id]/components/control-bar/components/deploy-modal/deploy-modal.tsx rename to apps/sim/app/w/[id]/components/control-bar/components/deploy-modal/deploy-modal.tsx index 43785aca9..b2ea2d836 100644 --- a/sim/app/w/[id]/components/control-bar/components/deploy-modal/deploy-modal.tsx +++ b/apps/sim/app/w/[id]/components/control-bar/components/deploy-modal/deploy-modal.tsx @@ -496,8 +496,8 @@ export function DeployModal({ } const handleChatDeploymentComplete = () => { - setChatSubmitting(false); - }; + setChatSubmitting(false) + } // Render deployed chat view const renderDeployedChatView = () => { diff --git a/sim/app/w/[id]/components/control-bar/components/deployment-controls/components/deployed-workflow-card.tsx b/apps/sim/app/w/[id]/components/control-bar/components/deployment-controls/components/deployed-workflow-card.tsx similarity index 100% rename from sim/app/w/[id]/components/control-bar/components/deployment-controls/components/deployed-workflow-card.tsx rename to apps/sim/app/w/[id]/components/control-bar/components/deployment-controls/components/deployed-workflow-card.tsx index 2e06694e2..9f728486d 100644 --- a/sim/app/w/[id]/components/control-bar/components/deployment-controls/components/deployed-workflow-card.tsx +++ b/apps/sim/app/w/[id]/components/control-bar/components/deployment-controls/components/deployed-workflow-card.tsx @@ -1,10 +1,10 @@ 'use client' 
import { useState } from 'react' -import { Card, CardContent, CardHeader } from '@/components/ui/card' import { Button } from '@/components/ui/button' -import { WorkflowPreview } from '@/app/w/components/workflow-preview/generic-workflow-preview' +import { Card, CardContent, CardHeader } from '@/components/ui/card' import { cn } from '@/lib/utils' +import { WorkflowPreview } from '@/app/w/components/workflow-preview/generic-workflow-preview' interface DeployedWorkflowCardProps { // Current workflow state (if any) diff --git a/sim/app/w/[id]/components/control-bar/components/deployment-controls/components/deployed-workflow-modal.tsx b/apps/sim/app/w/[id]/components/control-bar/components/deployment-controls/components/deployed-workflow-modal.tsx similarity index 82% rename from sim/app/w/[id]/components/control-bar/components/deployment-controls/components/deployed-workflow-modal.tsx rename to apps/sim/app/w/[id]/components/control-bar/components/deployment-controls/components/deployed-workflow-modal.tsx index a95192539..340445940 100644 --- a/sim/app/w/[id]/components/control-bar/components/deployment-controls/components/deployed-workflow-modal.tsx +++ b/apps/sim/app/w/[id]/components/control-bar/components/deployment-controls/components/deployed-workflow-modal.tsx @@ -1,5 +1,18 @@ 'use client' +import { useState } from 'react' +import { + AlertDialog, + AlertDialogAction, + AlertDialogCancel, + AlertDialogContent, + AlertDialogDescription, + AlertDialogFooter, + AlertDialogHeader, + AlertDialogTitle, + AlertDialogTrigger, +} from '@/components/ui/alert-dialog' +import { Button } from '@/components/ui/button' import { Dialog, DialogContent, @@ -7,13 +20,10 @@ import { DialogHeader, DialogTitle, } from '@/components/ui/dialog' -import { Button } from '@/components/ui/button' -import { AlertDialog, AlertDialogAction, AlertDialogCancel, AlertDialogContent, AlertDialogDescription, AlertDialogFooter, AlertDialogHeader, AlertDialogTitle, AlertDialogTrigger } from 
'@/components/ui/alert-dialog' -import { DeployedWorkflowCard } from './deployed-workflow-card' -import { useWorkflowStore } from '@/stores/workflows/workflow/store' -import { useState } from 'react' -import { mergeSubblockState } from '@/stores/workflows/utils' import { useWorkflowRegistry } from '@/stores/workflows/registry/store' +import { mergeSubblockState } from '@/stores/workflows/utils' +import { useWorkflowStore } from '@/stores/workflows/workflow/store' +import { DeployedWorkflowCard } from './deployed-workflow-card' interface DeployedWorkflowModalProps { isOpen: boolean @@ -49,7 +59,7 @@ export function DeployedWorkflowModal({ return ( - e.preventDefault()} @@ -61,7 +71,7 @@ export function DeployedWorkflowModal({ Deployed Workflow
            - + - + - + Revert to Deployed Version? - This will replace your current workflow with the deployed version. - Any unsaved changes will be lost. This action cannot be undone. + This will replace your current workflow with the deployed version. Any unsaved + changes will be lost. This action cannot be undone. Cancel - Revert @@ -104,4 +109,4 @@ export function DeployedWorkflowModal({ ) -} \ No newline at end of file +} diff --git a/sim/app/w/[id]/components/control-bar/components/deployment-controls/deployment-controls.tsx b/apps/sim/app/w/[id]/components/control-bar/components/deployment-controls/deployment-controls.tsx similarity index 99% rename from sim/app/w/[id]/components/control-bar/components/deployment-controls/deployment-controls.tsx rename to apps/sim/app/w/[id]/components/control-bar/components/deployment-controls/deployment-controls.tsx index 5026f955c..7e78c465b 100644 --- a/sim/app/w/[id]/components/control-bar/components/deployment-controls/deployment-controls.tsx +++ b/apps/sim/app/w/[id]/components/control-bar/components/deployment-controls/deployment-controls.tsx @@ -77,4 +77,4 @@ export function DeploymentControls({ /> ) -} \ No newline at end of file +} diff --git a/sim/app/w/[id]/components/control-bar/components/history-dropdown-item/history-dropdown-item.tsx b/apps/sim/app/w/[id]/components/control-bar/components/history-dropdown-item/history-dropdown-item.tsx similarity index 100% rename from sim/app/w/[id]/components/control-bar/components/history-dropdown-item/history-dropdown-item.tsx rename to apps/sim/app/w/[id]/components/control-bar/components/history-dropdown-item/history-dropdown-item.tsx diff --git a/sim/app/w/[id]/components/control-bar/components/marketplace-modal/marketplace-modal.tsx b/apps/sim/app/w/[id]/components/control-bar/components/marketplace-modal/marketplace-modal.tsx similarity index 100% rename from sim/app/w/[id]/components/control-bar/components/marketplace-modal/marketplace-modal.tsx rename to 
apps/sim/app/w/[id]/components/control-bar/components/marketplace-modal/marketplace-modal.tsx diff --git a/sim/app/w/[id]/components/control-bar/components/notification-dropdown-item/notification-dropdown-item.tsx b/apps/sim/app/w/[id]/components/control-bar/components/notification-dropdown-item/notification-dropdown-item.tsx similarity index 100% rename from sim/app/w/[id]/components/control-bar/components/notification-dropdown-item/notification-dropdown-item.tsx rename to apps/sim/app/w/[id]/components/control-bar/components/notification-dropdown-item/notification-dropdown-item.tsx diff --git a/sim/app/w/[id]/components/control-bar/control-bar.tsx b/apps/sim/app/w/[id]/components/control-bar/control-bar.tsx similarity index 100% rename from sim/app/w/[id]/components/control-bar/control-bar.tsx rename to apps/sim/app/w/[id]/components/control-bar/control-bar.tsx diff --git a/sim/app/w/[id]/components/copilot/copilot.tsx b/apps/sim/app/w/[id]/components/copilot/copilot.tsx similarity index 100% rename from sim/app/w/[id]/components/copilot/copilot.tsx rename to apps/sim/app/w/[id]/components/copilot/copilot.tsx diff --git a/sim/app/w/[id]/components/error/index.tsx b/apps/sim/app/w/[id]/components/error/index.tsx similarity index 100% rename from sim/app/w/[id]/components/error/index.tsx rename to apps/sim/app/w/[id]/components/error/index.tsx diff --git a/sim/app/w/[id]/components/notifications/notifications.tsx b/apps/sim/app/w/[id]/components/notifications/notifications.tsx similarity index 99% rename from sim/app/w/[id]/components/notifications/notifications.tsx rename to apps/sim/app/w/[id]/components/notifications/notifications.tsx index 37812add8..db72b1b35 100644 --- a/sim/app/w/[id]/components/notifications/notifications.tsx +++ b/apps/sim/app/w/[id]/components/notifications/notifications.tsx @@ -639,4 +639,4 @@ export function NotificationAlert({ notification, isFading, onHide }: Notificati /> ) -} \ No newline at end of file +} diff --git 
a/sim/app/w/[id]/components/panel/components/chat/chat.tsx b/apps/sim/app/w/[id]/components/panel/components/chat/chat.tsx similarity index 91% rename from sim/app/w/[id]/components/panel/components/chat/chat.tsx rename to apps/sim/app/w/[id]/components/panel/components/chat/chat.tsx index 912098bbc..06acf7600 100644 --- a/sim/app/w/[id]/components/panel/components/chat/chat.tsx +++ b/apps/sim/app/w/[id]/components/panel/components/chat/chat.tsx @@ -5,16 +5,16 @@ import { ArrowUp } from 'lucide-react' import { Button } from '@/components/ui/button' import { Input } from '@/components/ui/input' import { ScrollArea } from '@/components/ui/scroll-area' +import { buildTraceSpans } from '@/lib/logs/trace-spans' import { useExecutionStore } from '@/stores/execution/store' import { useChatStore } from '@/stores/panel/chat/store' import { useConsoleStore } from '@/stores/panel/console/store' import { useWorkflowRegistry } from '@/stores/workflows/registry/store' +import { BlockLog } from '@/executor/types' +import { calculateCost } from '@/providers/utils' import { useWorkflowExecution } from '../../../../hooks/use-workflow-execution' import { ChatMessage } from './components/chat-message/chat-message' import { OutputSelect } from './components/output-select/output-select' -import { BlockLog } from '@/executor/types' -import { calculateCost } from '@/providers/utils' -import { buildTraceSpans } from '@/lib/logs/trace-spans' interface ChatProps { panelWidth: number @@ -24,13 +24,13 @@ interface ChatProps { export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) { const { activeWorkflowId } = useWorkflowRegistry() - const { - messages, - addMessage, - selectedWorkflowOutputs, + const { + messages, + addMessage, + selectedWorkflowOutputs, setSelectedWorkflowOutput, appendMessageContent, - finalizeMessageStream + finalizeMessageStream, } = useChatStore() const { entries } = useConsoleStore() const messagesEndRef = useRef(null) @@ -59,21 +59,21 @@ export 
function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) { const selectedOutputs = useMemo(() => { if (!activeWorkflowId) return [] const selected = selectedWorkflowOutputs[activeWorkflowId] - + if (!selected || selected.length === 0) { const defaultSelection = outputEntries.length > 0 ? [outputEntries[0].id] : [] return defaultSelection } - + // Ensure we have no duplicates in the selection const dedupedSelection = [...new Set(selected)] - + // If deduplication removed items, update the store if (dedupedSelection.length !== selected.length) { setSelectedWorkflowOutput(activeWorkflowId, dedupedSelection) return dedupedSelection } - + return selected }, [selectedWorkflowOutputs, activeWorkflowId, outputEntries, setSelectedWorkflowOutput]) @@ -103,40 +103,40 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) { // Execute the workflow to generate a response, passing the chat message as input const result = await handleRunWorkflow({ input: sentMessage }) - + // Check if we got a streaming response if (result && 'stream' in result && result.stream instanceof ReadableStream) { // Generate a unique ID for the message const messageId = crypto.randomUUID() - + // Create a content buffer to collect initial content let initialContent = '' let fullContent = '' // Store the complete content for updating logs later let hasAddedMessage = false let executionResult = (result as any).execution // Store the execution result with type assertion - + try { // Process the stream const reader = result.stream.getReader() const decoder = new TextDecoder() - - console.log("Starting to read from stream") - + + console.log('Starting to read from stream') + while (true) { try { const { done, value } = await reader.read() if (done) { - console.log("Stream complete") + console.log('Stream complete') break } - + // Decode and append chunk const chunk = decoder.decode(value, { stream: true }) // Use stream option - + if (chunk) { initialContent += chunk 
fullContent += chunk - + // Only add the message to UI once we have some actual content to show if (!hasAddedMessage && initialContent.trim().length > 0) { // Add message with initial content - cast to any to bypass type checking for id @@ -145,7 +145,7 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) { workflowId: activeWorkflowId, type: 'workflow', isStreaming: true, - id: messageId + id: messageId, } as any) hasAddedMessage = true } else if (hasAddedMessage) { @@ -159,45 +159,46 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) { break } } - + // If we never added a message (no content received), add it now if (!hasAddedMessage && initialContent.trim().length > 0) { addMessage({ content: initialContent, workflowId: activeWorkflowId, type: 'workflow', - id: messageId + id: messageId, } as any) } - + // Update logs with the full streaming content if available if (executionResult && fullContent.trim().length > 0) { try { // Format the final content properly to match what's shown for manual executions // Include all the markdown and formatting from the streamed response const formattedContent = fullContent - + // Calculate cost based on token usage if available let costData = undefined - + if (executionResult.output?.response?.tokens) { const tokens = executionResult.output.response.tokens const model = executionResult.output?.response?.model || 'gpt-4o' const cost = calculateCost( - model, - tokens.prompt || 0, - tokens.completion || 0, + model, + tokens.prompt || 0, + tokens.completion || 0, false // Don't use cached input for chat responses ) costData = { ...cost, model } as any } - + // Build trace spans and total duration before persisting const { traceSpans, totalDuration } = buildTraceSpans(executionResult as any) - + // Create a completed execution ID - const completedExecutionId = executionResult.metadata?.executionId || crypto.randomUUID() - + const completedExecutionId = + 
executionResult.metadata?.executionId || crypto.randomUUID() + // Import the workflow execution hook for direct access to the workflow service const workflowExecutionApi = await fetch(`/api/workflows/${activeWorkflowId}/log`, { method: 'POST', @@ -216,19 +217,20 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) { model: executionResult.output?.response?.model, tokens: executionResult.output?.response?.tokens, toolCalls: executionResult.output?.response?.toolCalls, - providerTiming: executionResult.output?.response?.providerTiming, + providerTiming: executionResult.output?.response?.providerTiming, cost: costData || executionResult.output?.response?.cost, - } + }, }, cost: costData, // Update the message to include the formatted content logs: (executionResult.logs || []).map((log: BlockLog) => { // Check if this is the streaming block by comparing with the selected output IDs // Selected output IDs typically include the block ID we are streaming from - const isStreamingBlock = selectedOutputs.some(outputId => - outputId === log.blockId || outputId.startsWith(`${log.blockId}_`) + const isStreamingBlock = selectedOutputs.some( + (outputId) => + outputId === log.blockId || outputId.startsWith(`${log.blockId}_`) ) - + if (isStreamingBlock && log.blockType === 'agent' && log.output?.response) { return { ...log, @@ -239,8 +241,8 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) { content: formattedContent, providerTiming: log.output.response.providerTiming, cost: costData || log.output.response.cost, - } - } + }, + }, } } return log @@ -255,10 +257,10 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) { }, traceSpans: traceSpans, totalDuration: totalDuration, - } + }, }), }) - + if (!workflowExecutionApi.ok) { console.error('Failed to log complete streaming execution') } @@ -268,21 +270,21 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) { } } catch 
(error) { console.error('Error processing stream:', error) - + // If there's an error and we haven't added a message yet, add an error message if (!hasAddedMessage) { addMessage({ - content: "Error: Failed to process the streaming response.", + content: 'Error: Failed to process the streaming response.', workflowId: activeWorkflowId, type: 'workflow', - id: messageId + id: messageId, } as any) } else { // Otherwise append the error to the existing message - appendMessageContent(messageId, "\n\nError: Failed to process the streaming response.") + appendMessageContent(messageId, '\n\nError: Failed to process the streaming response.') } } finally { - console.log("Finalizing stream") + console.log('Finalizing stream') if (hasAddedMessage) { finalizeMessageStream(messageId) } @@ -302,7 +304,7 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) { const handleOutputSelection = (values: string[]) => { // Ensure no duplicates in selection const dedupedValues = [...new Set(values)] - + if (activeWorkflowId) { // If array is empty, explicitly set to empty array to ensure complete reset if (dedupedValues.length === 0) { @@ -317,7 +319,7 @@ export function Chat({ panelWidth, chatMessage, setChatMessage }: ChatProps) {
            {/* Output Source Dropdown */}
            - {message.type !== 'user' && Workflow} {message.isStreaming && ( - β€’β€’β€’ + + β€’β€’β€’ + )}
            diff --git a/sim/app/w/[id]/components/panel/components/chat/components/chat-modal/chat-modal.tsx b/apps/sim/app/w/[id]/components/panel/components/chat/components/chat-modal/chat-modal.tsx similarity index 100% rename from sim/app/w/[id]/components/panel/components/chat/components/chat-modal/chat-modal.tsx rename to apps/sim/app/w/[id]/components/panel/components/chat/components/chat-modal/chat-modal.tsx diff --git a/sim/app/w/[id]/components/panel/components/chat/components/output-select/output-select.tsx b/apps/sim/app/w/[id]/components/panel/components/chat/components/output-select/output-select.tsx similarity index 97% rename from sim/app/w/[id]/components/panel/components/chat/components/output-select/output-select.tsx rename to apps/sim/app/w/[id]/components/panel/components/chat/components/output-select/output-select.tsx index b493b9173..c4565a5df 100644 --- a/sim/app/w/[id]/components/panel/components/chat/components/output-select/output-select.tsx +++ b/apps/sim/app/w/[id]/components/panel/components/chat/components/output-select/output-select.tsx @@ -1,9 +1,9 @@ import { useEffect, useMemo, useRef, useState } from 'react' -import { ChevronDown, Check } from 'lucide-react' +import { Check, ChevronDown } from 'lucide-react' +import { Button } from '@/components/ui/button' import { cn } from '@/lib/utils' import { useWorkflowStore } from '@/stores/workflows/workflow/store' import { getBlock } from '@/blocks' -import { Button } from '@/components/ui/button' interface OutputSelectProps { workflowId: string | null @@ -78,18 +78,18 @@ export function OutputSelect({ }, [blocks, workflowId]) // Get selected outputs display text - const selectedOutputsDisplayText = useMemo(() => { + const selectedOutputsDisplayText = useMemo(() => { if (!selectedOutputs || selectedOutputs.length === 0) { return placeholder } - + // Ensure all selected outputs exist in the workflowOutputs array - const validOutputs = selectedOutputs.filter(id => workflowOutputs.some(o 
=> o.id === id)) - + const validOutputs = selectedOutputs.filter((id) => workflowOutputs.some((o) => o.id === id)) + if (validOutputs.length === 0) { return placeholder } - + if (validOutputs.length === 1) { const output = workflowOutputs.find((o) => o.id === validOutputs[0]) if (output) { @@ -97,17 +97,17 @@ export function OutputSelect({ } return placeholder } - + return `${validOutputs.length} outputs selected` }, [selectedOutputs, workflowOutputs, placeholder]) // Get first selected output info for display icon const selectedOutputInfo = useMemo(() => { if (!selectedOutputs || selectedOutputs.length === 0) return null - - const validOutputs = selectedOutputs.filter(id => workflowOutputs.some(o => o.id === id)) + + const validOutputs = selectedOutputs.filter((id) => workflowOutputs.some((o) => o.id === id)) if (validOutputs.length === 0) return null - + const output = workflowOutputs.find((o) => o.id === validOutputs[0]) if (!output) return null @@ -216,13 +216,13 @@ export function OutputSelect({ const handleOutputSelection = (value: string) => { let newSelectedOutputs: string[] const index = selectedOutputs.indexOf(value) - + if (index === -1) { newSelectedOutputs = [...new Set([...selectedOutputs, value])] } else { newSelectedOutputs = selectedOutputs.filter((id) => id !== value) } - + onOutputSelect(newSelectedOutputs) } @@ -311,7 +311,7 @@ export function OutputSelect({
            ))}
            - + {/* Done button to close dropdown */}
            - -
            -
            +
            - +
            ) -} \ No newline at end of file +} diff --git a/sim/app/w/[id]/components/panel/components/console/console.tsx b/apps/sim/app/w/[id]/components/panel/components/console/console.tsx similarity index 100% rename from sim/app/w/[id]/components/panel/components/console/console.tsx rename to apps/sim/app/w/[id]/components/panel/components/console/console.tsx diff --git a/sim/app/w/[id]/components/panel/components/variables/variables.tsx b/apps/sim/app/w/[id]/components/panel/components/variables/variables.tsx similarity index 100% rename from sim/app/w/[id]/components/panel/components/variables/variables.tsx rename to apps/sim/app/w/[id]/components/panel/components/variables/variables.tsx diff --git a/sim/app/w/[id]/components/panel/panel.tsx b/apps/sim/app/w/[id]/components/panel/panel.tsx similarity index 100% rename from sim/app/w/[id]/components/panel/panel.tsx rename to apps/sim/app/w/[id]/components/panel/panel.tsx diff --git a/sim/app/w/[id]/components/toolbar/components/toolbar-block/toolbar-block.tsx b/apps/sim/app/w/[id]/components/toolbar/components/toolbar-block/toolbar-block.tsx similarity index 100% rename from sim/app/w/[id]/components/toolbar/components/toolbar-block/toolbar-block.tsx rename to apps/sim/app/w/[id]/components/toolbar/components/toolbar-block/toolbar-block.tsx diff --git a/sim/app/w/[id]/components/toolbar/components/toolbar-tabs/toolbar-tabs.tsx b/apps/sim/app/w/[id]/components/toolbar/components/toolbar-tabs/toolbar-tabs.tsx similarity index 100% rename from sim/app/w/[id]/components/toolbar/components/toolbar-tabs/toolbar-tabs.tsx rename to apps/sim/app/w/[id]/components/toolbar/components/toolbar-tabs/toolbar-tabs.tsx diff --git a/sim/app/w/[id]/components/toolbar/toolbar.tsx b/apps/sim/app/w/[id]/components/toolbar/toolbar.tsx similarity index 100% rename from sim/app/w/[id]/components/toolbar/toolbar.tsx rename to apps/sim/app/w/[id]/components/toolbar/toolbar.tsx diff --git 
a/sim/app/w/[id]/components/workflow-block/components/action-bar/action-bar.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/action-bar/action-bar.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/action-bar/action-bar.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/action-bar/action-bar.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/connection-blocks/connection-blocks.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/connection-blocks/connection-blocks.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/connection-blocks/connection-blocks.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/connection-blocks/connection-blocks.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/checkbox-list.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/checkbox-list.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/checkbox-list.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/checkbox-list.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/code.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/code.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/code.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/code.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/condition-input.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/condition-input.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/condition-input.tsx rename to 
apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/condition-input.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/credential-selector/components/oauth-required-modal.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/credential-selector/components/oauth-required-modal.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/credential-selector/components/oauth-required-modal.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/credential-selector/components/oauth-required-modal.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/credential-selector/credential-selector.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/credential-selector/credential-selector.tsx similarity index 99% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/credential-selector/credential-selector.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/credential-selector/credential-selector.tsx index 2d07397b3..03ea3d20e 100644 --- a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/credential-selector/credential-selector.tsx +++ b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/credential-selector/credential-selector.tsx @@ -22,7 +22,7 @@ import { parseProvider, } from '@/lib/oauth' import { saveToStorage } from '@/stores/workflows/persistence' -import { OAuthRequiredModal } from '../credential-selector/components/oauth-required-modal' +import { OAuthRequiredModal } from './components/oauth-required-modal' const logger = createLogger('CredentialSelector') diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/date-input.tsx 
b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/date-input.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/date-input.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/date-input.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/dropdown.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/dropdown.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/dropdown.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/dropdown.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/eval-input.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/eval-input.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/eval-input.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/eval-input.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/components/confluence-file-selector.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/components/confluence-file-selector.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/components/confluence-file-selector.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/components/confluence-file-selector.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/components/google-drive-picker.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/components/google-drive-picker.tsx 
similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/components/google-drive-picker.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/components/google-drive-picker.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/components/jira-issue-selector.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/components/jira-issue-selector.tsx similarity index 97% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/components/jira-issue-selector.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/components/jira-issue-selector.tsx index 91acd0e7d..aaeef4cc9 100644 --- a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/components/jira-issue-selector.tsx +++ b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/components/jira-issue-selector.tsx @@ -13,6 +13,7 @@ import { CommandList, } from '@/components/ui/command' import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover' +import { Logger } from '@/lib/logs/console-logger' import { Credential, getProviderIdFromServiceId, @@ -21,7 +22,6 @@ import { } from '@/lib/oauth' import { saveToStorage } from '@/stores/workflows/persistence' import { OAuthRequiredModal } from '../../credential-selector/components/oauth-required-modal' -import { Logger } from '@/lib/logs/console-logger' const logger = new Logger('jira_issue_selector') @@ -85,10 +85,11 @@ export function JiraIssueSelector({ // Set a new timeout searchTimeoutRef.current = setTimeout(() => { - if (value.length >= 1) { // Changed from > 2 to >= 1 to be more responsive - fetchIssues(value) + if (value.length >= 1) { + // Changed from > 2 to >= 1 to be more 
responsive + fetchIssues(value) } else { - setIssues([]) // Clear issues if search is empty + setIssues([]) // Clear issues if search is empty } }, 500) // 500ms debounce } @@ -154,7 +155,6 @@ export function JiraIssueSelector({ // Fetch issue info when we have a selected issue ID const fetchIssueInfo = useCallback( async (issueId: string) => { - // Validate domain format const trimmedDomain = domain.trim().toLowerCase() if (!trimmedDomain.includes('.')) { @@ -201,7 +201,7 @@ export function JiraIssueSelector({ domain, accessToken, issueId, - cloudId + cloudId, }), }) @@ -216,7 +216,7 @@ export function JiraIssueSelector({ logger.info('Using cloud ID:', data.cloudId) setCloudId(data.cloudId) } - + if (data.issue) { logger.info('Successfully fetched issue:', data.issue.name) setSelectedIssue(data.issue) @@ -240,7 +240,7 @@ export function JiraIssueSelector({ ) // Fetch issues from Jira - const fetchIssues = useCallback( + const fetchIssues = useCallback( async (searchQuery?: string) => { if (!selectedCredentialId || !domain) return @@ -298,7 +298,7 @@ export function JiraIssueSelector({ ...(searchQuery && { query: searchQuery }), ...(cloudId && { cloudId }), }) - + const response = await fetch(`/api/auth/oauth/jira/issues?${queryParams.toString()}`, { method: 'GET', headers: { @@ -313,14 +313,14 @@ export function JiraIssueSelector({ } const data = await response.json() - + if (data.cloudId) { setCloudId(data.cloudId) } - + // Process the issue picker results let foundIssues: JiraIssueInfo[] = [] - + // Handle the sections returned by the issue picker API if (data.sections) { // Combine issues from all sections @@ -337,12 +337,12 @@ export function JiraIssueSelector({ } }) } - + logger.info(`Received ${foundIssues.length} issues from API`) setIssues(foundIssues) // If we have a selected issue ID, find the issue info - if (selectedIssueId) { + if (selectedIssueId) { const issueInfo = foundIssues.find((issue: JiraIssueInfo) => issue.id === selectedIssueId) if 
(issueInfo) { setSelectedIssue(issueInfo) @@ -360,7 +360,15 @@ export function JiraIssueSelector({ setIsLoading(false) } }, - [selectedCredentialId, domain, selectedIssueId, onIssueInfoChange, fetchIssueInfo, cloudId, projectId] + [ + selectedCredentialId, + domain, + selectedIssueId, + onIssueInfoChange, + fetchIssueInfo, + cloudId, + projectId, + ] ) // Fetch credentials on initial mount @@ -371,7 +379,6 @@ export function JiraIssueSelector({ } }, [fetchCredentials]) - // Handle open change const handleOpenChange = (isOpen: boolean) => { setOpen(isOpen) @@ -384,7 +391,13 @@ export function JiraIssueSelector({ // Fetch selected issue metadata once credentials are ready or changed useEffect(() => { - if (value && selectedCredentialId && domain && domain.includes('.') && (!selectedIssue || selectedIssue.id !== value)) { + if ( + value && + selectedCredentialId && + domain && + domain.includes('.') && + (!selectedIssue || selectedIssue.id !== value) + ) { fetchIssueInfo(value) } }, [value, selectedCredentialId, selectedIssue, domain, fetchIssueInfo]) @@ -447,7 +460,7 @@ export function JiraIssueSelector({ {selectedIssue.name}
            - ) : ( + ) : (
            {label} @@ -596,7 +609,7 @@ export function JiraIssueSelector({ )}
            - {selectedIssue.webViewLink ? ( + {selectedIssue.webViewLink ? ( ) -} \ No newline at end of file +} diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/file-selector-input.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/file-selector-input.tsx similarity index 97% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/file-selector-input.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/file-selector-input.tsx index c308b01f9..183629156 100644 --- a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/file-selector-input.tsx +++ b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-selector/file-selector-input.tsx @@ -4,8 +4,8 @@ import { useEffect, useState } from 'react' import { useSubBlockStore } from '@/stores/workflows/subblock/store' import { SubBlockConfig } from '@/blocks/types' import { ConfluenceFileInfo, ConfluenceFileSelector } from './components/confluence-file-selector' -import { JiraIssueInfo, JiraIssueSelector } from './components/jira-issue-selector' import { FileInfo, GoogleDrivePicker } from './components/google-drive-picker' +import { JiraIssueInfo, JiraIssueSelector } from './components/jira-issue-selector' interface FileSelectorInputProps { blockId: string @@ -27,7 +27,8 @@ export function FileSelectorInput({ blockId, subBlock, disabled = false }: FileS // For Confluence and Jira, we need the domain and credentials const domain = isConfluence || isJira ? (getValue(blockId, 'domain') as string) || '' : '' - const credentials = isConfluence || isJira ? (getValue(blockId, 'credential') as string) || '' : '' + const credentials = + isConfluence || isJira ? 
(getValue(blockId, 'credential') as string) || '' : '' // Get the current value from the store useEffect(() => { @@ -53,7 +54,7 @@ export function FileSelectorInput({ blockId, subBlock, disabled = false }: FileS setSelectedIssueId(issueKey) setIssueInfo(info || null) setValue(blockId, subBlock.id, issueKey) - + // Clear the fields when a new issue is selected if (isJira) { setValue(blockId, 'summary', '') @@ -117,4 +118,4 @@ export function FileSelectorInput({ blockId, subBlock, disabled = false }: FileS apiKey={apiKey} /> ) -} \ No newline at end of file +} diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-upload.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-upload.tsx similarity index 99% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-upload.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-upload.tsx index b2d627e7d..b9bae36ae 100644 --- a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-upload.tsx +++ b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/file-upload.tsx @@ -142,7 +142,7 @@ export function FileUpload({ // Try to get pre-signed URLs first for direct upload let useDirectUpload = false - + // Upload each file separately for (const file of validFiles) { try { @@ -160,11 +160,11 @@ export function FileUpload({ }) const presignedData = await presignedResponse.json() - + if (presignedResponse.ok && presignedData.directUploadSupported) { // Use direct upload method useDirectUpload = true - + // Upload directly to S3 using the pre-signed URL const uploadResponse = await fetch(presignedData.presignedUrl, { method: 'PUT', @@ -175,16 +175,17 @@ export function FileUpload({ }) if (!uploadResponse.ok) { - throw new Error(`Direct upload failed: ${uploadResponse.status} ${uploadResponse.statusText}`) + throw new Error( + `Direct upload 
failed: ${uploadResponse.status} ${uploadResponse.statusText}` + ) } // Use the file info returned from the presigned URL endpoint uploadedFiles.push(presignedData.fileInfo) - } else { // Fallback to traditional upload through API route useDirectUpload = false - + // Create FormData for upload const formData = new FormData() formData.append('file', file) diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/folder-selector/components/folder-selector-input.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/folder-selector/components/folder-selector-input.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/folder-selector/components/folder-selector-input.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/folder-selector/components/folder-selector-input.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/folder-selector/folder-selector.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/folder-selector/folder-selector.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/folder-selector/folder-selector.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/folder-selector/folder-selector.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/long-input.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/long-input.tsx similarity index 99% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/long-input.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/long-input.tsx index a1fb58250..2b90e2fbb 100644 --- a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/long-input.tsx +++ 
b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/long-input.tsx @@ -130,7 +130,7 @@ export function LongInput({ const finalHeight = parseInt(textareaRef.current.style.height, 10) || height setHeight(finalHeight) } - + isResizing.current = false document.removeEventListener('mousemove', handleMouseMove) document.removeEventListener('mouseup', handleMouseUp) diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/project-selector/components/jira-project-selector.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/project-selector/components/jira-project-selector.tsx similarity index 98% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/project-selector/components/jira-project-selector.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/project-selector/components/jira-project-selector.tsx index ff2c63cf1..43224c391 100644 --- a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/project-selector/components/jira-project-selector.tsx +++ b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/project-selector/components/jira-project-selector.tsx @@ -13,6 +13,7 @@ import { CommandList, } from '@/components/ui/command' import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover' +import { Logger } from '@/lib/logs/console-logger' import { Credential, getProviderIdFromServiceId, @@ -21,7 +22,6 @@ import { } from '@/lib/oauth' import { saveToStorage } from '@/stores/workflows/persistence' import { OAuthRequiredModal } from '../../credential-selector/components/oauth-required-modal' -import { Logger } from '@/lib/logs/console-logger' const logger = new Logger('jira_project_selector') @@ -193,7 +193,7 @@ export function JiraProjectSelector({ domain, accessToken, projectId, - ...(cloudId && { cloudId }) + ...(cloudId && { cloudId }), }) const 
response = await fetch(`/api/auth/oauth/jira/project?${queryParams.toString()}`) @@ -205,7 +205,7 @@ export function JiraProjectSelector({ } const projectInfo = await response.json() - + if (projectInfo.cloudId) { setCloudId(projectInfo.cloudId) } @@ -276,7 +276,7 @@ export function JiraProjectSelector({ domain, accessToken, ...(searchQuery && { query: searchQuery }), - ...(cloudId && { cloudId }) + ...(cloudId && { cloudId }), }) // Use the GET endpoint for project search @@ -289,7 +289,7 @@ export function JiraProjectSelector({ } const data = await response.json() - + if (data.cloudId) { setCloudId(data.cloudId) } @@ -320,7 +320,14 @@ export function JiraProjectSelector({ setIsLoading(false) } }, - [selectedCredentialId, domain, selectedProjectId, onProjectInfoChange, fetchProjectInfo, cloudId] + [ + selectedCredentialId, + domain, + selectedProjectId, + onProjectInfoChange, + fetchProjectInfo, + cloudId, + ] ) // Fetch credentials on initial mount @@ -438,10 +445,7 @@ export function JiraProjectSelector({ )} - + {isLoading ? 
( diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/project-selector/project-selector-input.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/project-selector/project-selector-input.tsx similarity index 89% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/project-selector/project-selector-input.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/project-selector/project-selector-input.tsx index fba637617..201a0df39 100644 --- a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/project-selector/project-selector-input.tsx +++ b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/project-selector/project-selector-input.tsx @@ -12,11 +12,11 @@ interface ProjectSelectorInputProps { onProjectSelect?: (projectId: string) => void } -export function ProjectSelectorInput({ - blockId, - subBlock, +export function ProjectSelectorInput({ + blockId, + subBlock, disabled = false, - onProjectSelect + onProjectSelect, }: ProjectSelectorInputProps) { const { getValue, setValue } = useSubBlockStore() const [selectedProjectId, setSelectedProjectId] = useState('') @@ -24,10 +24,10 @@ export function ProjectSelectorInput({ // Get provider-specific values const provider = subBlock.provider || 'jira' - + // For Jira, we need the domain - const domain = getValue(blockId, 'domain') as string || '' - const credentials = getValue(blockId, 'credential') as string || '' + const domain = (getValue(blockId, 'domain') as string) || '' + const credentials = (getValue(blockId, 'credential') as string) || '' // Get the current value from the store useEffect(() => { @@ -42,14 +42,14 @@ export function ProjectSelectorInput({ setSelectedProjectId(projectId) setProjectInfo(info || null) setValue(blockId, subBlock.id, projectId) - + // Clear the issue-related fields when a new project is selected if (provider === 
'jira') { setValue(blockId, 'summary', '') setValue(blockId, 'description', '') setValue(blockId, 'issueKey', '') } - + onProjectSelect?.(projectId) } diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/schedule/components/schedule-modal.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/schedule/components/schedule-modal.tsx similarity index 99% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/schedule/components/schedule-modal.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/schedule/components/schedule-modal.tsx index a90776cae..691ff3976 100644 --- a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/schedule/components/schedule-modal.tsx +++ b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/schedule/components/schedule-modal.tsx @@ -27,9 +27,9 @@ import { import { createLogger } from '@/lib/logs/console-logger' import { cn } from '@/lib/utils' import { useSubBlockStore } from '@/stores/workflows/subblock/store' -import { UnsavedChangesDialog } from '../../../components/webhook/components/ui/confirmation' import { useSubBlockValue } from '../../../hooks/use-sub-block-value' import { TimeInput } from '../../time-input' +import { UnsavedChangesDialog } from '../../webhook/components/ui/confirmation' const logger = createLogger('ScheduleModal') diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/schedule/schedule-config.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/schedule/schedule-config.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/schedule/schedule-config.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/schedule/schedule-config.tsx diff --git 
a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/short-input.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/short-input.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/short-input.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/short-input.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/slider-input.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/slider-input.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/slider-input.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/slider-input.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/starter/input-format.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/starter/input-format.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/starter/input-format.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/starter/input-format.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/switch.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/switch.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/switch.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/switch.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/table.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/table.tsx similarity index 100% rename from 
sim/app/w/[id]/components/workflow-block/components/sub-block/components/table.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/table.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/time-input.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/time-input.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/time-input.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/time-input.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/tool-input/components/code-editor/code-editor.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/tool-input/components/code-editor/code-editor.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/tool-input/components/code-editor/code-editor.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/tool-input/components/code-editor/code-editor.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/tool-input/components/custom-tool-modal/custom-tool-modal.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/tool-input/components/custom-tool-modal/custom-tool-modal.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/tool-input/components/custom-tool-modal/custom-tool-modal.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/tool-input/components/custom-tool-modal/custom-tool-modal.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/tool-input/components/tool-command/tool-command.tsx 
b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/tool-input/components/tool-command/tool-command.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/tool-input/components/tool-command/tool-command.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/tool-input/components/tool-command/tool-command.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/tool-input/tool-input.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/tool-input/tool-input.tsx similarity index 99% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/tool-input/tool-input.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/tool-input/tool-input.tsx index 7fec2e786..8fa621f5a 100644 --- a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/tool-input/tool-input.tsx +++ b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/tool-input/tool-input.tsx @@ -478,7 +478,7 @@ export function ToolInput({ blockId, subBlockId }: ToolInputProps) { const handleOperationChange = (toolIndex: number, operation: string) => { const tool = selectedTools[toolIndex] const subBlockStore = useSubBlockStore.getState() - + // Clear fields when operation changes for Jira if (tool.type === 'jira') { // Clear all fields that might be shared between operations @@ -488,7 +488,7 @@ export function ToolInput({ blockId, subBlockId }: ToolInputProps) { subBlockStore.setValue(blockId, 'projectId', '') subBlockStore.setValue(blockId, 'parentIssue', '') } - + setValue( selectedTools.map((tool, index) => index === toolIndex diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/airtable-config.tsx 
b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/airtable-config.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/airtable-config.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/airtable-config.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/discord-config.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/discord-config.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/discord-config.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/discord-config.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/generic-config.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/generic-config.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/generic-config.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/generic-config.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/github-config.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/github-config.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/github-config.tsx rename to 
apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/github-config.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/slack-config.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/slack-config.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/slack-config.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/slack-config.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/stripe-config.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/stripe-config.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/stripe-config.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/stripe-config.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/telegram-config.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/telegram-config.tsx similarity index 90% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/telegram-config.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/telegram-config.tsx index 1b3c1a57d..ca6d7826b 100644 --- a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/telegram-config.tsx +++ 
b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/telegram-config.tsx @@ -11,7 +11,7 @@ interface TelegramConfigProps { triggerPhrase: string setTriggerPhrase: (value: string) => void isLoadingToken: boolean - testResult: any + testResult: any copied: string | null copyToClipboard: (text: string, type: string) => void testWebhook?: () => void // Optional test function @@ -32,7 +32,6 @@ export function TelegramConfig({ webhookId, webhookUrl, }: TelegramConfigProps) { - return (
            @@ -48,7 +47,7 @@ export function TelegramConfig({ id="telegram-bot-token" value={botToken} onChange={(e) => { - setBotToken(e.target.value); + setBotToken(e.target.value) }} placeholder="123456789:ABCdefGHIjklMNOpqrsTUVwxyz" type="password" @@ -69,7 +68,7 @@ export function TelegramConfig({ id="telegram-trigger-phrase" value={triggerPhrase} onChange={(e) => { - setTriggerPhrase(e.target.value); + setTriggerPhrase(e.target.value) }} placeholder="/start_workflow" required @@ -89,20 +88,21 @@ export function TelegramConfig({
            1. - Message "/newbot" to {' '} + Message "/newbot" to{' '} { - e.stopPropagation(); - window.open('https://t.me/BotFather', '_blank', 'noopener,noreferrer'); - e.preventDefault(); + e.stopPropagation() + window.open('https://t.me/BotFather', '_blank', 'noopener,noreferrer') + e.preventDefault() }} > @BotFather - {' '} in Telegram to create a bot and copy its token. + {' '} + in Telegram to create a bot and copy its token.
            2. Enter your Bot Token and a trigger phrase above.
            3. Ensure your webhook URL uses HTTPS with a valid SSL certificate.
            4. diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/whatsapp-config.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/whatsapp-config.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/whatsapp-config.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/providers/whatsapp-config.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/config-field.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/config-field.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/config-field.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/config-field.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/config-section.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/config-section.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/config-section.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/config-section.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/confirmation.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/confirmation.tsx similarity index 100% rename from 
sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/confirmation.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/confirmation.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/copyable.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/copyable.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/copyable.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/copyable.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/instructions-section.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/instructions-section.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/instructions-section.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/instructions-section.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/test-result.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/test-result.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/test-result.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/test-result.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/webhook-config-field.tsx 
b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/webhook-config-field.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/webhook-config-field.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/webhook-config-field.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/webhook-footer.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/webhook-footer.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/webhook-footer.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/webhook-footer.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/webhook-url.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/webhook-url.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/webhook-url.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/ui/webhook-url.tsx diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/webhook-modal.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/webhook-modal.tsx similarity index 99% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/webhook-modal.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/webhook-modal.tsx index a51edaa55..2699947bb 100644 --- 
a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/webhook-modal.tsx +++ b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/components/webhook-modal.tsx @@ -17,12 +17,12 @@ import { GenericConfig } from './providers/generic-config' import { GithubConfig } from './providers/github-config' import { SlackConfig } from './providers/slack-config' import { StripeConfig } from './providers/stripe-config' +import { TelegramConfig } from './providers/telegram-config' import { WhatsAppConfig } from './providers/whatsapp-config' import { DeleteConfirmDialog } from './ui/confirmation' import { UnsavedChangesDialog } from './ui/confirmation' import { WebhookDialogFooter } from './ui/webhook-footer' import { WebhookUrlField } from './ui/webhook-url' -import { TelegramConfig } from './providers/telegram-config' const logger = createLogger('WebhookModal') @@ -536,7 +536,8 @@ export function WebhookModal({ if (errorMessage.includes('SSL')) { setTestResult({ success: false, - message: 'Telegram webhooks require HTTPS. Please ensure your domain has a valid SSL certificate.', + message: + 'Telegram webhooks require HTTPS. 
Please ensure your domain has a valid SSL certificate.', }) } else { setTestResult({ diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/webhook-config.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/webhook-config.tsx similarity index 99% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/webhook-config.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/webhook-config.tsx index 75c776e94..05046151e 100644 --- a/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/webhook-config.tsx +++ b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/components/webhook/webhook-config.tsx @@ -7,13 +7,13 @@ import { GithubIcon, SlackIcon, StripeIcon, - WhatsAppIcon, TelegramIcon, + WhatsAppIcon, } from '@/components/icons' import { Button } from '@/components/ui/button' import { createLogger } from '@/lib/logs/console-logger' -import { useWorkflowStore } from '@/stores/workflows/workflow/store' import { useSubBlockStore } from '@/stores/workflows/subblock/store' +import { useWorkflowStore } from '@/stores/workflows/workflow/store' import { useSubBlockValue } from '../../hooks/use-sub-block-value' import { WebhookModal } from './components/webhook-modal' @@ -433,9 +433,9 @@ export function WebhookConfig({ blockId, subBlockId, isConnecting }: WebhookConf ...workflowValues, [workflowId]: { ...workflowValues, - [blockId]: blockValues - } - } + [blockId]: blockValues, + }, + }, }) // Clear component state diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/hooks/use-sub-block-value.ts b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/hooks/use-sub-block-value.ts similarity index 94% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/hooks/use-sub-block-value.ts rename to 
apps/sim/app/w/[id]/components/workflow-block/components/sub-block/hooks/use-sub-block-value.ts index 56380ac93..a7927e073 100644 --- a/sim/app/w/[id]/components/workflow-block/components/sub-block/hooks/use-sub-block-value.ts +++ b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/hooks/use-sub-block-value.ts @@ -121,7 +121,10 @@ function storeApiKeyValue( } // For provider-based blocks, store the API key under the provider name - if ((blockType === 'agent' || blockType === 'router' || blockType === 'evaluator') && modelValue) { + if ( + (blockType === 'agent' || blockType === 'router' || blockType === 'evaluator') && + modelValue + ) { const provider = getProviderFromModel(modelValue) if (provider && provider !== 'ollama') { subBlockStore.setToolParam(provider, 'apiKey', String(newValue)) @@ -181,7 +184,8 @@ export function useSubBlockValue( ) // Determine if this is a provider-based block type - const isProviderBasedBlock = blockType === 'agent' || blockType === 'router' || blockType === 'evaluator' + const isProviderBasedBlock = + blockType === 'agent' || blockType === 'router' || blockType === 'evaluator' // Compute the modelValue based on block type const modelValue = isProviderBasedBlock ? 
(modelSubBlockValue as string) : null @@ -238,7 +242,16 @@ export function useSubBlockValue( // Normal handling for non-provider blocks handleStandardBlockApiKey(blockId, subBlockId, blockType, storeValue) } - }, [blockId, subBlockId, blockType, storeValue, isApiKey, isAutoFillEnvVarsEnabled, modelValue, isProviderBasedBlock]) + }, [ + blockId, + subBlockId, + blockType, + storeValue, + isApiKey, + isAutoFillEnvVarsEnabled, + modelValue, + isProviderBasedBlock, + ]) // Monitor for model changes in provider-based blocks useEffect(() => { @@ -273,7 +286,16 @@ export function useSubBlockValue( } } } - }, [blockId, subBlockId, blockType, isApiKey, modelValue, isAutoFillEnvVarsEnabled, storeValue, isProviderBasedBlock]) + }, [ + blockId, + subBlockId, + blockType, + isApiKey, + modelValue, + isAutoFillEnvVarsEnabled, + storeValue, + isProviderBasedBlock, + ]) // Update the ref if the store value changes // This ensures we're always working with the latest value diff --git a/sim/app/w/[id]/components/workflow-block/components/sub-block/sub-block.tsx b/apps/sim/app/w/[id]/components/workflow-block/components/sub-block/sub-block.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-block/components/sub-block/sub-block.tsx rename to apps/sim/app/w/[id]/components/workflow-block/components/sub-block/sub-block.tsx diff --git a/sim/app/w/[id]/components/workflow-block/workflow-block.tsx b/apps/sim/app/w/[id]/components/workflow-block/workflow-block.tsx similarity index 98% rename from sim/app/w/[id]/components/workflow-block/workflow-block.tsx rename to apps/sim/app/w/[id]/components/workflow-block/workflow-block.tsx index 026beb73e..d9a8f08bb 100644 --- a/sim/app/w/[id]/components/workflow-block/workflow-block.tsx +++ b/apps/sim/app/w/[id]/components/workflow-block/workflow-block.tsx @@ -420,12 +420,14 @@ export function WorkflowBlock({ id, data }: NodeProps) {

              {scheduleInfo.scheduleTiming}

              {scheduleInfo.nextRunAt && (

              - Next run: {formatDateTime(new Date(scheduleInfo.nextRunAt), scheduleInfo.timezone)} + Next run:{' '} + {formatDateTime(new Date(scheduleInfo.nextRunAt), scheduleInfo.timezone)}

              )} {scheduleInfo.lastRanAt && (

              - Last run: {formatDateTime(new Date(scheduleInfo.lastRanAt), scheduleInfo.timezone)} + Last run:{' '} + {formatDateTime(new Date(scheduleInfo.lastRanAt), scheduleInfo.timezone)}

              )} diff --git a/sim/app/w/[id]/components/workflow-edge/workflow-edge.tsx b/apps/sim/app/w/[id]/components/workflow-edge/workflow-edge.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-edge/workflow-edge.tsx rename to apps/sim/app/w/[id]/components/workflow-edge/workflow-edge.tsx diff --git a/sim/app/w/[id]/components/workflow-loop/components/loop-input/loop-input.tsx b/apps/sim/app/w/[id]/components/workflow-loop/components/loop-input/loop-input.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-loop/components/loop-input/loop-input.tsx rename to apps/sim/app/w/[id]/components/workflow-loop/components/loop-input/loop-input.tsx diff --git a/sim/app/w/[id]/components/workflow-loop/components/loop-label/loop-label.tsx b/apps/sim/app/w/[id]/components/workflow-loop/components/loop-label/loop-label.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-loop/components/loop-label/loop-label.tsx rename to apps/sim/app/w/[id]/components/workflow-loop/components/loop-label/loop-label.tsx diff --git a/sim/app/w/[id]/components/workflow-loop/workflow-loop.tsx b/apps/sim/app/w/[id]/components/workflow-loop/workflow-loop.tsx similarity index 100% rename from sim/app/w/[id]/components/workflow-loop/workflow-loop.tsx rename to apps/sim/app/w/[id]/components/workflow-loop/workflow-loop.tsx diff --git a/sim/app/w/[id]/hooks/use-block-connections.ts b/apps/sim/app/w/[id]/hooks/use-block-connections.ts similarity index 100% rename from sim/app/w/[id]/hooks/use-block-connections.ts rename to apps/sim/app/w/[id]/hooks/use-block-connections.ts diff --git a/sim/app/w/[id]/hooks/use-code-generation.ts b/apps/sim/app/w/[id]/hooks/use-code-generation.ts similarity index 100% rename from sim/app/w/[id]/hooks/use-code-generation.ts rename to apps/sim/app/w/[id]/hooks/use-code-generation.ts diff --git a/sim/app/w/[id]/hooks/use-keyboard-shortcuts.ts b/apps/sim/app/w/[id]/hooks/use-keyboard-shortcuts.ts similarity 
index 100% rename from sim/app/w/[id]/hooks/use-keyboard-shortcuts.ts rename to apps/sim/app/w/[id]/hooks/use-keyboard-shortcuts.ts diff --git a/sim/app/w/[id]/hooks/use-workflow-execution.ts b/apps/sim/app/w/[id]/hooks/use-workflow-execution.ts similarity index 53% rename from sim/app/w/[id]/hooks/use-workflow-execution.ts rename to apps/sim/app/w/[id]/hooks/use-workflow-execution.ts index a23cad4b9..3f33b5eb8 100644 --- a/sim/app/w/[id]/hooks/use-workflow-execution.ts +++ b/apps/sim/app/w/[id]/hooks/use-workflow-execution.ts @@ -42,7 +42,11 @@ export function useWorkflowExecution() { } = useExecutionStore() const [executionResult, setExecutionResult] = useState(null) - const persistLogs = async (executionId: string, result: ExecutionResult, streamContent?: string) => { + const persistLogs = async ( + executionId: string, + result: ExecutionResult, + streamContent?: string + ) => { try { // Build trace spans from execution logs const { traceSpans, totalDuration } = buildTraceSpans(result) @@ -58,15 +62,15 @@ export function useWorkflowExecution() { if (streamContent && result.output?.response && typeof streamContent === 'string') { // Update the content with the final streaming content enrichedResult.output.response.content = streamContent - + // Also update any block logs to include the content where appropriate if (enrichedResult.logs) { // Get the streaming block ID from metadata if available - const streamingBlockId = (result.metadata as any)?.streamingBlockId || null; + const streamingBlockId = (result.metadata as any)?.streamingBlockId || null for (const log of enrichedResult.logs) { // Only update the specific agent block that was streamed - const isStreamingBlock = streamingBlockId && log.blockId === streamingBlockId; + const isStreamingBlock = streamingBlockId && log.blockId === streamingBlockId if (isStreamingBlock && log.blockType === 'agent' && log.output?.response) { log.output.response.content = streamContent } @@ -88,7 +92,7 @@ export function 
useWorkflowExecution() { if (!response.ok) { throw new Error('Failed to persist logs') } - + return executionId } catch (error) { logger.error('Error persisting logs:', { error }) @@ -96,346 +100,359 @@ export function useWorkflowExecution() { } } - const handleRunWorkflow = useCallback(async (workflowInput?: any) => { - if (!activeWorkflowId) return + const handleRunWorkflow = useCallback( + async (workflowInput?: any) => { + if (!activeWorkflowId) return - // Reset execution result and set execution state - setExecutionResult(null) - setIsExecuting(true) + // Reset execution result and set execution state + setExecutionResult(null) + setIsExecuting(true) - // Set debug mode if it's enabled in settings - if (isDebugModeEnabled) { - setIsDebugging(true) - } - - // Check if panel is open and open it if not - const isPanelOpen = usePanelStore.getState().isOpen - if (!isPanelOpen) { - togglePanel() - } - - // Set active tab to console - if (activeTab !== 'console' && activeTab !== 'chat') { - setActiveTab('console') - } - - const executionId = uuidv4() - - // Determine if this is a chat execution - // Only true if the execution is initiated from the chat panel - // or through a chat-specific execution path - const isChatExecution = activeTab === 'chat' && - (workflowInput && typeof workflowInput === 'object' && 'input' in workflowInput) - - // If this is a chat execution, get the selected outputs - let selectedOutputIds: string[] | undefined = undefined - if (isChatExecution && activeWorkflowId) { - // Get selected outputs from chat store - const chatStore = await import('@/stores/panel/chat/store').then(mod => mod.useChatStore) - selectedOutputIds = chatStore.getState().getSelectedWorkflowOutput(activeWorkflowId) - logger.info('Chat execution with selected outputs:', selectedOutputIds) - } - - try { - // Clear any existing state - setDebugContext(null) - - // Use the mergeSubblockState utility to get all block states - const mergedStates = mergeSubblockState(blocks) 
- const currentBlockStates = Object.entries(mergedStates).reduce( - (acc, [id, block]) => { - acc[id] = Object.entries(block.subBlocks).reduce( - (subAcc, [key, subBlock]) => { - subAcc[key] = subBlock.value - return subAcc - }, - {} as Record - ) - return acc - }, - {} as Record> - ) - - // Get environment variables - const envVars = getAllVariables() - const envVarValues = Object.entries(envVars).reduce( - (acc, [key, variable]) => { - acc[key] = variable.value - return acc - }, - {} as Record - ) - - // Get workflow variables - const workflowVars = activeWorkflowId ? getVariablesByWorkflowId(activeWorkflowId) : [] - const workflowVariables = workflowVars.reduce( - (acc, variable) => { - acc[variable.id] = variable - return acc - }, - {} as Record - ) - - // Create serialized workflow - const workflow = new Serializer().serializeWorkflow(mergedStates, edges, loops) - - // Create executor options with streaming support for chat - const executorOptions: any = { - // Default executor options - workflow, - currentBlockStates, - envVarValues, - workflowInput, - workflowVariables, + // Set debug mode if it's enabled in settings + if (isDebugModeEnabled) { + setIsDebugging(true) } - // Add streaming context for chat executions - if (isChatExecution && selectedOutputIds && selectedOutputIds.length > 0) { - executorOptions.contextExtensions = { - stream: true, - selectedOutputIds, - edges: workflow.connections.map(conn => ({ - source: conn.source, - target: conn.target - })) - } + // Check if panel is open and open it if not + const isPanelOpen = usePanelStore.getState().isOpen + if (!isPanelOpen) { + togglePanel() } - // Create executor and store in global state - const newExecutor = new Executor(executorOptions) - setExecutor(newExecutor) - - // Execute workflow - const result = await newExecutor.execute(activeWorkflowId) - - // Streaming results are handled differently - they won't have a standard result - if (result instanceof ReadableStream) { - logger.info('Received 
streaming result from executor') - - // For streaming results, we need to handle them in the component - // that initiated the execution (chat panel) - return { - success: true, - stream: result, - } - } - - // Handle StreamingExecution format (combined stream + execution result) - if (result && typeof result === 'object' && 'stream' in result && 'execution' in result) { - logger.info('Received combined stream+execution result from executor') - - // Generate an executionId and store it in the execution metadata so that - // the chat component can persist the logs *after* the stream finishes. - const executionId = uuidv4() - - // Determine which block is streaming - typically the one that matches a selected output ID - let streamingBlockId = null; - if (selectedOutputIds && selectedOutputIds.length > 0 && result.execution.logs) { - // Find the agent block in the logs that matches one of our selected outputs - const streamingBlock = result.execution.logs.find(log => - log.blockType === 'agent' && selectedOutputIds.some(id => id === log.blockId || id.startsWith(`${log.blockId}_`)) - ); - if (streamingBlock) { - streamingBlockId = streamingBlock.blockId; - logger.info(`Identified streaming block: ${streamingBlockId}`); - } - } - - // Attach streaming / source metadata and the newly generated executionId - result.execution.metadata = { - ...(result.execution.metadata || {}), - executionId, - source: isChatExecution ? 
'chat' : 'manual', - streamingBlockId, // Add the block ID to the metadata - } as any - - // Clean up any response objects with zero tokens in agent blocks to avoid confusion in console - if (result.execution.logs && Array.isArray(result.execution.logs)) { - result.execution.logs.forEach((log: any) => { - if (log.blockType === 'agent' && log.output?.response) { - const response = log.output.response; - - // Check for zero tokens that will be estimated later - if (response.tokens && - (!response.tokens.completion || response.tokens.completion === 0) && - (!response.toolCalls || !response.toolCalls.list || response.toolCalls.list.length === 0)) { - - // Remove tokens from console display to avoid confusion - // They'll be properly estimated in the execution logger - delete response.tokens; - } - } - }); - } - - // Mark the execution as streaming so that downstream code can recognise it - (result.execution as any).isStreaming = true - - // Return both the stream and the execution object so the caller (chat panel) - // can collect the full content and then persist the logs in one go. 
- // Also include processingPromise if available to ensure token counts are final - return { - success: true, - stream: result.stream, - execution: result.execution, - processingPromise: (result as any).processingPromise - } + // Set active tab to console + if (activeTab !== 'console' && activeTab !== 'chat') { + setActiveTab('console') } - // Add metadata about source being chat if applicable - if (isChatExecution) { - // Use type assertion for adding custom metadata - (result as any).metadata = { - ...(result.metadata || {}), - source: 'chat' - } + const executionId = uuidv4() + + // Determine if this is a chat execution + // Only true if the execution is initiated from the chat panel + // or through a chat-specific execution path + const isChatExecution = + activeTab === 'chat' && + workflowInput && + typeof workflowInput === 'object' && + 'input' in workflowInput + + // If this is a chat execution, get the selected outputs + let selectedOutputIds: string[] | undefined = undefined + if (isChatExecution && activeWorkflowId) { + // Get selected outputs from chat store + const chatStore = await import('@/stores/panel/chat/store').then((mod) => mod.useChatStore) + selectedOutputIds = chatStore.getState().getSelectedWorkflowOutput(activeWorkflowId) + logger.info('Chat execution with selected outputs:', selectedOutputIds) } - // If we're in debug mode, store the execution context for later steps - if (result.metadata?.isDebugSession && result.metadata.context) { - setDebugContext(result.metadata.context) + try { + // Clear any existing state + setDebugContext(null) - // Make sure to update pending blocks - if (result.metadata.pendingBlocks) { - setPendingBlocks(result.metadata.pendingBlocks) - } - } else { - // Normal execution completed - start with UI updates - setExecutionResult(result) - - // For better UI responsiveness, update state immediately - if (!isDebugModeEnabled) { - // Reset execution states right away for UI to update - setIsExecuting(false) - 
setIsDebugging(false) - setActiveBlocks(new Set()) - } - - // Show notification - addNotification( - result.success ? 'console' : 'error', - result.success - ? 'Workflow completed successfully' - : `Workflow execution failed: ${result.error}`, - activeWorkflowId + // Use the mergeSubblockState utility to get all block states + const mergedStates = mergeSubblockState(blocks) + const currentBlockStates = Object.entries(mergedStates).reduce( + (acc, [id, block]) => { + acc[id] = Object.entries(block.subBlocks).reduce( + (subAcc, [key, subBlock]) => { + subAcc[key] = subBlock.value + return subAcc + }, + {} as Record + ) + return acc + }, + {} as Record> ) - // In non-debug mode, persist logs (no need to wait for this) - // We explicitly don't await this to avoid blocking UI updates - persistLogs(executionId, result).catch((err) => { - logger.error('Error persisting logs:', { error: err }) - }) - } + // Get environment variables + const envVars = getAllVariables() + const envVarValues = Object.entries(envVars).reduce( + (acc, [key, variable]) => { + acc[key] = variable.value + return acc + }, + {} as Record + ) - return result - } catch (error: any) { - logger.error('Workflow Execution Error:', error) + // Get workflow variables + const workflowVars = activeWorkflowId ? 
getVariablesByWorkflowId(activeWorkflowId) : [] + const workflowVariables = workflowVars.reduce( + (acc, variable) => { + acc[variable.id] = variable + return acc + }, + {} as Record + ) - // Properly extract error message ensuring it's never undefined - let errorMessage = 'Unknown error' + // Create serialized workflow + const workflow = new Serializer().serializeWorkflow(mergedStates, edges, loops) - if (error instanceof Error) { - errorMessage = error.message || `Error: ${String(error)}` - } else if (typeof error === 'string') { - errorMessage = error - } else if (error && typeof error === 'object') { - // Fix the "undefined (undefined)" pattern specifically - if ( - error.message === 'undefined (undefined)' || - (error.error && - typeof error.error === 'object' && - error.error.message === 'undefined (undefined)') - ) { + // Create executor options with streaming support for chat + const executorOptions: any = { + // Default executor options + workflow, + currentBlockStates, + envVarValues, + workflowInput, + workflowVariables, + } + + // Add streaming context for chat executions + if (isChatExecution && selectedOutputIds && selectedOutputIds.length > 0) { + executorOptions.contextExtensions = { + stream: true, + selectedOutputIds, + edges: workflow.connections.map((conn) => ({ + source: conn.source, + target: conn.target, + })), + } + } + + // Create executor and store in global state + const newExecutor = new Executor(executorOptions) + setExecutor(newExecutor) + + // Execute workflow + const result = await newExecutor.execute(activeWorkflowId) + + // Streaming results are handled differently - they won't have a standard result + if (result instanceof ReadableStream) { + logger.info('Received streaming result from executor') + + // For streaming results, we need to handle them in the component + // that initiated the execution (chat panel) + return { + success: true, + stream: result, + } + } + + // Handle StreamingExecution format (combined stream + 
execution result) + if (result && typeof result === 'object' && 'stream' in result && 'execution' in result) { + logger.info('Received combined stream+execution result from executor') + + // Generate an executionId and store it in the execution metadata so that + // the chat component can persist the logs *after* the stream finishes. + const executionId = uuidv4() + + // Determine which block is streaming - typically the one that matches a selected output ID + let streamingBlockId = null + if (selectedOutputIds && selectedOutputIds.length > 0 && result.execution.logs) { + // Find the agent block in the logs that matches one of our selected outputs + const streamingBlock = result.execution.logs.find( + (log) => + log.blockType === 'agent' && + selectedOutputIds.some( + (id) => id === log.blockId || id.startsWith(`${log.blockId}_`) + ) + ) + if (streamingBlock) { + streamingBlockId = streamingBlock.blockId + logger.info(`Identified streaming block: ${streamingBlockId}`) + } + } + + // Attach streaming / source metadata and the newly generated executionId + result.execution.metadata = { + ...(result.execution.metadata || {}), + executionId, + source: isChatExecution ? 
'chat' : 'manual', + streamingBlockId, // Add the block ID to the metadata + } as any + + // Clean up any response objects with zero tokens in agent blocks to avoid confusion in console + if (result.execution.logs && Array.isArray(result.execution.logs)) { + result.execution.logs.forEach((log: any) => { + if (log.blockType === 'agent' && log.output?.response) { + const response = log.output.response + + // Check for zero tokens that will be estimated later + if ( + response.tokens && + (!response.tokens.completion || response.tokens.completion === 0) && + (!response.toolCalls || + !response.toolCalls.list || + response.toolCalls.list.length === 0) + ) { + // Remove tokens from console display to avoid confusion + // They'll be properly estimated in the execution logger + delete response.tokens + } + } + }) + } + + // Mark the execution as streaming so that downstream code can recognise it + ;(result.execution as any).isStreaming = true + + // Return both the stream and the execution object so the caller (chat panel) + // can collect the full content and then persist the logs in one go. 
+ // Also include processingPromise if available to ensure token counts are final + return { + success: true, + stream: result.stream, + execution: result.execution, + processingPromise: (result as any).processingPromise, + } + } + + // Add metadata about source being chat if applicable + if (isChatExecution) { + // Use type assertion for adding custom metadata + ;(result as any).metadata = { + ...(result.metadata || {}), + source: 'chat', + } + } + + // If we're in debug mode, store the execution context for later steps + if (result.metadata?.isDebugSession && result.metadata.context) { + setDebugContext(result.metadata.context) + + // Make sure to update pending blocks + if (result.metadata.pendingBlocks) { + setPendingBlocks(result.metadata.pendingBlocks) + } + } else { + // Normal execution completed - start with UI updates + setExecutionResult(result) + + // For better UI responsiveness, update state immediately + if (!isDebugModeEnabled) { + // Reset execution states right away for UI to update + setIsExecuting(false) + setIsDebugging(false) + setActiveBlocks(new Set()) + } + + // Show notification + addNotification( + result.success ? 'console' : 'error', + result.success + ? 
'Workflow completed successfully' + : `Workflow execution failed: ${result.error}`, + activeWorkflowId + ) + + // In non-debug mode, persist logs (no need to wait for this) + // We explicitly don't await this to avoid blocking UI updates + persistLogs(executionId, result).catch((err) => { + logger.error('Error persisting logs:', { error: err }) + }) + } + + return result + } catch (error: any) { + logger.error('Workflow Execution Error:', error) + + // Properly extract error message ensuring it's never undefined + let errorMessage = 'Unknown error' + + if (error instanceof Error) { + errorMessage = error.message || `Error: ${String(error)}` + } else if (typeof error === 'string') { + errorMessage = error + } else if (error && typeof error === 'object') { + // Fix the "undefined (undefined)" pattern specifically + if ( + error.message === 'undefined (undefined)' || + (error.error && + typeof error.error === 'object' && + error.error.message === 'undefined (undefined)') + ) { + errorMessage = 'API request failed - no specific error details available' + } + // Try to extract error details from potential API or execution errors + else if (error.message) { + errorMessage = error.message + } else if (error.error && typeof error.error === 'string') { + errorMessage = error.error + } else if (error.error && typeof error.error === 'object' && error.error.message) { + errorMessage = error.error.message + } else { + // Last resort: stringify the whole object + try { + errorMessage = `Error details: ${JSON.stringify(error)}` + } catch { + errorMessage = 'Error occurred but details could not be displayed' + } + } + } + + // Ensure errorMessage is never "undefined (undefined)" + if (errorMessage === 'undefined (undefined)') { errorMessage = 'API request failed - no specific error details available' } - // Try to extract error details from potential API or execution errors - else if (error.message) { - errorMessage = error.message - } else if (error.error && typeof error.error 
=== 'string') { - errorMessage = error.error - } else if (error.error && typeof error.error === 'object' && error.error.message) { - errorMessage = error.error.message + + // Set error result and show notification immediately + const errorResult = { + success: false, + output: { response: {} }, + error: errorMessage, + logs: [], + } + + // Update UI state immediately for better responsiveness + setExecutionResult(errorResult) + setIsExecuting(false) + setIsDebugging(false) + setActiveBlocks(new Set()) + + // Create a more user-friendly notification message + let notificationMessage = `Workflow execution failed` + + // Add URL for HTTP errors + if (error && error.request && error.request.url) { + // Don't show empty URL errors + if (error.request.url && error.request.url.trim() !== '') { + notificationMessage += `: Request to ${error.request.url} failed` + + // Add status if available + if (error.status) { + notificationMessage += ` (Status: ${error.status})` + } + } } else { - // Last resort: stringify the whole object - try { - errorMessage = `Error details: ${JSON.stringify(error)}` - } catch { - errorMessage = 'Error occurred but details could not be displayed' - } + // Regular errors + notificationMessage += `: ${errorMessage}` } - } - // Ensure errorMessage is never "undefined (undefined)" - if (errorMessage === 'undefined (undefined)') { - errorMessage = 'API request failed - no specific error details available' - } - - // Set error result and show notification immediately - const errorResult = { - success: false, - output: { response: {} }, - error: errorMessage, - logs: [], - } - - // Update UI state immediately for better responsiveness - setExecutionResult(errorResult) - setIsExecuting(false) - setIsDebugging(false) - setActiveBlocks(new Set()) - - // Create a more user-friendly notification message - let notificationMessage = `Workflow execution failed` - - // Add URL for HTTP errors - if (error && error.request && error.request.url) { - // Don't show 
empty URL errors - if (error.request.url && error.request.url.trim() !== '') { - notificationMessage += `: Request to ${error.request.url} failed` - - // Add status if available - if (error.status) { - notificationMessage += ` (Status: ${error.status})` - } + // Safely show error notification + try { + addNotification('error', notificationMessage, activeWorkflowId) + } catch (notificationError) { + logger.error('Error showing error notification:', notificationError) + // Fallback console error + console.error('Workflow execution failed:', errorMessage) } - } else { - // Regular errors - notificationMessage += `: ${errorMessage}` + + // Also send the error result to the API (don't await to keep UI responsive) + persistLogs(executionId, errorResult).catch((err) => { + logger.error('Error persisting logs:', { error: err }) + }) + + return errorResult } - - // Safely show error notification - try { - addNotification('error', notificationMessage, activeWorkflowId) - } catch (notificationError) { - logger.error('Error showing error notification:', notificationError) - // Fallback console error - console.error('Workflow execution failed:', errorMessage) - } - - // Also send the error result to the API (don't await to keep UI responsive) - persistLogs(executionId, errorResult).catch((err) => { - logger.error('Error persisting logs:', { error: err }) - }) - - return errorResult - } - }, [ - activeWorkflowId, - blocks, - edges, - loops, - addNotification, - toggleConsole, - togglePanel, - setActiveTab, - getAllVariables, - getVariablesByWorkflowId, - setIsExecuting, - setIsDebugging, - isDebugModeEnabled, - isDebugging, - setActiveBlocks, - ]) + }, + [ + activeWorkflowId, + blocks, + edges, + loops, + addNotification, + toggleConsole, + togglePanel, + setActiveTab, + getAllVariables, + getVariablesByWorkflowId, + setIsExecuting, + setIsDebugging, + isDebugModeEnabled, + isDebugging, + setActiveBlocks, + ] + ) /** * Handles stepping through workflow execution in debug mode 
diff --git a/sim/app/w/[id]/layout.tsx b/apps/sim/app/w/[id]/layout.tsx similarity index 100% rename from sim/app/w/[id]/layout.tsx rename to apps/sim/app/w/[id]/layout.tsx diff --git a/sim/app/w/[id]/page.tsx b/apps/sim/app/w/[id]/page.tsx similarity index 100% rename from sim/app/w/[id]/page.tsx rename to apps/sim/app/w/[id]/page.tsx diff --git a/sim/app/w/[id]/workflow.tsx b/apps/sim/app/w/[id]/workflow.tsx similarity index 100% rename from sim/app/w/[id]/workflow.tsx rename to apps/sim/app/w/[id]/workflow.tsx diff --git a/sim/app/w/components/providers/providers.tsx b/apps/sim/app/w/components/providers/providers.tsx similarity index 100% rename from sim/app/w/components/providers/providers.tsx rename to apps/sim/app/w/components/providers/providers.tsx diff --git a/sim/app/w/components/providers/theme-provider.tsx b/apps/sim/app/w/components/providers/theme-provider.tsx similarity index 100% rename from sim/app/w/components/providers/theme-provider.tsx rename to apps/sim/app/w/components/providers/theme-provider.tsx diff --git a/sim/app/w/components/sidebar/components/help-modal/components/help-form/help-form.tsx b/apps/sim/app/w/components/sidebar/components/help-modal/components/help-form/help-form.tsx similarity index 100% rename from sim/app/w/components/sidebar/components/help-modal/components/help-form/help-form.tsx rename to apps/sim/app/w/components/sidebar/components/help-modal/components/help-form/help-form.tsx diff --git a/sim/app/w/components/sidebar/components/help-modal/help-modal.tsx b/apps/sim/app/w/components/sidebar/components/help-modal/help-modal.tsx similarity index 100% rename from sim/app/w/components/sidebar/components/help-modal/help-modal.tsx rename to apps/sim/app/w/components/sidebar/components/help-modal/help-modal.tsx diff --git a/sim/app/w/components/sidebar/components/nav-section/nav-section.tsx b/apps/sim/app/w/components/sidebar/components/nav-section/nav-section.tsx similarity index 100% rename from 
sim/app/w/components/sidebar/components/nav-section/nav-section.tsx rename to apps/sim/app/w/components/sidebar/components/nav-section/nav-section.tsx diff --git a/sim/app/w/components/sidebar/components/settings-modal/components/account/account.tsx b/apps/sim/app/w/components/sidebar/components/settings-modal/components/account/account.tsx similarity index 100% rename from sim/app/w/components/sidebar/components/settings-modal/components/account/account.tsx rename to apps/sim/app/w/components/sidebar/components/settings-modal/components/account/account.tsx diff --git a/sim/app/w/components/sidebar/components/settings-modal/components/api-keys/api-keys.tsx b/apps/sim/app/w/components/sidebar/components/settings-modal/components/api-keys/api-keys.tsx similarity index 100% rename from sim/app/w/components/sidebar/components/settings-modal/components/api-keys/api-keys.tsx rename to apps/sim/app/w/components/sidebar/components/settings-modal/components/api-keys/api-keys.tsx diff --git a/sim/app/w/components/sidebar/components/settings-modal/components/credentials/credentials.tsx b/apps/sim/app/w/components/sidebar/components/settings-modal/components/credentials/credentials.tsx similarity index 100% rename from sim/app/w/components/sidebar/components/settings-modal/components/credentials/credentials.tsx rename to apps/sim/app/w/components/sidebar/components/settings-modal/components/credentials/credentials.tsx diff --git a/sim/app/w/components/sidebar/components/settings-modal/components/environment/environment.tsx b/apps/sim/app/w/components/sidebar/components/settings-modal/components/environment/environment.tsx similarity index 100% rename from sim/app/w/components/sidebar/components/settings-modal/components/environment/environment.tsx rename to apps/sim/app/w/components/sidebar/components/settings-modal/components/environment/environment.tsx diff --git a/sim/app/w/components/sidebar/components/settings-modal/components/general/general.tsx 
b/apps/sim/app/w/components/sidebar/components/settings-modal/components/general/general.tsx similarity index 88% rename from sim/app/w/components/sidebar/components/settings-modal/components/general/general.tsx rename to apps/sim/app/w/components/sidebar/components/settings-modal/components/general/general.tsx index fbfea88f7..fbc2a4f4c 100644 --- a/sim/app/w/components/sidebar/components/settings-modal/components/general/general.tsx +++ b/apps/sim/app/w/components/sidebar/components/settings-modal/components/general/general.tsx @@ -1,6 +1,7 @@ -import { useRouter } from 'next/navigation' -import { Info, AlertTriangle } from 'lucide-react' import { useEffect, useState } from 'react' +import { useRouter } from 'next/navigation' +import { AlertTriangle, Info } from 'lucide-react' +import { Alert, AlertDescription } from '@/components/ui/alert' import { AlertDialog, AlertDialogAction, @@ -24,9 +25,8 @@ import { import { Skeleton } from '@/components/ui/skeleton' import { Switch } from '@/components/ui/switch' import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip' -import { Alert, AlertDescription } from '@/components/ui/alert' -import { resetAllStores } from '@/stores' import { useGeneralStore } from '@/stores/settings/general/store' +import { resetAllStores } from '@/stores' const TOOLTIPS = { debugMode: 'Enable visual debugging information during execution.', @@ -38,20 +38,20 @@ const TOOLTIPS = { export function General() { const router = useRouter() const [retryCount, setRetryCount] = useState(0) - - const isLoading = useGeneralStore(state => state.isLoading) - const error = useGeneralStore(state => state.error) - const theme = useGeneralStore(state => state.theme) - const isAutoConnectEnabled = useGeneralStore(state => state.isAutoConnectEnabled) - const isDebugModeEnabled = useGeneralStore(state => state.isDebugModeEnabled) - const isAutoFillEnvVarsEnabled = useGeneralStore(state => state.isAutoFillEnvVarsEnabled) - - const setTheme = 
useGeneralStore(state => state.setTheme) - const toggleAutoConnect = useGeneralStore(state => state.toggleAutoConnect) - const toggleDebugMode = useGeneralStore(state => state.toggleDebugMode) - const toggleAutoFillEnvVars = useGeneralStore(state => state.toggleAutoFillEnvVars) - const loadSettings = useGeneralStore(state => state.loadSettings) - + + const isLoading = useGeneralStore((state) => state.isLoading) + const error = useGeneralStore((state) => state.error) + const theme = useGeneralStore((state) => state.theme) + const isAutoConnectEnabled = useGeneralStore((state) => state.isAutoConnectEnabled) + const isDebugModeEnabled = useGeneralStore((state) => state.isDebugModeEnabled) + const isAutoFillEnvVarsEnabled = useGeneralStore((state) => state.isAutoFillEnvVarsEnabled) + + const setTheme = useGeneralStore((state) => state.setTheme) + const toggleAutoConnect = useGeneralStore((state) => state.toggleAutoConnect) + const toggleDebugMode = useGeneralStore((state) => state.toggleDebugMode) + const toggleAutoFillEnvVars = useGeneralStore((state) => state.toggleAutoFillEnvVars) + const loadSettings = useGeneralStore((state) => state.loadSettings) + useEffect(() => { const loadData = async () => { await loadSettings(retryCount > 0) @@ -62,19 +62,19 @@ export function General() { const handleThemeChange = (value: 'system' | 'light' | 'dark') => { setTheme(value) } - + const handleDebugModeChange = (checked: boolean) => { if (checked !== isDebugModeEnabled) { toggleDebugMode() } } - + const handleAutoConnectChange = (checked: boolean) => { if (checked !== isAutoConnectEnabled) { toggleAutoConnect() } } - + const handleAutoFillEnvVarsChange = (checked: boolean) => { if (checked !== isAutoFillEnvVarsEnabled) { toggleAutoFillEnvVars() @@ -87,7 +87,7 @@ export function General() { } const handleRetry = () => { - setRetryCount(prev => prev + 1) + setRetryCount((prev) => prev + 1) } return ( @@ -97,12 +97,7 @@ export function General() { Failed to load settings: {error} - 
@@ -127,11 +122,7 @@ export function General() { Theme
            - diff --git a/sim/app/w/components/sidebar/components/settings-modal/components/privacy/privacy.tsx b/apps/sim/app/w/components/sidebar/components/settings-modal/components/privacy/privacy.tsx similarity index 75% rename from sim/app/w/components/sidebar/components/settings-modal/components/privacy/privacy.tsx rename to apps/sim/app/w/components/sidebar/components/settings-modal/components/privacy/privacy.tsx index 77ca1cd35..a11180894 100644 --- a/sim/app/w/components/sidebar/components/settings-modal/components/privacy/privacy.tsx +++ b/apps/sim/app/w/components/sidebar/components/settings-modal/components/privacy/privacy.tsx @@ -2,24 +2,25 @@ import { useEffect } from 'react' import { Info } from 'lucide-react' -import { Switch } from '@/components/ui/switch' -import { Label } from '@/components/ui/label' import { Button } from '@/components/ui/button' +import { Label } from '@/components/ui/label' import { Skeleton } from '@/components/ui/skeleton' +import { Switch } from '@/components/ui/switch' import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip' import { useGeneralStore } from '@/stores/settings/general/store' const TOOLTIPS = { - telemetry: 'We collect anonymous data about feature usage, performance, and errors to improve the application.', + telemetry: + 'We collect anonymous data about feature usage, performance, and errors to improve the application.', } export function Privacy() { - const isLoading = useGeneralStore(state => state.isLoading) - const telemetryEnabled = useGeneralStore(state => state.telemetryEnabled) - const setTelemetryEnabled = useGeneralStore(state => state.setTelemetryEnabled) - const setTelemetryNotifiedUser = useGeneralStore(state => state.setTelemetryNotifiedUser) - const loadSettings = useGeneralStore(state => state.loadSettings) - + const isLoading = useGeneralStore((state) => state.isLoading) + const telemetryEnabled = useGeneralStore((state) => state.telemetryEnabled) + const 
setTelemetryEnabled = useGeneralStore((state) => state.setTelemetryEnabled) + const setTelemetryNotifiedUser = useGeneralStore((state) => state.setTelemetryNotifiedUser) + const loadSettings = useGeneralStore((state) => state.loadSettings) + useEffect(() => { loadSettings() }, [loadSettings]) @@ -29,7 +30,7 @@ export function Privacy() { if (checked) { setTelemetryNotifiedUser(true) - + if (typeof window !== 'undefined') { fetch('/api/telemetry', { method: 'POST', @@ -85,23 +86,23 @@ export function Privacy() { )}
          - +

          - We use OpenTelemetry to collect anonymous usage data to improve Sim Studio. - All data is collected in accordance with our privacy policy, and you can opt-out at any time. - This setting applies to your account on all devices. + We use OpenTelemetry to collect anonymous usage data to improve Sim Studio. All data is + collected in accordance with our privacy policy, and you can opt-out at any time. This + setting applies to your account on all devices.

          ) -} +} const SettingRowSkeleton = () => ( -
          -
          - -
          - +
          +
          +
          - ) \ No newline at end of file + +
          +) diff --git a/sim/app/w/components/sidebar/components/settings-modal/components/settings-navigation/settings-navigation.tsx b/apps/sim/app/w/components/sidebar/components/settings-modal/components/settings-navigation/settings-navigation.tsx similarity index 75% rename from sim/app/w/components/sidebar/components/settings-modal/components/settings-navigation/settings-navigation.tsx rename to apps/sim/app/w/components/sidebar/components/settings-modal/components/settings-navigation/settings-navigation.tsx index b36bcc2bb..f3881b6e1 100644 --- a/sim/app/w/components/sidebar/components/settings-modal/components/settings-navigation/settings-navigation.tsx +++ b/apps/sim/app/w/components/sidebar/components/settings-modal/components/settings-navigation/settings-navigation.tsx @@ -1,17 +1,42 @@ -import { Key, KeyRound, KeySquare, Settings, UserCircle, CreditCard, Users, Shield } from 'lucide-react' -import { cn } from '@/lib/utils' +import { + CreditCard, + Key, + KeyRound, + KeySquare, + Settings, + Shield, + UserCircle, + Users, +} from 'lucide-react' import { isDev } from '@/lib/environment' +import { cn } from '@/lib/utils' interface SettingsNavigationProps { activeSection: string onSectionChange: ( - section: 'general' | 'environment' | 'account' | 'credentials' | 'apikeys' | 'subscription' | 'team' | 'privacy' + section: + | 'general' + | 'environment' + | 'account' + | 'credentials' + | 'apikeys' + | 'subscription' + | 'team' + | 'privacy' ) => void isTeam?: boolean } type NavigationItem = { - id: 'general' | 'environment' | 'account' | 'credentials' | 'apikeys' | 'subscription' | 'team' | 'privacy' + id: + | 'general' + | 'environment' + | 'account' + | 'credentials' + | 'apikeys' + | 'subscription' + | 'team' + | 'privacy' label: string icon: React.ComponentType<{ className?: string }> hideInDev?: boolean @@ -64,18 +89,22 @@ const allNavigationItems: NavigationItem[] = [ }, ] -export function SettingsNavigation({ activeSection, onSectionChange, isTeam 
= false }: SettingsNavigationProps) { - const navigationItems = allNavigationItems.filter(item => { +export function SettingsNavigation({ + activeSection, + onSectionChange, + isTeam = false, +}: SettingsNavigationProps) { + const navigationItems = allNavigationItems.filter((item) => { // Hide items based on development environment if (item.hideInDev && isDev) { return false } - + // Hide team tab if user doesn't have team subscription if (item.requiresTeam && !isTeam) { return false } - + return true }) diff --git a/sim/app/w/components/sidebar/components/settings-modal/components/subscription/subscription.tsx b/apps/sim/app/w/components/sidebar/components/settings-modal/components/subscription/subscription.tsx similarity index 80% rename from sim/app/w/components/sidebar/components/settings-modal/components/subscription/subscription.tsx rename to apps/sim/app/w/components/sidebar/components/settings-modal/components/subscription/subscription.tsx index 9b36254bd..ee04a10a8 100644 --- a/sim/app/w/components/sidebar/components/settings-modal/components/subscription/subscription.tsx +++ b/apps/sim/app/w/components/sidebar/components/settings-modal/components/subscription/subscription.tsx @@ -1,10 +1,7 @@ -import { useState, useEffect, useMemo } from 'react' -import { client, useSession, useActiveOrganization, useSubscription } from '@/lib/auth-client' -import { Alert, AlertDescription, AlertTitle } from '@/components/ui/alert' +import { useEffect, useMemo, useState } from 'react' import { AlertCircle } from 'lucide-react' +import { Alert, AlertDescription, AlertTitle } from '@/components/ui/alert' import { Button } from '@/components/ui/button' -import { Progress } from '@/components/ui/progress' -import { Skeleton } from '@/components/ui/skeleton' import { Dialog, DialogContent, @@ -13,6 +10,8 @@ import { DialogHeader, DialogTitle, } from '@/components/ui/dialog' +import { Label } from '@/components/ui/label' +import { Progress } from '@/components/ui/progress' 
import { Select, SelectContent, @@ -20,7 +19,8 @@ import { SelectTrigger, SelectValue, } from '@/components/ui/select' -import { Label } from '@/components/ui/label' +import { Skeleton } from '@/components/ui/skeleton' +import { client, useActiveOrganization, useSession, useSubscription } from '@/lib/auth-client' import { createLogger } from '@/lib/logs/console-logger' const logger = createLogger('Subscription') @@ -35,7 +35,7 @@ interface SubscriptionProps { } const useSubscriptionData = ( - userId: string | null | undefined, + userId: string | null | undefined, activeOrgId: string | null | undefined, cachedIsPro?: boolean, cachedIsTeam?: boolean, @@ -51,25 +51,29 @@ const useSubscriptionData = ( isExceeded: boolean currentUsage: number limit: number - }>(cachedUsageData || { - percentUsed: 0, - isWarning: false, - isExceeded: false, - currentUsage: 0, - limit: 0 - }) + }>( + cachedUsageData || { + percentUsed: 0, + isWarning: false, + isExceeded: false, + currentUsage: 0, + limit: 0, + } + ) const [subscriptionData, setSubscriptionData] = useState(cachedSubscriptionData || null) - const [loading, setLoading] = useState(isParentLoading !== undefined ? isParentLoading : true) + const [loading, setLoading] = useState( + isParentLoading !== undefined ? 
isParentLoading : true + ) const [error, setError] = useState(null) const subscription = useSubscription() useEffect(() => { if ( - isParentLoading !== undefined || - (cachedIsPro !== undefined && - cachedIsTeam !== undefined && - cachedUsageData && - cachedSubscriptionData) + isParentLoading !== undefined || + (cachedIsPro !== undefined && + cachedIsTeam !== undefined && + cachedUsageData && + cachedSubscriptionData) ) { if (cachedIsPro !== undefined) setIsPro(cachedIsPro) if (cachedIsTeam !== undefined) setIsTeam(cachedIsTeam) @@ -81,95 +85,93 @@ const useSubscriptionData = ( async function loadSubscriptionData() { if (!userId) return - + try { setLoading(true) setError(null) - + // Fetch subscription status and usage data in parallel const [proStatusResponse, usageResponse] = await Promise.all([ fetch('/api/user/subscription'), - fetch('/api/user/usage') + fetch('/api/user/usage'), ]) - + if (!proStatusResponse.ok) { throw new Error('Failed to fetch subscription status') } if (!usageResponse.ok) { throw new Error('Failed to fetch usage data') } - + // Process the responses const proStatusData = await proStatusResponse.json() setIsPro(proStatusData.isPro) setIsTeam(proStatusData.isTeam) - + const usageDataResponse = await usageResponse.json() setUsageData(usageDataResponse) - - logger.info('Subscription status and usage data retrieved', { - isPro: proStatusData.isPro, + + logger.info('Subscription status and usage data retrieved', { + isPro: proStatusData.isPro, isTeam: proStatusData.isTeam, - usage: usageDataResponse + usage: usageDataResponse, }) - + // Main subscription logic - prioritize organization team subscription let activeSubscription = null - + // First check if user has an active organization with a team subscription if (activeOrgId) { logger.info('Checking organization subscription first', { orgId: activeOrgId }) - + // Get the organization's subscription const result = await subscription.list({ - query: { referenceId: activeOrgId } + query: { 
referenceId: activeOrgId }, }) - + const orgSubscriptions = result.data const orgSubError = 'error' in result ? result.error : null - + if (orgSubError) { logger.error('Error fetching organization subscription details', orgSubError) } else if (orgSubscriptions) { // Find active team subscription for the organization activeSubscription = orgSubscriptions.find( - sub => sub.status === 'active' && sub.plan === 'team' + (sub) => sub.status === 'active' && sub.plan === 'team' ) - + if (activeSubscription) { logger.info('Using organization team subscription as primary', { id: activeSubscription.id, - seats: activeSubscription.seats + seats: activeSubscription.seats, }) } } } - + // If no org team subscription was found, check for personal subscription if (!activeSubscription) { // Fetch detailed subscription data for the user const result = await subscription.list() - + const userSubscriptions = result.data const userSubError = 'error' in result ? result.error : null - + if (userSubError) { logger.error('Error fetching user subscription details', userSubError) } else if (userSubscriptions) { // Find active subscription for the user - activeSubscription = userSubscriptions.find( - sub => sub.status === 'active' - ) + activeSubscription = userSubscriptions.find((sub) => sub.status === 'active') } } - + if (activeSubscription) { - logger.info('Using active subscription', { + logger.info('Using active subscription', { id: activeSubscription.id, plan: activeSubscription.plan, - status: activeSubscription.status + status: activeSubscription.status, }) - + setSubscriptionData(activeSubscription) } else { logger.warn('No active subscription found') @@ -181,9 +183,18 @@ const useSubscriptionData = ( setLoading(false) } } - + loadSubscriptionData() - }, [userId, activeOrgId, subscription, cachedIsPro, cachedIsTeam, cachedUsageData, cachedSubscriptionData, isParentLoading]) + }, [ + userId, + activeOrgId, + subscription, + cachedIsPro, + cachedIsTeam, + cachedUsageData, + 
cachedSubscriptionData, + isParentLoading, + ]) return { isPro, isTeam, usageData, subscriptionData, loading, error } } @@ -194,21 +205,21 @@ export function Subscription({ cachedIsTeam, cachedUsageData, cachedSubscriptionData, - isLoading + isLoading, }: SubscriptionProps) { const { data: session } = useSession() const { data: activeOrg } = useActiveOrganization() const subscription = useSubscription() - - const { - isPro, - isTeam, - usageData, - subscriptionData, - loading, - error: subscriptionError + + const { + isPro, + isTeam, + usageData, + subscriptionData, + loading, + error: subscriptionError, } = useSubscriptionData( - session?.user?.id, + session?.user?.id, activeOrg?.id, cachedIsPro, cachedIsTeam, @@ -216,7 +227,7 @@ export function Subscription({ cachedSubscriptionData, isLoading ) - + const [isCanceling, setIsCanceling] = useState(false) const [error, setError] = useState(null) const [isTeamDialogOpen, setIsTeamDialogOpen] = useState(false) @@ -236,17 +247,17 @@ export function Subscription({ setError('You need to be logged in to upgrade your subscription') return } - + setIsUpgrading(true) setError(null) - + try { const result = await subscription.upgrade({ plan: plan, successUrl: window.location.href, cancelUrl: window.location.href, }) - + if ('error' in result && result.error) { setError(result.error.message || `There was an error upgrading to the ${plan} plan`) logger.error('Subscription upgrade error:', result.error) @@ -264,15 +275,15 @@ export function Subscription({ setError('You need to be logged in to cancel your subscription') return } - + setIsCanceling(true) setError(null) - + try { const result = await subscription.cancel({ returnUrl: window.location.href, }) - + if ('error' in result && result.error) { setError(result.error.message || 'There was an error canceling your subscription') logger.error('Subscription cancellation error:', result.error) @@ -294,10 +305,10 @@ export function Subscription({ setError('You need to be logged in 
to upgrade your team subscription') return } - + setIsUpgradingTeam(true) setError(null) - + try { const result = await subscription.upgrade({ plan: 'team', @@ -305,7 +316,7 @@ export function Subscription({ successUrl: window.location.href, cancelUrl: window.location.href, }) - + if ('error' in result && result.error) { setError(result.error.message || 'There was an error upgrading to the team plan') logger.error('Team subscription upgrade error:', result.error) @@ -324,28 +335,28 @@ export function Subscription({ return (

          Subscription Plans

          - + {error && ( {error} )} - + {(usageData.isWarning || usageData.isExceeded) && !isPro && ( {usageData.isExceeded ? 'Usage Limit Exceeded' : 'Usage Warning'} - You've used {usageData.percentUsed}% of your free tier limit - ({usageData.currentUsage.toFixed(2)}$ of {usageData.limit}$). - {usageData.isExceeded - ? ' You have exceeded your limit. Upgrade to Pro to continue using all features.' + You've used {usageData.percentUsed}% of your free tier limit ( + {usageData.currentUsage.toFixed(2)}$ of {usageData.limit}$). + {usageData.isExceeded + ? ' You have exceeded your limit. Upgrade to Pro to continue using all features.' : ' Upgrade to Pro to avoid any service interruptions.'} )} - + {loading ? ( ) : ( @@ -354,14 +365,16 @@ export function Subscription({ {/* Free Tier */}

          Free Tier

          -

          For individual users and small projects

          - +

          + For individual users and small projects +

          +
          • β€’ ${!isPro ? 5 : usageData.limit} of inference credits
          • β€’ Basic features
          • β€’ No sharing capabilities
          - + {!isPro && (
          @@ -370,50 +383,43 @@ export function Subscription({ {usageData.currentUsage.toFixed(2)}$ / {usageData.limit}$
          - *]:bg-destructive' - : usageData.isWarning - ? 'bg-muted [&>*]:bg-amber-500' - : '' + usageData.isExceeded + ? 'bg-muted [&>*]:bg-destructive' + : usageData.isWarning + ? 'bg-muted [&>*]:bg-amber-500' + : '' }`} />
          )} - +
          {!isPro ? (
          Current Plan
          ) : ( - )}
          - + {/* Pro Tier */}

          Pro Tier

          For professional users and teams

          - +
          • β€’ ${isPro && !isTeam ? usageData.limit : 20} of inference credits
          • β€’ All features included
          • β€’ Workflow sharing capabilities
          - + {isPro && !isTeam && (
          @@ -422,51 +428,53 @@ export function Subscription({ {usageData.currentUsage.toFixed(2)}$ / {usageData.limit}$
          - *]:bg-destructive' - : usageData.isWarning - ? 'bg-muted [&>*]:bg-amber-500' - : '' + usageData.isExceeded + ? 'bg-muted [&>*]:bg-destructive' + : usageData.isWarning + ? 'bg-muted [&>*]:bg-amber-500' + : '' }`} />
          )} - +
          {isPro && !isTeam ? (
          Current Plan
          ) : ( - )}
          - + {/* Team Tier */}

          Team Tier

          For collaborative teams

          - +
          • β€’ $40 of inference credits per seat
          • β€’ All Pro features included
          • β€’ Real-time multiplayer collaboration
          • β€’ Shared workspace for team members
          - + {isTeam && (
          @@ -475,62 +483,63 @@ export function Subscription({ {usageData.currentUsage.toFixed(2)}$ / {(subscriptionData?.seats || 1) * 40}$
          - *]:bg-destructive' - : usageData.isWarning - ? 'bg-muted [&>*]:bg-amber-500' - : '' + usageData.isExceeded + ? 'bg-muted [&>*]:bg-destructive' + : usageData.isWarning + ? 'bg-muted [&>*]:bg-amber-500' + : '' }`} /> - +
          Team Size - {subscriptionData?.seats || 1} {subscriptionData?.seats === 1 ? 'seat' : 'seats'} + + {subscriptionData?.seats || 1}{' '} + {subscriptionData?.seats === 1 ? 'seat' : 'seats'} +
          )} - +
          {isTeam ? (
          Current Plan
          ) : ( - )}
          - + {/* Enterprise Tier */}

          Enterprise

          -

          For larger teams and organizations

          - +

          + For larger teams and organizations +

          +
          • β€’ Custom cost limits
          • β€’ Priority support
          • β€’ Custom integrations
          • β€’ Dedicated account manager
          - +
          -
          - + {subscriptionData && (

          Subscription Details

          @@ -556,30 +565,25 @@ export function Subscription({ )} {isPro && (
          -
          )}
          )} - + Team Subscription - Set up a team workspace with collaborative features. Each seat costs $40/month and gets $40 of inference credits. + Set up a team workspace with collaborative features. Each seat costs $40/month and + gets $40 of inference credits. - +
          - +

          - Your team will have {seats} {seats === 1 ? 'seat' : 'seats'} with a total of ${seats * 40} inference credits per month. + Your team will have {seats} {seats === 1 ? 'seat' : 'seats'} with a total of $ + {seats * 40} inference credits per month.

          - + -
          @@ -626,74 +626,74 @@ export function Subscription({ )}
          ) -} +} // Skeleton component for subscription loading state function SubscriptionSkeleton() { return ( -
          +
          {/* Free Tier Skeleton */}
          - +
          - +
          - + {/* Pro Tier Skeleton */}
          - +
          - +
          - + {/* Team Tier Skeleton */}
          - +
          - +
          - + {/* Enterprise Tier Skeleton */}
          - +
          - +
          @@ -706,4 +706,4 @@ function SubscriptionSkeleton() { // Skeleton component for loading state in buttons function ButtonSkeleton() { return -} \ No newline at end of file +} diff --git a/sim/app/w/components/sidebar/components/settings-modal/components/team-management/team-management.tsx b/apps/sim/app/w/components/sidebar/components/settings-modal/components/team-management/team-management.tsx similarity index 75% rename from sim/app/w/components/sidebar/components/settings-modal/components/team-management/team-management.tsx rename to apps/sim/app/w/components/sidebar/components/settings-modal/components/team-management/team-management.tsx index eb9f72054..401f93c4b 100644 --- a/sim/app/w/components/sidebar/components/settings-modal/components/team-management/team-management.tsx +++ b/apps/sim/app/w/components/sidebar/components/settings-modal/components/team-management/team-management.tsx @@ -1,8 +1,7 @@ -import { useState, useEffect, useCallback } from 'react' -import { Copy, PlusCircle, UserX, RefreshCw, CheckCircle, XCircle, Building } from 'lucide-react' +import { useCallback, useEffect, useState } from 'react' +import { Building, CheckCircle, Copy, PlusCircle, RefreshCw, UserX, XCircle } from 'lucide-react' +import { Alert, AlertDescription, AlertTitle } from '@/components/ui/alert' import { Button } from '@/components/ui/button' -import { Input } from '@/components/ui/input' -import { Skeleton } from '@/components/ui/skeleton' import { Dialog, DialogContent, @@ -11,10 +10,11 @@ import { DialogHeader, DialogTitle, } from '@/components/ui/dialog' -import { Alert, AlertDescription, AlertTitle } from '@/components/ui/alert' -import { client, useSession } from '@/lib/auth-client' -import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs' +import { Input } from '@/components/ui/input' import { Progress } from '@/components/ui/progress' +import { Skeleton } from '@/components/ui/skeleton' +import { Tabs, TabsContent, TabsList, 
TabsTrigger } from '@/components/ui/tabs' +import { client, useSession } from '@/lib/auth-client' import { createLogger } from '@/lib/logs/console-logger' const logger = createLogger('TeamManagement') @@ -22,7 +22,7 @@ const logger = createLogger('TeamManagement') export function TeamManagement() { const { data: session } = useSession() const { data: activeOrg } = client.useActiveOrganization() - + const [isLoading, setIsLoading] = useState(true) const [error, setError] = useState(null) const [organizations, setOrganizations] = useState([]) @@ -31,10 +31,10 @@ export function TeamManagement() { const [isCreatingOrg, setIsCreatingOrg] = useState(false) const [createOrgDialogOpen, setCreateOrgDialogOpen] = useState(false) const [removeMemberDialog, setRemoveMemberDialog] = useState<{ - open: boolean; - memberId: string; - memberName: string; - shouldReduceSeats: boolean; + open: boolean + memberId: string + memberName: string + shouldReduceSeats: boolean }>({ open: false, memberId: '', memberName: '', shouldReduceSeats: false }) const [orgName, setOrgName] = useState('') const [orgSlug, setOrgSlug] = useState('') @@ -46,30 +46,29 @@ export function TeamManagement() { const [hasTeamPlan, setHasTeamPlan] = useState(false) const [userRole, setUserRole] = useState('member') const [isAdminOrOwner, setIsAdminOrOwner] = useState(false) - + const loadData = useCallback(async () => { if (!session?.user) return - + try { setIsLoading(true) setError(null) - + // Get all organizations the user is a member of const orgsResponse = await client.organization.list() setOrganizations(orgsResponse.data || []) - + // Check if user has a team subscription const response = await fetch('/api/user/subscription') const data = await response.json() setHasTeamPlan(data.isTeam) - + // If user has team plan but no organizations, prompt to create one if (data.isTeam && (!orgsResponse.data || orgsResponse.data.length === 0)) { setOrgName(`${session.user.name || 'My'}'s Team`) 
setOrgSlug(generateSlug(`${session.user.name || 'My'}'s Team`)) setCreateOrgDialogOpen(true) } - } catch (err: any) { setError(err.message || 'Failed to load data') logger.error('Failed to load data:', err) @@ -77,73 +76,71 @@ export function TeamManagement() { setIsLoading(false) } }, [session?.user]) - + // Update local state when the active organization changes useEffect(() => { if (activeOrg) { setActiveOrganization(activeOrg) - + // Determine the user's role in this organization if (session?.user?.email && activeOrg.members) { const currentMember = activeOrg.members.find( (m: any) => m.user?.email === session.user?.email ) - + if (currentMember) { setUserRole(currentMember.role) setIsAdminOrOwner(currentMember.role === 'owner' || currentMember.role === 'admin') - logger.info('User role in organization', { - role: currentMember.role, - isAdminOrOwner: currentMember.role === 'owner' || currentMember.role === 'admin' + logger.info('User role in organization', { + role: currentMember.role, + isAdminOrOwner: currentMember.role === 'owner' || currentMember.role === 'admin', }) } } - + // Load subscription data for the organization if (activeOrg.id) { loadOrganizationSubscription(activeOrg.id) } } }, [activeOrg, session?.user?.email]) - + // Load organization's subscription data const loadOrganizationSubscription = async (orgId: string) => { try { setIsLoadingSubscription(true) logger.info('Loading subscription for organization', { orgId }) - + const { data, error } = await client.subscription.list({ - query: { referenceId: orgId } + query: { referenceId: orgId }, }) - + if (error) { logger.error('Error fetching organization subscription', { error }) setError('Failed to load subscription data') } else { - logger.info('Organization subscription data loaded', { - subscriptions: data?.map(s => ({ + logger.info('Organization subscription data loaded', { + subscriptions: data?.map((s) => ({ id: s.id, plan: s.plan, status: s.status, seats: s.seats, - referenceId: 
s.referenceId - })) + referenceId: s.referenceId, + })), }) - + // Filter to only active team subscription - const teamSubscription = data?.find( - sub => sub.status === 'active' && sub.plan === 'team' - ) - + const teamSubscription = data?.find((sub) => sub.status === 'active' && sub.plan === 'team') + if (teamSubscription) { - logger.info('Found active team subscription', { + logger.info('Found active team subscription', { id: teamSubscription.id, - seats: teamSubscription.seats + seats: teamSubscription.seats, }) setSubscriptionData([teamSubscription]) } else { - logger.warn('No active team subscription found for organization', { - orgId + logger.warn('No active team subscription found for organization', { + orgId, }) setSubscriptionData([]) } @@ -155,20 +152,20 @@ export function TeamManagement() { setIsLoadingSubscription(false) } } - + // Initial data loading useEffect(() => { loadData() }, [loadData]) - + // Refresh organization data const refreshOrganization = useCallback(async () => { if (!activeOrganization?.id) return - + try { const fullOrgResponse = await client.organization.getFullOrganization() setActiveOrganization(fullOrgResponse.data) - + // Also refresh subscription data when organization is refreshed if (fullOrgResponse.data?.id) { await loadOrganizationSubscription(fullOrgResponse.data.id) @@ -177,46 +174,48 @@ export function TeamManagement() { setError(err.message || 'Failed to refresh organization data') } }, [activeOrganization?.id]) - + // Handle seat reduction - remove members when seats are reduced const handleReduceSeats = async () => { - if (!session?.user || !activeOrganization || !subscriptionData || subscriptionData.length === 0) return - + if (!session?.user || !activeOrganization || !subscriptionData || subscriptionData.length === 0) + return + const currentSeats = subscriptionData[0]?.seats || 0 if (currentSeats <= 1) { - setError("Cannot reduce seats below 1") + setError('Cannot reduce seats below 1') return } - + // Calculate 
current usage const currentMemberCount = activeOrganization.members?.length || 0 - const pendingInvitationCount = activeOrganization.invitations?.filter( - (inv: any) => inv.status === 'pending' - ).length || 0 + const pendingInvitationCount = + activeOrganization.invitations?.filter((inv: any) => inv.status === 'pending').length || 0 const totalCount = currentMemberCount + pendingInvitationCount - + // Check if we need to remove members before reducing seats if (totalCount >= currentSeats) { - setError(`You have ${totalCount} active members/invitations. Please remove members or cancel invitations before reducing seats.`) + setError( + `You have ${totalCount} active members/invitations. Please remove members or cancel invitations before reducing seats.` + ) return } - + try { setIsLoading(true) setError(null) - + // Reduce the seats by 1 const newSeatCount = currentSeats - 1 - + // Upgrade with reduced seat count const { error } = await client.subscription.upgrade({ plan: 'team', referenceId: activeOrganization.id, successUrl: window.location.href, cancelUrl: window.location.href, - seats: newSeatCount + seats: newSeatCount, }) - + if (error) { setError(error.message || 'Failed to update seat count') } else { @@ -228,92 +227,91 @@ export function TeamManagement() { setIsLoading(false) } } - + // Generate a slug from organization name const generateSlug = (name: string) => { return name.toLowerCase().replace(/[^a-z0-9]/g, '-') } - + // Handle organization name change const handleOrgNameChange = (e: React.ChangeEvent) => { const newName = e.target.value setOrgName(newName) setOrgSlug(generateSlug(newName)) } - + // Create a new organization const handleCreateOrganization = async () => { if (!session?.user) return - + try { setIsCreatingOrg(true) setError(null) - + logger.info('Creating team organization', { name: orgName, slug: orgSlug }) - + // Create the organization using Better Auth API const result = await client.organization.create({ name: orgName, slug: 
orgSlug, }) - + if (!result.data?.id) { throw new Error('Failed to create organization') } - + const orgId = result.data.id logger.info('Organization created', { orgId }) - + // Set the new organization as active logger.info('Setting organization as active', { orgId }) await client.organization.setActive({ - organizationId: orgId + organizationId: orgId, }) - + // If the user has a team subscription, update the subscription reference // directly through a custom API endpoint instead of using upgrade if (hasTeamPlan) { const userSubResponse = await client.subscription.list() const teamSubscription = userSubResponse.data?.find( - sub => sub.plan === 'team' && sub.status === 'active' + (sub) => sub.plan === 'team' && sub.status === 'active' ) - + if (teamSubscription) { logger.info('Found user team subscription to transfer', { subscriptionId: teamSubscription.id, seats: teamSubscription.seats, - targetOrgId: orgId + targetOrgId: orgId, }) - + // Use a custom API endpoint to transfer the subscription without going to Stripe const transferResponse = await fetch('/api/user/transfer-subscription', { method: 'POST', headers: { - 'Content-Type': 'application/json' + 'Content-Type': 'application/json', }, body: JSON.stringify({ subscriptionId: teamSubscription.id, - organizationId: orgId - }) + organizationId: orgId, + }), }) - + if (!transferResponse.ok) { const errorData = await transferResponse.json() throw new Error(errorData.error || 'Failed to transfer subscription to organization') } - + logger.info('Successfully transferred subscription to organization') } } - + // Refresh the organization list await loadData() - + // Close the dialog setCreateOrgDialogOpen(false) setOrgName('') setOrgSlug('') - } catch (err: any) { logger.error('Failed to create organization', { error: err }) setError(err.message || 'Failed to create organization') @@ -321,24 +319,24 @@ export function TeamManagement() { setIsCreatingOrg(false) } } - + // Upgrade to team subscription with 
organization as reference const confirmTeamUpgrade = async (seats: number) => { if (!session?.user || !activeOrganization) return - + try { setIsLoading(true) setError(null) - + // Use the organization's ID as the reference for the team subscription const { error } = await client.subscription.upgrade({ plan: 'team', referenceId: activeOrganization.id, successUrl: window.location.href, cancelUrl: window.location.href, - seats: seats + seats: seats, }) - + if (error) { setError(error.message || 'Failed to upgrade to team subscription') } else { @@ -350,133 +348,130 @@ export function TeamManagement() { setIsLoading(false) } } - + // Set an organization as active const handleSetActiveOrg = async (orgId: string) => { if (!session?.user) return - + try { setIsLoading(true) - + // Set the active organization await client.organization.setActive({ organizationId: orgId, }) - } catch (err: any) { setError(err.message || 'Failed to set active organization') } finally { setIsLoading(false) } } - + // Invite a member to the organization const handleInviteMember = async () => { if (!session?.user || !activeOrganization) return - + try { setIsInviting(true) setError(null) setInviteSuccess(false) - + // Check seat limit - compare current members + pending invitations against seats - const currentMemberCount = activeOrganization.members?.length || 0; - const pendingInvitationCount = activeOrganization.invitations?.filter( - (inv: any) => inv.status === 'pending' - ).length || 0; - const totalCount = currentMemberCount + pendingInvitationCount; - + const currentMemberCount = activeOrganization.members?.length || 0 + const pendingInvitationCount = + activeOrganization.invitations?.filter((inv: any) => inv.status === 'pending').length || 0 + const totalCount = currentMemberCount + pendingInvitationCount + // Get the number of seats from subscription data - const teamSubscription = subscriptionData?.[0]; - const seatLimit = teamSubscription?.seats || 0; - + const teamSubscription = 
subscriptionData?.[0] + const seatLimit = teamSubscription?.seats || 0 + logger.info('Checking seat availability for invitation', { currentMembers: currentMemberCount, pendingInvites: pendingInvitationCount, totalUsed: totalCount, seatLimit: seatLimit, - subscriptionId: teamSubscription?.id - }); - + subscriptionId: teamSubscription?.id, + }) + if (totalCount >= seatLimit) { - const error = `You've reached your team seat limit of ${seatLimit}. Please upgrade your plan for more seats.`; + const error = `You've reached your team seat limit of ${seatLimit}. Please upgrade your plan for more seats.` logger.warn('Invitation failed - seat limit reached', { totalCount, - seatLimit - }); - setError(error); - return; + seatLimit, + }) + setError(error) + return } - + if (!inviteEmail || !inviteEmail.includes('@')) { - setError('Please enter a valid email address'); - return; + setError('Please enter a valid email address') + return } - + logger.info('Sending invitation to member', { email: inviteEmail, - organizationId: activeOrganization.id - }); - + organizationId: activeOrganization.id, + }) + // Invite the member const inviteResult = await client.organization.inviteMember({ email: inviteEmail, role: 'member', organizationId: activeOrganization.id, - }); - + }) + if (inviteResult.error) { - throw new Error(inviteResult.error.message || 'Failed to send invitation'); + throw new Error(inviteResult.error.message || 'Failed to send invitation') } - - logger.info('Invitation sent successfully'); - + + logger.info('Invitation sent successfully') + // Clear the input and show success message - setInviteEmail(''); - setInviteSuccess(true); - + setInviteEmail('') + setInviteSuccess(true) + // Refresh the organization - await refreshOrganization(); - + await refreshOrganization() } catch (err: any) { - logger.error('Error inviting member', { error: err }); - setError(err.message || 'Failed to invite member'); + logger.error('Error inviting member', { error: err }) + 
setError(err.message || 'Failed to invite member') } finally { - setIsInviting(false); + setIsInviting(false) } } - + // Remove a member from the organization const handleRemoveMember = async (member: any) => { if (!session?.user || !activeOrganization) return - + // Open confirmation dialog setRemoveMemberDialog({ open: true, memberId: member.id, memberName: member.user?.name || member.user?.email || 'this member', - shouldReduceSeats: false + shouldReduceSeats: false, }) } - + // Actual member removal after confirmation const confirmRemoveMember = async (shouldReduceSeats: boolean = false) => { const { memberId } = removeMemberDialog if (!session?.user || !activeOrganization || !memberId) return - + try { setIsLoading(true) - + // Remove the member await client.organization.removeMember({ memberIdOrEmail: memberId, organizationId: activeOrganization.id, }) - + // If the user opted to reduce seats as well if (shouldReduceSeats && subscriptionData && subscriptionData.length > 0) { const currentSeats = subscriptionData[0]?.seats || 0 - + if (currentSeats > 1) { // Reduce the seat count by 1 await client.subscription.upgrade({ @@ -484,50 +479,48 @@ export function TeamManagement() { referenceId: activeOrganization.id, successUrl: window.location.href, cancelUrl: window.location.href, - seats: currentSeats - 1 + seats: currentSeats - 1, }) } } - + // Refresh the organization await refreshOrganization() - + // Close the dialog setRemoveMemberDialog({ open: false, memberId: '', memberName: '', shouldReduceSeats: false }) - } catch (err: any) { setError(err.message || 'Failed to remove member') } finally { setIsLoading(false) } } - + // Cancel an invitation const handleCancelInvitation = async (invitationId: string) => { if (!session?.user || !activeOrganization) return - + try { setIsLoading(true) - + // Cancel the invitation await client.organization.cancelInvitation({ invitationId, }) - + // Refresh the organization await refreshOrganization() - } catch (err: any) { 
setError(err.message || 'Failed to cancel invitation') } finally { setIsLoading(false) } } - + if (isLoading && !activeOrganization && !hasTeamPlan) { return } - + const getInvitationStatus = (status: string) => { switch (status) { case 'pending': @@ -555,29 +548,27 @@ export function TeamManagement() { return status } } - + // No organization yet - show creation UI if (!activeOrganization) { return (

          - {hasTeamPlan - ? "Create Your Team Workspace" - : "No Team Workspace"} + {hasTeamPlan ? 'Create Your Team Workspace' : 'No Team Workspace'}

          - {hasTeamPlan - ? "You're subscribed to a team plan. Create your workspace to start collaborating with your team." + {hasTeamPlan + ? "You're subscribed to a team plan. Create your workspace to start collaborating with your team." : "You don't have a team workspace yet. Create one to start collaborating with your team."}

          - +
          - + @@ -586,39 +577,35 @@ export function TeamManagement() { Create a workspace for your team to collaborate on projects. - +
          - +
          - +
          simstudio.ai/team/
          - setOrgSlug(e.target.value)} className="rounded-l-none" />
          - + {error && ( Error {error} )} - +
          @@ -642,16 +627,16 @@ export function TeamManagement() {
          ) } - + return (

          Team Management

          - + {organizations.length > 1 && (
          - setInviteEmail(e.target.value)} disabled={isInviting} /> -
          - + {inviteSuccess && ( -

          - Invitation sent successfully -

          +

          Invitation sent successfully

          )}
          )} - + {/* Team Seats Usage - only show to admins/owners */} {isAdminOrOwner && (

          Team Seats

          - + {isLoadingSubscription ? ( ) : subscriptionData && subscriptionData.length > 0 ? ( @@ -719,36 +699,40 @@ export function TeamManagement() {
          Used - {(activeOrganization.members?.length || 0) + - (activeOrganization.invitations?.filter((inv: any) => inv.status === 'pending').length || 0)} + {(activeOrganization.members?.length || 0) + + (activeOrganization.invitations?.filter( + (inv: any) => inv.status === 'pending' + ).length || 0)} /{subscriptionData[0]?.seats || 0}
          - inv.status === 'pending').length || 0)) / - (subscriptionData[0]?.seats || 1) * 100} + inv.status === 'pending' + ).length || 0)) / + (subscriptionData[0]?.seats || 1)) * + 100 + } className="h-2" /> - +
          - -
          )} - + {/* Team Members - show to all users */}

          Team Members

          - + {activeOrganization.members?.length === 0 ? (
          No members in this organization yet. @@ -796,42 +782,42 @@ export function TeamManagement() { {member.role.charAt(0).toUpperCase() + member.role.slice(1)}
          - + {/* Only show remove button for non-owners and if current user is admin/owner */} - {isAdminOrOwner && member.role !== 'owner' && member.user?.email !== session?.user?.email && ( - - )} + {isAdminOrOwner && + member.role !== 'owner' && + member.user?.email !== session?.user?.email && ( + + )}
          ))}
          )}
          - + {/* Pending Invitations - only show to admins/owners */} {isAdminOrOwner && activeOrganization.invitations?.length > 0 && (

          Pending Invitations

          - +
          {activeOrganization.invitations?.map((invitation: any) => (
          {invitation.email}
          -
          - {getInvitationStatus(invitation.status)} -
          +
          {getInvitationStatus(invitation.status)}
          - + {invitation.status === 'pending' && ( -
          )} - +

          Team Workspace Name

          {activeOrganization.name}
          - +

          URL Slug

          @@ -862,14 +848,14 @@ export function TeamManagement() {
          - +

          Created On

          {new Date(activeOrganization.createdAt).toLocaleDateString()}
          - + {/* Only show subscription details to admins/owners */} {isAdminOrOwner && (
          @@ -879,10 +865,15 @@ export function TeamManagement() { ) : subscriptionData ? (
          -
          +
          {subscriptionData.status} {subscriptionData.cancelAtPeriodEnd ? ' (Cancels at period end)' : ''} @@ -891,10 +882,15 @@ export function TeamManagement() {
          Team seats: {subscriptionData.seats}
          {subscriptionData.periodEnd && ( -
          Next billing date: {new Date(subscriptionData.periodEnd).toLocaleDateString()}
          +
          + Next billing date:{' '} + {new Date(subscriptionData.periodEnd).toLocaleDateString()} +
          )} {subscriptionData.trialEnd && ( -
          Trial ends: {new Date(subscriptionData.trialEnd).toLocaleDateString()}
          +
          + Trial ends: {new Date(subscriptionData.trialEnd).toLocaleDateString()} +
          )}
          This subscription is associated with this team workspace. @@ -906,15 +902,17 @@ export function TeamManagement() { )}
          )} - + {!isAdminOrOwner && (

          Your Role

          - You are a {userRole} of this workspace. + You are a {userRole} of this + workspace. {userRole === 'member' && (

          - Contact a workspace admin or owner for subscription changes or to invite new members. + Contact a workspace admin or owner for subscription changes or to invite new + members.

          )}
          @@ -923,12 +921,12 @@ export function TeamManagement() {
          - + {/* Member removal confirmation dialog */} - { - if (!open) setRemoveMemberDialog({ ...removeMemberDialog, open: false }); + if (!open) setRemoveMemberDialog({ ...removeMemberDialog, open: false }) }} > @@ -938,18 +936,20 @@ export function TeamManagement() { Are you sure you want to remove {removeMemberDialog.memberName} from the team? - +
          - setRemoveMemberDialog({ - ...removeMemberDialog, - shouldReduceSeats: e.target.checked - })} + onChange={(e) => + setRemoveMemberDialog({ + ...removeMemberDialog, + shouldReduceSeats: e.target.checked, + }) + } />
          - + @@ -982,68 +989,68 @@ export function TeamManagement() { // Skeleton component for team management loading state function TeamManagementSkeleton() { - return ( -
          -
          - - + return ( +
          +
          + + +
          + +
          +
          + +
          + + +
          - -
          -
          - -
          - + +
          + +
          +
          + + +
          + +
          +
          - -
          - -
          -
          - - -
          - -
          - - -
          -
          -
          - -
          - -
          - {[1, 2, 3].map((i) => ( -
          -
          - - - -
          - +
          + +
          + +
          + {[1, 2, 3].map((i) => ( +
          +
          + + +
          - ))} -
          + +
          + ))}
          - ) - } - - // Skeleton component for loading state in buttons - function ButtonSkeleton() { - return - } - - // Skeleton component for loading state in team seats - function TeamSeatsSkeleton() { - return ( -
          - - -
          - ) - } \ No newline at end of file +
          + ) +} + +// Skeleton component for loading state in buttons +function ButtonSkeleton() { + return +} + +// Skeleton component for loading state in team seats +function TeamSeatsSkeleton() { + return ( +
          + + +
          + ) +} diff --git a/sim/app/w/components/sidebar/components/settings-modal/settings-modal.tsx b/apps/sim/app/w/components/sidebar/components/settings-modal/settings-modal.tsx similarity index 91% rename from sim/app/w/components/sidebar/components/settings-modal/settings-modal.tsx rename to apps/sim/app/w/components/sidebar/components/settings-modal/settings-modal.tsx index 82122a445..427e7a28c 100644 --- a/sim/app/w/components/sidebar/components/settings-modal/settings-modal.tsx +++ b/apps/sim/app/w/components/sidebar/components/settings-modal/settings-modal.tsx @@ -1,11 +1,12 @@ 'use client' -import { useEffect, useState, useMemo, useRef } from 'react' +import { useEffect, useMemo, useRef, useState } from 'react' import { X } from 'lucide-react' import { Button } from '@/components/ui/button' import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog' -import { cn } from '@/lib/utils' import { client, useSubscription } from '@/lib/auth-client' +import { createLogger } from '@/lib/logs/console-logger' +import { cn } from '@/lib/utils' import { useGeneralStore } from '@/stores/settings/general/store' import { Account } from './components/account/account' import { ApiKeys } from './components/api-keys/api-keys' @@ -13,10 +14,9 @@ import { Credentials } from './components/credentials/credentials' import { EnvironmentVariables } from './components/environment/environment' import { General } from './components/general/general' import { Privacy } from './components/privacy/privacy' -import { Subscription } from './components/subscription/subscription' import { SettingsNavigation } from './components/settings-navigation/settings-navigation' +import { Subscription } from './components/subscription/subscription' import { TeamManagement } from './components/team-management/team-management' -import { createLogger } from '@/lib/logs/console-logger' const logger = createLogger('SettingsModal') @@ -25,7 +25,15 @@ interface 
SettingsModalProps { onOpenChange: (open: boolean) => void } -type SettingsSection = 'general' | 'environment' | 'account' | 'credentials' | 'apikeys' | 'subscription' | 'team' | 'privacy' +type SettingsSection = + | 'general' + | 'environment' + | 'account' + | 'credentials' + | 'apikeys' + | 'subscription' + | 'team' + | 'privacy' export function SettingsModal({ open, onOpenChange }: SettingsModalProps) { const [activeSection, setActiveSection] = useState('general') @@ -34,47 +42,47 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) { const [subscriptionData, setSubscriptionData] = useState(null) const [usageData, setUsageData] = useState(null) const [isLoading, setIsLoading] = useState(true) - const loadSettings = useGeneralStore(state => state.loadSettings) + const loadSettings = useGeneralStore((state) => state.loadSettings) const subscription = useMemo(() => useSubscription(), []) const hasLoadedInitialData = useRef(false) useEffect(() => { async function loadAllSettings() { if (!open) return - + if (hasLoadedInitialData.current) return - + setIsLoading(true) - + try { await loadSettings() - + const proStatusResponse = await fetch('/api/user/subscription') - + if (proStatusResponse.ok) { const subData = await proStatusResponse.json() setIsPro(subData.isPro) setIsTeam(subData.isTeam) - + if (!subData.isTeam && activeSection === 'team') { setActiveSection('general') } } - + const usageResponse = await fetch('/api/user/usage') if (usageResponse.ok) { const usageData = await usageResponse.json() setUsageData(usageData) } - + try { const result = await subscription.list() - + if (result.data && result.data.length > 0) { const activeSubscription = result.data.find( - sub => sub.status === 'active' && (sub.plan === 'team' || sub.plan === 'pro') + (sub) => sub.status === 'active' && (sub.plan === 'team' || sub.plan === 'pro') ) - + if (activeSubscription) { setSubscriptionData(activeSubscription) } @@ -82,7 +90,7 @@ export function 
SettingsModal({ open, onOpenChange }: SettingsModalProps) { } catch (error) { logger.error('Error fetching subscription information', error) } - + hasLoadedInitialData.current = true } catch (error) { logger.error('Error loading settings data:', error) @@ -90,7 +98,7 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) { setIsLoading(false) } } - + if (open) { loadAllSettings() } else { @@ -134,9 +142,9 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
          {/* Navigation Sidebar */}
          -
          @@ -160,7 +168,7 @@ export function SettingsModal({ open, onOpenChange }: SettingsModalProps) {
          {isSubscriptionEnabled && (
          - - {config.password ? '**********************' : - (config.id === 'providerConfig' && config.value && typeof config.value === 'object') - ? (Object.keys(config.value).length === 0 ? 'Webhook pending configuration' : 'Webhook configured') - : (config.value || config.placeholder || 'Text input') - } + {config.password + ? '**********************' + : config.id === 'providerConfig' && config.value && typeof config.value === 'object' + ? Object.keys(config.value).length === 0 + ? 'Webhook pending configuration' + : 'Webhook configured' + : config.value || config.placeholder || 'Text input'}
          ) case 'long-input': @@ -232,9 +234,7 @@ function PreviewSubBlock({ config }: { config: ExtendedSubBlockConfig }) { case 'file-selector': return (
          - - {config.value ? 'File selected' : config.placeholder || 'Select file'} - + {config.value ? 'File selected' : config.placeholder || 'Select file'} - - {config.value ? 'Folder selected' : config.placeholder || 'Select folder'} - + {config.value ? 'Folder selected' : config.placeholder || 'Select folder'} Eval expression
          - ) + ) case 'date-input': return (
          @@ -423,16 +421,20 @@ function PreviewWorkflowBlock({ id, data }: NodeProps) { return (
          - + {/* Block Header */}
          -
          +
          @@ -448,8 +450,10 @@ function PreviewWorkflowBlock({ id, data }: NodeProps) { subBlockRows.map((row, rowIndex) => (
          {row.map((subBlock, blockIndex) => ( -
          +
          ))} @@ -594,4 +598,4 @@ export function WorkflowPreview(props: WorkflowPreviewProps) { ) -} \ No newline at end of file +} diff --git a/sim/app/w/error.tsx b/apps/sim/app/w/error.tsx similarity index 100% rename from sim/app/w/error.tsx rename to apps/sim/app/w/error.tsx diff --git a/sim/app/w/global-error.tsx b/apps/sim/app/w/global-error.tsx similarity index 100% rename from sim/app/w/global-error.tsx rename to apps/sim/app/w/global-error.tsx diff --git a/sim/app/w/hooks/use-registry-loading.ts b/apps/sim/app/w/hooks/use-registry-loading.ts similarity index 97% rename from sim/app/w/hooks/use-registry-loading.ts rename to apps/sim/app/w/hooks/use-registry-loading.ts index f264688b2..0ca7ba513 100644 --- a/sim/app/w/hooks/use-registry-loading.ts +++ b/apps/sim/app/w/hooks/use-registry-loading.ts @@ -5,28 +5,28 @@ import { useWorkflowRegistry } from '@/stores/workflows/registry/store' /** * Custom hook to manage workflow registry loading state - * + * * This hook initializes the loading state and automatically clears it * when workflows are loaded or after a timeout */ export function useRegistryLoading() { const { workflows, setLoading } = useWorkflowRegistry() - + useEffect(() => { // Set loading state initially setLoading(true) - + // If workflows are already loaded, clear loading state if (Object.keys(workflows).length > 0) { setTimeout(() => setLoading(false), 300) return } - + // Create a timeout to clear loading state after max time const timeout = setTimeout(() => { setLoading(false) }, 3000) // 3 second maximum loading time - + // Listen for workflows to be loaded const checkInterval = setInterval(() => { const currentWorkflows = useWorkflowRegistry.getState().workflows @@ -35,10 +35,10 @@ export function useRegistryLoading() { clearInterval(checkInterval) } }, 200) - + return () => { clearTimeout(timeout) clearInterval(checkInterval) } }, [setLoading, workflows]) -} \ No newline at end of file +} diff --git a/sim/app/w/layout.tsx 
b/apps/sim/app/w/layout.tsx similarity index 100% rename from sim/app/w/layout.tsx rename to apps/sim/app/w/layout.tsx diff --git a/sim/app/w/logs/components/control-bar/control-bar.tsx b/apps/sim/app/w/logs/components/control-bar/control-bar.tsx similarity index 100% rename from sim/app/w/logs/components/control-bar/control-bar.tsx rename to apps/sim/app/w/logs/components/control-bar/control-bar.tsx diff --git a/sim/app/w/logs/components/filters/components/filter-section.tsx b/apps/sim/app/w/logs/components/filters/components/filter-section.tsx similarity index 100% rename from sim/app/w/logs/components/filters/components/filter-section.tsx rename to apps/sim/app/w/logs/components/filters/components/filter-section.tsx diff --git a/sim/app/w/logs/components/filters/components/level.tsx b/apps/sim/app/w/logs/components/filters/components/level.tsx similarity index 100% rename from sim/app/w/logs/components/filters/components/level.tsx rename to apps/sim/app/w/logs/components/filters/components/level.tsx diff --git a/sim/app/w/logs/components/filters/components/timeline.tsx b/apps/sim/app/w/logs/components/filters/components/timeline.tsx similarity index 100% rename from sim/app/w/logs/components/filters/components/timeline.tsx rename to apps/sim/app/w/logs/components/filters/components/timeline.tsx diff --git a/sim/app/w/logs/components/filters/components/workflow.tsx b/apps/sim/app/w/logs/components/filters/components/workflow.tsx similarity index 100% rename from sim/app/w/logs/components/filters/components/workflow.tsx rename to apps/sim/app/w/logs/components/filters/components/workflow.tsx diff --git a/sim/app/w/logs/components/filters/filters.tsx b/apps/sim/app/w/logs/components/filters/filters.tsx similarity index 100% rename from sim/app/w/logs/components/filters/filters.tsx rename to apps/sim/app/w/logs/components/filters/filters.tsx diff --git a/sim/app/w/logs/components/sidebar/sidebar.tsx b/apps/sim/app/w/logs/components/sidebar/sidebar.tsx similarity 
index 98% rename from sim/app/w/logs/components/sidebar/sidebar.tsx rename to apps/sim/app/w/logs/components/sidebar/sidebar.tsx index ce0135ddd..fffb527a7 100644 --- a/sim/app/w/logs/components/sidebar/sidebar.tsx +++ b/apps/sim/app/w/logs/components/sidebar/sidebar.tsx @@ -1,7 +1,7 @@ 'use client' import { useEffect, useMemo, useRef, useState } from 'react' -import { ChevronDown, ChevronUp, X, Code } from 'lucide-react' +import { ChevronDown, ChevronUp, Code, X } from 'lucide-react' import { Button } from '@/components/ui/button' import { CopyButton } from '@/components/ui/copy-button' import { ScrollArea } from '@/components/ui/scroll-area' @@ -29,11 +29,13 @@ const tryPrettifyJson = (content: string): { isJson: boolean; formatted: string try { // First check if the content looks like JSON (starts with { or [) const trimmed = content.trim() - if (!(trimmed.startsWith('{') || trimmed.startsWith('[')) || - !(trimmed.endsWith('}') || trimmed.endsWith(']'))) { + if ( + !(trimmed.startsWith('{') || trimmed.startsWith('[')) || + !(trimmed.endsWith('}') || trimmed.endsWith(']')) + ) { return { isJson: false, formatted: content } } - + // Try to parse the JSON const parsed = JSON.parse(trimmed) const prettified = JSON.stringify(parsed, null, 2) diff --git a/sim/app/w/logs/components/tool-calls/tool-calls-display.tsx b/apps/sim/app/w/logs/components/tool-calls/tool-calls-display.tsx similarity index 100% rename from sim/app/w/logs/components/tool-calls/tool-calls-display.tsx rename to apps/sim/app/w/logs/components/tool-calls/tool-calls-display.tsx diff --git a/sim/app/w/logs/components/trace-spans/trace-spans-display.tsx b/apps/sim/app/w/logs/components/trace-spans/trace-spans-display.tsx similarity index 100% rename from sim/app/w/logs/components/trace-spans/trace-spans-display.tsx rename to apps/sim/app/w/logs/components/trace-spans/trace-spans-display.tsx diff --git a/sim/app/w/logs/logs.tsx b/apps/sim/app/w/logs/logs.tsx similarity index 100% rename from 
sim/app/w/logs/logs.tsx rename to apps/sim/app/w/logs/logs.tsx diff --git a/sim/app/w/logs/page.tsx b/apps/sim/app/w/logs/page.tsx similarity index 100% rename from sim/app/w/logs/page.tsx rename to apps/sim/app/w/logs/page.tsx diff --git a/sim/app/w/logs/stores/store.ts b/apps/sim/app/w/logs/stores/store.ts similarity index 100% rename from sim/app/w/logs/stores/store.ts rename to apps/sim/app/w/logs/stores/store.ts diff --git a/sim/app/w/logs/stores/types.ts b/apps/sim/app/w/logs/stores/types.ts similarity index 100% rename from sim/app/w/logs/stores/types.ts rename to apps/sim/app/w/logs/stores/types.ts diff --git a/sim/app/w/logs/utils/format-date.ts b/apps/sim/app/w/logs/utils/format-date.ts similarity index 100% rename from sim/app/w/logs/utils/format-date.ts rename to apps/sim/app/w/logs/utils/format-date.ts diff --git a/sim/app/w/marketplace/components/control-bar/control-bar.tsx b/apps/sim/app/w/marketplace/components/control-bar/control-bar.tsx similarity index 100% rename from sim/app/w/marketplace/components/control-bar/control-bar.tsx rename to apps/sim/app/w/marketplace/components/control-bar/control-bar.tsx diff --git a/sim/app/w/marketplace/components/error-message.tsx b/apps/sim/app/w/marketplace/components/error-message.tsx similarity index 100% rename from sim/app/w/marketplace/components/error-message.tsx rename to apps/sim/app/w/marketplace/components/error-message.tsx diff --git a/sim/app/w/marketplace/components/section.tsx b/apps/sim/app/w/marketplace/components/section.tsx similarity index 100% rename from sim/app/w/marketplace/components/section.tsx rename to apps/sim/app/w/marketplace/components/section.tsx diff --git a/sim/app/w/marketplace/components/toolbar/toolbar.tsx b/apps/sim/app/w/marketplace/components/toolbar/toolbar.tsx similarity index 100% rename from sim/app/w/marketplace/components/toolbar/toolbar.tsx rename to apps/sim/app/w/marketplace/components/toolbar/toolbar.tsx diff --git 
a/sim/app/w/marketplace/components/workflow-card-skeleton.tsx b/apps/sim/app/w/marketplace/components/workflow-card-skeleton.tsx similarity index 100% rename from sim/app/w/marketplace/components/workflow-card-skeleton.tsx rename to apps/sim/app/w/marketplace/components/workflow-card-skeleton.tsx diff --git a/sim/app/w/marketplace/components/workflow-card.tsx b/apps/sim/app/w/marketplace/components/workflow-card.tsx similarity index 99% rename from sim/app/w/marketplace/components/workflow-card.tsx rename to apps/sim/app/w/marketplace/components/workflow-card.tsx index 733965586..55c97398e 100644 --- a/sim/app/w/marketplace/components/workflow-card.tsx +++ b/apps/sim/app/w/marketplace/components/workflow-card.tsx @@ -5,8 +5,8 @@ import { useRouter } from 'next/navigation' import { Eye } from 'lucide-react' import { Card, CardContent, CardFooter, CardHeader } from '@/components/ui/card' import { useWorkflowRegistry } from '@/stores/workflows/registry/store' -import { Workflow } from '../marketplace' import { WorkflowPreview } from '@/app/w/components/workflow-preview/generic-workflow-preview' +import { Workflow } from '../marketplace' /** * WorkflowCardProps interface - defines the properties for the WorkflowCard component @@ -30,7 +30,6 @@ export function WorkflowCard({ workflow, onHover }: WorkflowCardProps) { const router = useRouter() const { createWorkflow } = useWorkflowRegistry() - // When workflow state becomes available, update preview ready state useEffect(() => { if (workflow.workflowState && !isPreviewReady) { @@ -109,8 +108,7 @@ export function WorkflowCard({ workflow, onHover }: WorkflowCardProps) { backgroundSize: 'cover', backgroundPosition: 'center top', }} - > -
          + >
          ) : ( // Fallback to text if no preview or thumbnail is available
          diff --git a/sim/app/w/marketplace/constants/categories.tsx b/apps/sim/app/w/marketplace/constants/categories.tsx similarity index 100% rename from sim/app/w/marketplace/constants/categories.tsx rename to apps/sim/app/w/marketplace/constants/categories.tsx diff --git a/sim/app/w/marketplace/marketplace.tsx b/apps/sim/app/w/marketplace/marketplace.tsx similarity index 100% rename from sim/app/w/marketplace/marketplace.tsx rename to apps/sim/app/w/marketplace/marketplace.tsx diff --git a/sim/app/w/marketplace/page.tsx b/apps/sim/app/w/marketplace/page.tsx similarity index 100% rename from sim/app/w/marketplace/page.tsx rename to apps/sim/app/w/marketplace/page.tsx diff --git a/sim/app/zoom-prevention.tsx b/apps/sim/app/zoom-prevention.tsx similarity index 100% rename from sim/app/zoom-prevention.tsx rename to apps/sim/app/zoom-prevention.tsx diff --git a/sim/blocks/blocks/agent.test.ts b/apps/sim/blocks/blocks/agent.test.ts similarity index 100% rename from sim/blocks/blocks/agent.test.ts rename to apps/sim/blocks/blocks/agent.test.ts diff --git a/sim/blocks/blocks/agent.ts b/apps/sim/blocks/blocks/agent.ts similarity index 96% rename from sim/blocks/blocks/agent.ts rename to apps/sim/blocks/blocks/agent.ts index 2aafc5ffb..76c42654b 100644 --- a/sim/blocks/blocks/agent.ts +++ b/apps/sim/blocks/blocks/agent.ts @@ -1,6 +1,6 @@ import { AgentIcon } from '@/components/icons' -import { createLogger } from '@/lib/logs/console-logger' import { isHosted } from '@/lib/environment' +import { createLogger } from '@/lib/logs/console-logger' import { useOllamaStore } from '@/stores/ollama/store' import { MODELS_TEMP_RANGE_0_1, MODELS_TEMP_RANGE_0_2 } from '@/providers/model-capabilities' import { getAllModelProviders, getBaseModelProviders } from '@/providers/utils' @@ -33,7 +33,9 @@ const getToolIdFromBlock = (blockType: string): string | undefined => { try { const { getAllBlocks } = require('@/blocks/registry') const blocks = getAllBlocks() - const block = 
blocks.find((b: { type: string; tools?: { access?: string[] } }) => b.type === blockType) + const block = blocks.find( + (b: { type: string; tools?: { access?: string[] } }) => b.type === blockType + ) return block?.tools?.access?.[0] } catch (error) { logger.error('Error getting tool ID from block', { error }) @@ -118,12 +120,15 @@ export const AgentBlock: BlockConfig = { value: [ // OpenAI models 'gpt-4o', - 'o1', 'o1-mini', 'o1-preview', - 'o3', 'o3-preview', + 'o1', + 'o1-mini', + 'o1-preview', + 'o3', + 'o3-preview', 'o4-mini', // Claude models - 'claude-3-5-sonnet-20240620', - 'claude-3-7-sonnet-20250219' + 'claude-3-5-sonnet-20240620', + 'claude-3-7-sonnet-20250219', ], not: true, // Show for all models EXCEPT those listed } diff --git a/sim/blocks/blocks/airtable.ts b/apps/sim/blocks/blocks/airtable.ts similarity index 100% rename from sim/blocks/blocks/airtable.ts rename to apps/sim/blocks/blocks/airtable.ts diff --git a/sim/blocks/blocks/api.ts b/apps/sim/blocks/blocks/api.ts similarity index 93% rename from sim/blocks/blocks/api.ts rename to apps/sim/blocks/blocks/api.ts index ba8f9072c..8486904b5 100644 --- a/sim/blocks/blocks/api.ts +++ b/apps/sim/blocks/blocks/api.ts @@ -39,7 +39,8 @@ export const ApiBlock: BlockConfig = { type: 'table', layout: 'full', columns: ['Key', 'Value'], - description: 'Custom headers (standard headers like User-Agent, Accept, etc. are added automatically)', + description: + 'Custom headers (standard headers like User-Agent, Accept, etc. 
are added automatically)', }, { id: 'body', diff --git a/sim/blocks/blocks/autoblocks.ts b/apps/sim/blocks/blocks/autoblocks.ts similarity index 100% rename from sim/blocks/blocks/autoblocks.ts rename to apps/sim/blocks/blocks/autoblocks.ts diff --git a/sim/blocks/blocks/browser_use.ts b/apps/sim/blocks/blocks/browser_use.ts similarity index 100% rename from sim/blocks/blocks/browser_use.ts rename to apps/sim/blocks/blocks/browser_use.ts diff --git a/sim/blocks/blocks/clay.ts b/apps/sim/blocks/blocks/clay.ts similarity index 100% rename from sim/blocks/blocks/clay.ts rename to apps/sim/blocks/blocks/clay.ts diff --git a/sim/blocks/blocks/condition.ts b/apps/sim/blocks/blocks/condition.ts similarity index 100% rename from sim/blocks/blocks/condition.ts rename to apps/sim/blocks/blocks/condition.ts diff --git a/sim/blocks/blocks/confluence.ts b/apps/sim/blocks/blocks/confluence.ts similarity index 100% rename from sim/blocks/blocks/confluence.ts rename to apps/sim/blocks/blocks/confluence.ts diff --git a/sim/blocks/blocks/elevenlabs.ts b/apps/sim/blocks/blocks/elevenlabs.ts similarity index 85% rename from sim/blocks/blocks/elevenlabs.ts rename to apps/sim/blocks/blocks/elevenlabs.ts index bcab7b532..67e3bf2f5 100644 --- a/sim/blocks/blocks/elevenlabs.ts +++ b/apps/sim/blocks/blocks/elevenlabs.ts @@ -1,6 +1,6 @@ import { ElevenLabsIcon } from '@/components/icons' -import { BlockConfig } from '../types' import { ToolResponse } from '@/tools/types' +import { BlockConfig } from '../types' interface ElevenLabsBlockResponse extends ToolResponse { output: { @@ -16,7 +16,7 @@ export const ElevenLabsBlock: BlockConfig = { category: 'tools', bgColor: '#181C1E', icon: ElevenLabsIcon, - + tools: { access: ['elevenlabs_tts'], config: { @@ -29,7 +29,7 @@ export const ElevenLabsBlock: BlockConfig = { }), }, }, - + inputs: { text: { type: 'string', @@ -44,11 +44,11 @@ export const ElevenLabsBlock: BlockConfig = { required: false, }, apiKey: { - type: 'string', - required: true, - 
}, + type: 'string', + required: true, + }, }, - + outputs: { response: { type: { @@ -56,7 +56,7 @@ export const ElevenLabsBlock: BlockConfig = { }, }, }, - + subBlocks: [ { id: 'text', @@ -73,13 +73,13 @@ export const ElevenLabsBlock: BlockConfig = { placeholder: 'Enter the voice ID', }, { - id: 'apiKey', - title: 'API Key', - type: 'short-input', - layout: 'full', - placeholder: 'Enter your ElevenLabs API key', - password: true, - }, + id: 'apiKey', + title: 'API Key', + type: 'short-input', + layout: 'full', + placeholder: 'Enter your ElevenLabs API key', + password: true, + }, { id: 'modelId', title: 'Model ID (Optional)', @@ -90,7 +90,7 @@ export const ElevenLabsBlock: BlockConfig = { 'eleven_multilingual_v2', 'eleven_turbo_v2', 'eleven_turbo_v2_5', - 'eleven_flash_v2_5' + 'eleven_flash_v2_5', ], }, ], diff --git a/sim/blocks/blocks/evaluator.ts b/apps/sim/blocks/blocks/evaluator.ts similarity index 100% rename from sim/blocks/blocks/evaluator.ts rename to apps/sim/blocks/blocks/evaluator.ts diff --git a/sim/blocks/blocks/exa.ts b/apps/sim/blocks/blocks/exa.ts similarity index 100% rename from sim/blocks/blocks/exa.ts rename to apps/sim/blocks/blocks/exa.ts diff --git a/sim/blocks/blocks/file.ts b/apps/sim/blocks/blocks/file.ts similarity index 100% rename from sim/blocks/blocks/file.ts rename to apps/sim/blocks/blocks/file.ts index 0d13a79da..93f28fe9c 100644 --- a/sim/blocks/blocks/file.ts +++ b/apps/sim/blocks/blocks/file.ts @@ -1,6 +1,6 @@ import { DocumentIcon } from '@/components/icons' -import { createLogger } from '@/lib/logs/console-logger' import { isProd } from '@/lib/environment' +import { createLogger } from '@/lib/logs/console-logger' import { FileParserOutput } from '@/tools/file/types' import { BlockConfig, SubBlockConfig, SubBlockLayout, SubBlockType } from '../types' diff --git a/sim/blocks/blocks/firecrawl.ts b/apps/sim/blocks/blocks/firecrawl.ts similarity index 100% rename from sim/blocks/blocks/firecrawl.ts rename to 
apps/sim/blocks/blocks/firecrawl.ts diff --git a/sim/blocks/blocks/function.ts b/apps/sim/blocks/blocks/function.ts similarity index 100% rename from sim/blocks/blocks/function.ts rename to apps/sim/blocks/blocks/function.ts diff --git a/sim/blocks/blocks/github.ts b/apps/sim/blocks/blocks/github.ts similarity index 100% rename from sim/blocks/blocks/github.ts rename to apps/sim/blocks/blocks/github.ts diff --git a/sim/blocks/blocks/gmail.ts b/apps/sim/blocks/blocks/gmail.ts similarity index 100% rename from sim/blocks/blocks/gmail.ts rename to apps/sim/blocks/blocks/gmail.ts diff --git a/sim/blocks/blocks/google.ts b/apps/sim/blocks/blocks/google.ts similarity index 91% rename from sim/blocks/blocks/google.ts rename to apps/sim/blocks/blocks/google.ts index 7bd0c0feb..c29ac615f 100644 --- a/sim/blocks/blocks/google.ts +++ b/apps/sim/blocks/blocks/google.ts @@ -1,6 +1,6 @@ import { GoogleIcon } from '@/components/icons' -import { BlockConfig } from '../types' import { ToolResponse } from '@/tools/types' +import { BlockConfig } from '../types' interface GoogleSearchResponse extends ToolResponse { output: { @@ -24,7 +24,8 @@ export const GoogleSearchBlock: BlockConfig = { type: 'google_search', name: 'Google Search', description: 'Search the web', - longDescription: 'Searches the web using the Google Custom Search API, which provides high-quality search results from the entire internet or a specific site defined by a custom search engine ID.', + longDescription: + 'Searches the web using the Google Custom Search API, which provides high-quality search results from the entire internet or a specific site defined by a custom search engine ID.', category: 'tools', bgColor: '#E0E0E0', icon: GoogleIcon, @@ -61,7 +62,7 @@ export const GoogleSearchBlock: BlockConfig = { layout: 'half', placeholder: '10', description: 'Number of search results to return (max: 10)', - } + }, ], tools: { @@ -97,7 +98,7 @@ export const GoogleSearchBlock: BlockConfig = { type: 'string', required: 
false, description: 'Number of results to return (default: 10, max: 10)', - } + }, }, outputs: { @@ -108,4 +109,4 @@ export const GoogleSearchBlock: BlockConfig = { } as any, }, }, -} \ No newline at end of file +} diff --git a/sim/blocks/blocks/google_docs.ts b/apps/sim/blocks/blocks/google_docs.ts similarity index 100% rename from sim/blocks/blocks/google_docs.ts rename to apps/sim/blocks/blocks/google_docs.ts diff --git a/sim/blocks/blocks/google_drive.ts b/apps/sim/blocks/blocks/google_drive.ts similarity index 100% rename from sim/blocks/blocks/google_drive.ts rename to apps/sim/blocks/blocks/google_drive.ts diff --git a/sim/blocks/blocks/google_sheets.ts b/apps/sim/blocks/blocks/google_sheets.ts similarity index 90% rename from sim/blocks/blocks/google_sheets.ts rename to apps/sim/blocks/blocks/google_sheets.ts index 7f1ee8d4e..b0edaf5bd 100644 --- a/sim/blocks/blocks/google_sheets.ts +++ b/apps/sim/blocks/blocks/google_sheets.ts @@ -82,7 +82,8 @@ export const GoogleSheetsBlock: BlockConfig = { title: 'Values', type: 'long-input', layout: 'full', - placeholder: 'Enter values as JSON array of arrays (e.g., [["A1", "B1"], ["A2", "B2"]]) or an array of objects (e.g., [{"name":"John", "age":30}, {"name":"Jane", "age":25}])', + placeholder: + 'Enter values as JSON array of arrays (e.g., [["A1", "B1"], ["A2", "B2"]]) or an array of objects (e.g., [{"name":"John", "age":30}, {"name":"Jane", "age":25}])', condition: { field: 'operation', value: 'write' }, }, { @@ -102,7 +103,8 @@ export const GoogleSheetsBlock: BlockConfig = { title: 'Values', type: 'long-input', layout: 'full', - placeholder: 'Enter values as JSON array of arrays (e.g., [["A1", "B1"], ["A2", "B2"]]) or an array of objects (e.g., [{"name":"John", "age":30}, {"name":"Jane", "age":25}])', + placeholder: + 'Enter values as JSON array of arrays (e.g., [["A1", "B1"], ["A2", "B2"]]) or an array of objects (e.g., [{"name":"John", "age":30}, {"name":"Jane", "age":25}])', condition: { field: 'operation', 
value: 'update' }, }, { @@ -122,7 +124,8 @@ export const GoogleSheetsBlock: BlockConfig = { title: 'Values', type: 'long-input', layout: 'full', - placeholder: 'Enter values as JSON array of arrays (e.g., [["A1", "B1"], ["A2", "B2"]]) or an array of objects (e.g., [{"name":"John", "age":30}, {"name":"Jane", "age":25}])', + placeholder: + 'Enter values as JSON array of arrays (e.g., [["A1", "B1"], ["A2", "B2"]]) or an array of objects (e.g., [{"name":"John", "age":30}, {"name":"Jane", "age":25}])', condition: { field: 'operation', value: 'append' }, }, { @@ -149,7 +152,12 @@ export const GoogleSheetsBlock: BlockConfig = { }, ], tools: { - access: ['google_sheets_read', 'google_sheets_write', 'google_sheets_update', 'google_sheets_append'], + access: [ + 'google_sheets_read', + 'google_sheets_write', + 'google_sheets_update', + 'google_sheets_append', + ], config: { tool: (params) => { switch (params.operation) { diff --git a/sim/blocks/blocks/guesty.ts b/apps/sim/blocks/blocks/guesty.ts similarity index 100% rename from sim/blocks/blocks/guesty.ts rename to apps/sim/blocks/blocks/guesty.ts diff --git a/sim/blocks/blocks/image_generator.ts b/apps/sim/blocks/blocks/image_generator.ts similarity index 97% rename from sim/blocks/blocks/image_generator.ts rename to apps/sim/blocks/blocks/image_generator.ts index 7c57ed2d7..9e120d7fc 100644 --- a/sim/blocks/blocks/image_generator.ts +++ b/apps/sim/blocks/blocks/image_generator.ts @@ -32,9 +32,7 @@ export const ImageGeneratorBlock: BlockConfig = { title: 'Model', type: 'dropdown', layout: 'half', - options: [ - { label: 'DALL-E 3', id: 'dall-e-3' }, - ], + options: [{ label: 'DALL-E 3', id: 'dall-e-3' }], value: () => 'dall-e-3', }, { diff --git a/sim/blocks/blocks/jina.ts b/apps/sim/blocks/blocks/jina.ts similarity index 100% rename from sim/blocks/blocks/jina.ts rename to apps/sim/blocks/blocks/jina.ts diff --git a/sim/blocks/blocks/jira.ts b/apps/sim/blocks/blocks/jira.ts similarity index 94% rename from 
sim/blocks/blocks/jira.ts rename to apps/sim/blocks/blocks/jira.ts index dd91cca36..fb8a845cb 100644 --- a/sim/blocks/blocks/jira.ts +++ b/apps/sim/blocks/blocks/jira.ts @@ -1,8 +1,17 @@ import { JiraIcon } from '@/components/icons' +import { + JiraRetrieveResponse, + JiraRetrieveResponseBulk, + JiraUpdateResponse, + JiraWriteResponse, +} from '@/tools/jira/types' import { BlockConfig } from '../types' -import { JiraRetrieveResponse, JiraUpdateResponse, JiraWriteResponse, JiraRetrieveResponseBulk } from '@/tools/jira/types' -type JiraResponse = JiraRetrieveResponse | JiraUpdateResponse | JiraWriteResponse | JiraRetrieveResponseBulk +type JiraResponse = + | JiraRetrieveResponse + | JiraUpdateResponse + | JiraWriteResponse + | JiraRetrieveResponseBulk export const JiraBlock: BlockConfig = { type: 'jira', @@ -112,19 +121,19 @@ export const JiraBlock: BlockConfig = { accessToken: params.credential, domain: params.domain, } - + // Define allowed parameters for each operation switch (params.operation) { case 'write': { // For write operations, only include write-specific fields const writeParams = { projectId: params.projectId, - summary: params.summary || '', + summary: params.summary || '', description: params.description || '', issueType: params.issueType || 'Task', parent: params.parentIssue ? 
{ key: params.parentIssue } : undefined, } - + return { ...baseParams, ...writeParams, @@ -138,7 +147,7 @@ export const JiraBlock: BlockConfig = { summary: params.summary || '', description: params.description || '', } - + return { ...baseParams, ...updateParams, @@ -186,8 +195,8 @@ export const JiraBlock: BlockConfig = { created: 'string', updated: 'string', success: 'boolean', - url: 'string' + url: 'string', }, }, }, -} \ No newline at end of file +} diff --git a/sim/blocks/blocks/linkup.ts b/apps/sim/blocks/blocks/linkup.ts similarity index 58% rename from sim/blocks/blocks/linkup.ts rename to apps/sim/blocks/blocks/linkup.ts index 09d90afc3..c48a67801 100644 --- a/sim/blocks/blocks/linkup.ts +++ b/apps/sim/blocks/blocks/linkup.ts @@ -1,12 +1,13 @@ import { LinkupIcon } from '@/components/icons' -import { BlockConfig } from '../types' import { LinkupSearchToolResponse } from '@/tools/linkup/types' +import { BlockConfig } from '../types' export const LinkupBlock: BlockConfig = { type: 'linkup', name: 'Linkup', description: 'Search the web with Linkup', - longDescription: 'Linkup Search allows you to search and retrieve up-to-date information from the web with source attribution.', + longDescription: + 'Linkup Search allows you to search and retrieve up-to-date information from the web with source attribution.', category: 'tools', bgColor: '#EAEADC', icon: LinkupIcon, @@ -20,38 +21,44 @@ export const LinkupBlock: BlockConfig = { placeholder: 'Enter your search query', }, { - id: 'outputType', - title: 'Output Type', - type: 'dropdown', - layout: 'half', - options: [ { label: 'Answer', id: 'sourcedAnswer' }, { label: 'Search', id: 'searchResults' }], - }, + id: 'outputType', + title: 'Output Type', + type: 'dropdown', + layout: 'half', + options: [ + { label: 'Answer', id: 'sourcedAnswer' }, + { label: 'Search', id: 'searchResults' }, + ], + }, { id: 'depth', title: 'Search Depth', type: 'dropdown', layout: 'half', - options: [ { label: 'Standard', id: 'standard' 
}, { label: 'Deep', id: 'deep' }], + options: [ + { label: 'Standard', id: 'standard' }, + { label: 'Deep', id: 'deep' }, + ], }, { - id: 'apiKey', - title: 'API Key', - type: 'short-input', - layout: 'full', - placeholder: 'Enter your Linkup API key', - password: true, + id: 'apiKey', + title: 'API Key', + type: 'short-input', + layout: 'full', + placeholder: 'Enter your Linkup API key', + password: true, }, ], tools: { access: ['linkup_search'], }, - + inputs: { q: { type: 'string', required: true }, apiKey: { type: 'string', required: true }, depth: { type: 'string', required: true }, - outputType: { type: 'string', required: true } + outputType: { type: 'string', required: true }, }, outputs: { @@ -62,4 +69,4 @@ export const LinkupBlock: BlockConfig = { }, }, }, -} +} diff --git a/sim/blocks/blocks/mem0.ts b/apps/sim/blocks/blocks/mem0.ts similarity index 88% rename from sim/blocks/blocks/mem0.ts rename to apps/sim/blocks/blocks/mem0.ts index fc8774340..b0e904e5e 100644 --- a/sim/blocks/blocks/mem0.ts +++ b/apps/sim/blocks/blocks/mem0.ts @@ -1,6 +1,6 @@ import { Mem0Icon } from '@/components/icons' -import { BlockConfig } from '../types' import { Mem0Response } from '@/tools/mem0/types' +import { BlockConfig } from '../types' export const Mem0Block: BlockConfig = { type: 'mem0', @@ -112,11 +112,7 @@ export const Mem0Block: BlockConfig = { }, ], tools: { - access: [ - 'mem0_add_memories', - 'mem0_search_memories', - 'mem0_get_memories', - ], + access: ['mem0_add_memories', 'mem0_search_memories', 'mem0_get_memories'], config: { tool: (params: Record) => { const operation = params.operation || 'add' @@ -134,35 +130,34 @@ export const Mem0Block: BlockConfig = { params: (params: Record) => { // Create detailed error information for any missing required fields const errors: string[] = [] - + // Validate required API key for all operations if (!params.apiKey) { - errors.push("API Key is required") + errors.push('API Key is required') } - + // For search operation, 
validate required fields if (params.operation === 'search') { if (!params.query || params.query.trim() === '') { - errors.push("Search Query is required") + errors.push('Search Query is required') } - + if (!params.userId) { - errors.push("User ID is required") + errors.push('User ID is required') } } - + // For add operation, validate required fields if (params.operation === 'add') { if (!params.messages) { - errors.push("Messages are required for add operation") + errors.push('Messages are required for add operation') } else { try { - const messagesArray = typeof params.messages === 'string' - ? JSON.parse(params.messages) - : params.messages - + const messagesArray = + typeof params.messages === 'string' ? JSON.parse(params.messages) : params.messages + if (!Array.isArray(messagesArray) || messagesArray.length === 0) { - errors.push("Messages must be a non-empty array") + errors.push('Messages must be a non-empty array') } else { for (const msg of messagesArray) { if (!msg.role || !msg.content) { @@ -172,44 +167,45 @@ export const Mem0Block: BlockConfig = { } } } catch (e: any) { - errors.push("Messages must be valid JSON") + errors.push('Messages must be valid JSON') } } - + if (!params.userId) { - errors.push("User ID is required") + errors.push('User ID is required') } } - + // Throw error if any required fields are missing if (errors.length > 0) { throw new Error(`Mem0 Block Error: ${errors.join(', ')}`) } - + const result: Record = { apiKey: params.apiKey, } // Add any identifiers that are present if (params.userId) result.userId = params.userId - + // Add version if specified if (params.version) result.version = params.version if (params.limit) result.limit = params.limit const operation = params.operation || 'add' - + // Process operation-specific parameters switch (operation) { case 'add': if (params.messages) { try { // Ensure messages are properly formatted - const messagesArray = typeof params.messages === 'string' - ? 
JSON.parse(params.messages) - : params.messages - + const messagesArray = + typeof params.messages === 'string' + ? JSON.parse(params.messages) + : params.messages + // Validate message structure if (Array.isArray(messagesArray) && messagesArray.length > 0) { let validMessages = true @@ -224,7 +220,9 @@ export const Mem0Block: BlockConfig = { } else { // Consistent with other error handling - collect in errors array errors.push('Invalid message format - each message must have role and content') - throw new Error(`Mem0 Block Error: Invalid message format - each message must have role and content`) + throw new Error( + `Mem0 Block Error: Invalid message format - each message must have role and content` + ) } } else { // Consistent with other error handling @@ -242,7 +240,7 @@ export const Mem0Block: BlockConfig = { case 'search': if (params.query) { result.query = params.query - + // Check if we have at least one identifier for search if (!params.userId) { errors.push('Search requires a User ID') @@ -252,7 +250,7 @@ export const Mem0Block: BlockConfig = { errors.push('Search requires a query parameter') throw new Error(`Mem0 Block Error: Search requires a query parameter`) } - + // Include limit if specified if (params.limit) { result.limit = Number(params.limit) @@ -262,18 +260,18 @@ export const Mem0Block: BlockConfig = { if (params.memoryId) { result.memoryId = params.memoryId } - + // Add date range filtering for v2 get memories if (params.startDate) { result.startDate = params.startDate } - + if (params.endDate) { result.endDate = params.endDate } break } - + return result }, }, @@ -299,4 +297,4 @@ export const Mem0Block: BlockConfig = { }, }, }, -} \ No newline at end of file +} diff --git a/sim/blocks/blocks/memory.ts b/apps/sim/blocks/blocks/memory.ts similarity index 100% rename from sim/blocks/blocks/memory.ts rename to apps/sim/blocks/blocks/memory.ts diff --git a/sim/blocks/blocks/mistral_parse.ts b/apps/sim/blocks/blocks/mistral_parse.ts similarity index 
99% rename from sim/blocks/blocks/mistral_parse.ts rename to apps/sim/blocks/blocks/mistral_parse.ts index 00398863f..c00035a8b 100644 --- a/sim/blocks/blocks/mistral_parse.ts +++ b/apps/sim/blocks/blocks/mistral_parse.ts @@ -213,4 +213,4 @@ export const MistralParseBlock: BlockConfig = { }, }, }, -} \ No newline at end of file +} diff --git a/sim/blocks/blocks/notion.ts b/apps/sim/blocks/blocks/notion.ts similarity index 100% rename from sim/blocks/blocks/notion.ts rename to apps/sim/blocks/blocks/notion.ts diff --git a/sim/blocks/blocks/openai.ts b/apps/sim/blocks/blocks/openai.ts similarity index 100% rename from sim/blocks/blocks/openai.ts rename to apps/sim/blocks/blocks/openai.ts diff --git a/sim/blocks/blocks/perplexity.ts b/apps/sim/blocks/blocks/perplexity.ts similarity index 100% rename from sim/blocks/blocks/perplexity.ts rename to apps/sim/blocks/blocks/perplexity.ts diff --git a/sim/blocks/blocks/pinecone.ts b/apps/sim/blocks/blocks/pinecone.ts similarity index 100% rename from sim/blocks/blocks/pinecone.ts rename to apps/sim/blocks/blocks/pinecone.ts diff --git a/sim/blocks/blocks/reddit.ts b/apps/sim/blocks/blocks/reddit.ts similarity index 79% rename from sim/blocks/blocks/reddit.ts rename to apps/sim/blocks/blocks/reddit.ts index 70dc4efa3..782c324d0 100644 --- a/sim/blocks/blocks/reddit.ts +++ b/apps/sim/blocks/blocks/reddit.ts @@ -1,8 +1,14 @@ import { RedditIcon } from '@/components/icons' -import { RedditHotPostsResponse, RedditPostsResponse, RedditCommentsResponse } from '@/tools/reddit/types' +import { + RedditCommentsResponse, + RedditHotPostsResponse, + RedditPostsResponse, +} from '@/tools/reddit/types' import { BlockConfig } from '../types' -export const RedditBlock: BlockConfig = { +export const RedditBlock: BlockConfig< + RedditHotPostsResponse | RedditPostsResponse | RedditCommentsResponse +> = { type: 'reddit', name: 'Reddit', description: 'Access Reddit data and content', @@ -20,10 +26,10 @@ export const RedditBlock: BlockConfig { 
const action = inputs.action || 'get_posts' - + if (action === 'get_comments') { return 'reddit_get_comments' } - - return 'reddit_get_posts' + + return 'reddit_get_posts' }, params: (inputs) => { const action = inputs.action || 'get_posts' - + if (action === 'get_comments') { return { postId: inputs.postId, subreddit: inputs.subreddit, sort: inputs.commentSort, - limit: inputs.commentLimit ? parseInt(inputs.commentLimit) : undefined + limit: inputs.commentLimit ? parseInt(inputs.commentLimit) : undefined, } } - + return { subreddit: inputs.subreddit, sort: inputs.sort, limit: inputs.limit ? parseInt(inputs.limit) : undefined, - time: inputs.sort === 'top' ? inputs.time : undefined + time: inputs.sort === 'top' ? inputs.time : undefined, } - } - } + }, + }, }, inputs: { action: { @@ -182,7 +188,8 @@ export const RedditBlock: BlockConfig = { type: 's3', name: 'S3', description: 'View S3 files', - longDescription: - 'Retrieve and view files from Amazon S3 buckets using presigned URLs.', + longDescription: 'Retrieve and view files from Amazon S3 buckets using presigned URLs.', category: 'tools', bgColor: '#E0E0E0', icon: S3Icon, @@ -56,25 +55,25 @@ export const S3Block: BlockConfig = { try { const url = new URL(params.s3Uri) const hostname = url.hostname - + // Extract bucket name from hostname const bucketName = hostname.split('.')[0] - + // Extract region from hostname const regionMatch = hostname.match(/s3[.-]([^.]+)\.amazonaws\.com/) const region = regionMatch ? regionMatch[1] : 'us-east-1' - + // Extract object key from pathname (remove leading slash) const objectKey = url.pathname.startsWith('/') ? 
url.pathname.substring(1) : url.pathname - + if (!bucketName) { throw new Error('Could not extract bucket name from URL') } - + if (!objectKey) { throw new Error('No object key found in URL') } - + return { accessKeyId: params.accessKeyId, secretAccessKey: params.secretAccessKey, @@ -83,7 +82,9 @@ export const S3Block: BlockConfig = { objectKey, } } catch (error) { - throw new Error('Invalid S3 Object URL format. Expected format: https://bucket-name.s3.region.amazonaws.com/path/to/file') + throw new Error( + 'Invalid S3 Object URL format. Expected format: https://bucket-name.s3.region.amazonaws.com/path/to/file' + ) } }, }, @@ -97,8 +98,8 @@ export const S3Block: BlockConfig = { response: { type: { url: 'string', - metadata: 'json' - } - } + metadata: 'json', + }, + }, }, -} \ No newline at end of file +} diff --git a/sim/blocks/blocks/serper.ts b/apps/sim/blocks/blocks/serper.ts similarity index 100% rename from sim/blocks/blocks/serper.ts rename to apps/sim/blocks/blocks/serper.ts diff --git a/sim/blocks/blocks/slack.ts b/apps/sim/blocks/blocks/slack.ts similarity index 100% rename from sim/blocks/blocks/slack.ts rename to apps/sim/blocks/blocks/slack.ts diff --git a/sim/blocks/blocks/stagehand.ts b/apps/sim/blocks/blocks/stagehand.ts similarity index 100% rename from sim/blocks/blocks/stagehand.ts rename to apps/sim/blocks/blocks/stagehand.ts diff --git a/sim/blocks/blocks/stagehand_agent.ts b/apps/sim/blocks/blocks/stagehand_agent.ts similarity index 100% rename from sim/blocks/blocks/stagehand_agent.ts rename to apps/sim/blocks/blocks/stagehand_agent.ts diff --git a/sim/blocks/blocks/starter.ts b/apps/sim/blocks/blocks/starter.ts similarity index 100% rename from sim/blocks/blocks/starter.ts rename to apps/sim/blocks/blocks/starter.ts diff --git a/sim/blocks/blocks/supabase.ts b/apps/sim/blocks/blocks/supabase.ts similarity index 100% rename from sim/blocks/blocks/supabase.ts rename to apps/sim/blocks/blocks/supabase.ts diff --git a/sim/blocks/blocks/tavily.ts 
b/apps/sim/blocks/blocks/tavily.ts similarity index 100% rename from sim/blocks/blocks/tavily.ts rename to apps/sim/blocks/blocks/tavily.ts diff --git a/sim/blocks/blocks/telegram.ts b/apps/sim/blocks/blocks/telegram.ts similarity index 97% rename from sim/blocks/blocks/telegram.ts rename to apps/sim/blocks/blocks/telegram.ts index 521210b13..bca15d078 100644 --- a/sim/blocks/blocks/telegram.ts +++ b/apps/sim/blocks/blocks/telegram.ts @@ -57,8 +57,8 @@ export const TelegramBlock: BlockConfig = { response: { type: { ok: 'boolean', - result: 'json' - } - } + result: 'json', + }, + }, }, } diff --git a/sim/blocks/blocks/thinking.ts b/apps/sim/blocks/blocks/thinking.ts similarity index 100% rename from sim/blocks/blocks/thinking.ts rename to apps/sim/blocks/blocks/thinking.ts diff --git a/sim/blocks/blocks/translate.ts b/apps/sim/blocks/blocks/translate.ts similarity index 100% rename from sim/blocks/blocks/translate.ts rename to apps/sim/blocks/blocks/translate.ts diff --git a/sim/blocks/blocks/twilio.ts b/apps/sim/blocks/blocks/twilio.ts similarity index 100% rename from sim/blocks/blocks/twilio.ts rename to apps/sim/blocks/blocks/twilio.ts diff --git a/sim/blocks/blocks/typeform.ts b/apps/sim/blocks/blocks/typeform.ts similarity index 100% rename from sim/blocks/blocks/typeform.ts rename to apps/sim/blocks/blocks/typeform.ts diff --git a/sim/blocks/blocks/vision.ts b/apps/sim/blocks/blocks/vision.ts similarity index 100% rename from sim/blocks/blocks/vision.ts rename to apps/sim/blocks/blocks/vision.ts diff --git a/sim/blocks/blocks/whatsapp.ts b/apps/sim/blocks/blocks/whatsapp.ts similarity index 100% rename from sim/blocks/blocks/whatsapp.ts rename to apps/sim/blocks/blocks/whatsapp.ts diff --git a/sim/blocks/blocks/x.ts b/apps/sim/blocks/blocks/x.ts similarity index 100% rename from sim/blocks/blocks/x.ts rename to apps/sim/blocks/blocks/x.ts diff --git a/sim/blocks/blocks/youtube.ts b/apps/sim/blocks/blocks/youtube.ts similarity index 100% rename from 
sim/blocks/blocks/youtube.ts rename to apps/sim/blocks/blocks/youtube.ts diff --git a/sim/blocks/index.ts b/apps/sim/blocks/index.ts similarity index 57% rename from sim/blocks/index.ts rename to apps/sim/blocks/index.ts index 7066fa156..d46c9ebcd 100644 --- a/sim/blocks/index.ts +++ b/apps/sim/blocks/index.ts @@ -1,19 +1,12 @@ -import { - registry, +import { getAllBlocks, + getAllBlockTypes, getBlock, getBlocksByCategory, - getAllBlockTypes, - isValidBlockType + isValidBlockType, + registry, } from './registry' -export { - registry, - getBlock, - getBlocksByCategory, - getAllBlockTypes, - isValidBlockType, - getAllBlocks -} +export { registry, getBlock, getBlocksByCategory, getAllBlockTypes, isValidBlockType, getAllBlocks } export type { BlockConfig } from './types' diff --git a/sim/blocks/registry.ts b/apps/sim/blocks/registry.ts similarity index 99% rename from sim/blocks/registry.ts rename to apps/sim/blocks/registry.ts index 50cb66149..5478dee6a 100644 --- a/sim/blocks/registry.ts +++ b/apps/sim/blocks/registry.ts @@ -1,10 +1,7 @@ /** * Blocks Registry - * + * */ - -import { BlockConfig } from './types' - // Import all blocks directly here import { AgentBlock } from './blocks/agent' import { AirtableBlock } from './blocks/airtable' @@ -14,8 +11,6 @@ import { BrowserUseBlock } from './blocks/browser_use' import { ClayBlock } from './blocks/clay' import { ConditionBlock } from './blocks/condition' import { ConfluenceBlock } from './blocks/confluence' -import { GoogleDocsBlock } from './blocks/google_docs' -import { GoogleDriveBlock } from './blocks/google_drive' import { ElevenLabsBlock } from './blocks/elevenlabs' import { EvaluatorBlock } from './blocks/evaluator' import { ExaBlock } from './blocks/exa' @@ -25,26 +20,31 @@ import { FunctionBlock } from './blocks/function' import { GitHubBlock } from './blocks/github' import { GmailBlock } from './blocks/gmail' import { GoogleSearchBlock } from './blocks/google' +import { GoogleDocsBlock } from 
'./blocks/google_docs' +import { GoogleDriveBlock } from './blocks/google_drive' +import { GoogleSheetsBlock } from './blocks/google_sheets' // import { GuestyBlock } from './blocks/guesty' import { ImageGeneratorBlock } from './blocks/image_generator' import { JinaBlock } from './blocks/jina' -import { LinkupBlock } from './blocks/linkup' -import { MistralParseBlock } from './blocks/mistral_parse' import { JiraBlock } from './blocks/jira' +import { LinkupBlock } from './blocks/linkup' +import { Mem0Block } from './blocks/mem0' +import { MistralParseBlock } from './blocks/mistral_parse' import { NotionBlock } from './blocks/notion' import { OpenAIBlock } from './blocks/openai' import { PerplexityBlock } from './blocks/perplexity' import { PineconeBlock } from './blocks/pinecone' import { RedditBlock } from './blocks/reddit' import { RouterBlock } from './blocks/router' +import { S3Block } from './blocks/s3' import { SerperBlock } from './blocks/serper' -import { GoogleSheetsBlock } from './blocks/google_sheets' import { SlackBlock } from './blocks/slack' import { StagehandBlock } from './blocks/stagehand' import { StagehandAgentBlock } from './blocks/stagehand_agent' import { StarterBlock } from './blocks/starter' import { SupabaseBlock } from './blocks/supabase' import { TavilyBlock } from './blocks/tavily' +import { TelegramBlock } from './blocks/telegram' import { ThinkingBlock } from './blocks/thinking' import { TranslateBlock } from './blocks/translate' import { TwilioSMSBlock } from './blocks/twilio' @@ -53,9 +53,7 @@ import { VisionBlock } from './blocks/vision' import { WhatsAppBlock } from './blocks/whatsapp' import { XBlock } from './blocks/x' import { YouTubeBlock } from './blocks/youtube' -import { Mem0Block } from './blocks/mem0' -import { S3Block } from './blocks/s3' -import { TelegramBlock } from './blocks/telegram' +import { BlockConfig } from './types' // Registry of all available blocks, alphabetically sorted export const registry: Record = { @@ 
-108,7 +106,7 @@ export const registry: Record = { whatsapp: WhatsAppBlock, x: XBlock, youtube: YouTubeBlock, - telegram: TelegramBlock + telegram: TelegramBlock, } // Helper functions to access the registry @@ -121,4 +119,4 @@ export const getAllBlockTypes = (): string[] => Object.keys(registry) export const isValidBlockType = (type: string): type is string => type in registry -export const getAllBlocks = (): BlockConfig[] => Object.values(registry) \ No newline at end of file +export const getAllBlocks = (): BlockConfig[] => Object.values(registry) diff --git a/sim/blocks/types.ts b/apps/sim/blocks/types.ts similarity index 100% rename from sim/blocks/types.ts rename to apps/sim/blocks/types.ts diff --git a/sim/blocks/utils.ts b/apps/sim/blocks/utils.ts similarity index 100% rename from sim/blocks/utils.ts rename to apps/sim/blocks/utils.ts diff --git a/sim/components.json b/apps/sim/components.json similarity index 100% rename from sim/components.json rename to apps/sim/components.json diff --git a/sim/components/emails/base-styles.ts b/apps/sim/components/emails/base-styles.ts similarity index 100% rename from sim/components/emails/base-styles.ts rename to apps/sim/components/emails/base-styles.ts diff --git a/sim/components/emails/footer.tsx b/apps/sim/components/emails/footer.tsx similarity index 100% rename from sim/components/emails/footer.tsx rename to apps/sim/components/emails/footer.tsx diff --git a/sim/components/emails/invitation-email.tsx b/apps/sim/components/emails/invitation-email.tsx similarity index 87% rename from sim/components/emails/invitation-email.tsx rename to apps/sim/components/emails/invitation-email.tsx index 85bc6e31e..4cfb86237 100644 --- a/sim/components/emails/invitation-email.tsx +++ b/apps/sim/components/emails/invitation-email.tsx @@ -65,15 +65,16 @@ export const InvitationEmail = ({
          Hello, - {inviterName} has invited you to join {organizationName} on Sim Studio. - Sim Studio is a powerful, user-friendly platform for building, testing, and optimizing agentic workflows. + {inviterName} has invited you to join{' '} + {organizationName} on Sim Studio. Sim Studio is a powerful, + user-friendly platform for building, testing, and optimizing agentic workflows. Accept Invitation - This invitation will expire in 48 hours. If you believe this invitation was sent in error, - please ignore this email. + This invitation will expire in 48 hours. If you believe this invitation was sent in + error, please ignore this email. Best regards, @@ -88,8 +89,8 @@ export const InvitationEmail = ({ color: '#666666', }} > - This email was sent on {format(updatedDate, 'MMMM do, yyyy')} to {invitedEmail} with an invitation - to join {organizationName} on Sim Studio. + This email was sent on {format(updatedDate, 'MMMM do, yyyy')} to {invitedEmail} with + an invitation to join {organizationName} on Sim Studio.
          @@ -100,4 +101,4 @@ export const InvitationEmail = ({ ) } -export default InvitationEmail \ No newline at end of file +export default InvitationEmail diff --git a/sim/components/emails/otp-verification-email.tsx b/apps/sim/components/emails/otp-verification-email.tsx similarity index 100% rename from sim/components/emails/otp-verification-email.tsx rename to apps/sim/components/emails/otp-verification-email.tsx diff --git a/sim/components/emails/render-email.ts b/apps/sim/components/emails/render-email.ts similarity index 93% rename from sim/components/emails/render-email.ts rename to apps/sim/components/emails/render-email.ts index faef57e3b..50e26939b 100644 --- a/sim/components/emails/render-email.ts +++ b/apps/sim/components/emails/render-email.ts @@ -1,9 +1,9 @@ import { renderAsync } from '@react-email/components' +import { InvitationEmail } from './invitation-email' import { OTPVerificationEmail } from './otp-verification-email' import { ResetPasswordEmail } from './reset-password-email' import { WaitlistApprovalEmail } from './waitlist-approval-email' import { WaitlistConfirmationEmail } from './waitlist-confirmation-email' -import { InvitationEmail } from './invitation-email' /** * Renders the OTP verification email to HTML @@ -35,13 +35,15 @@ export async function renderInvitationEmail( inviteLink: string, invitedEmail: string ): Promise { - return await renderAsync(InvitationEmail({ - inviterName, - organizationName, - inviteLink, - invitedEmail, - updatedDate: new Date() - })) + return await renderAsync( + InvitationEmail({ + inviterName, + organizationName, + inviteLink, + invitedEmail, + updatedDate: new Date(), + }) + ) } /** diff --git a/sim/components/emails/reset-password-email.tsx b/apps/sim/components/emails/reset-password-email.tsx similarity index 100% rename from sim/components/emails/reset-password-email.tsx rename to apps/sim/components/emails/reset-password-email.tsx diff --git a/sim/components/emails/waitlist-approval-email.tsx 
b/apps/sim/components/emails/waitlist-approval-email.tsx similarity index 100% rename from sim/components/emails/waitlist-approval-email.tsx rename to apps/sim/components/emails/waitlist-approval-email.tsx diff --git a/sim/components/emails/waitlist-confirmation-email.tsx b/apps/sim/components/emails/waitlist-confirmation-email.tsx similarity index 100% rename from sim/components/emails/waitlist-confirmation-email.tsx rename to apps/sim/components/emails/waitlist-confirmation-email.tsx diff --git a/sim/components/icons.tsx b/apps/sim/components/icons.tsx similarity index 64% rename from sim/components/icons.tsx rename to apps/sim/components/icons.tsx index 580b39ccf..2277c3dab 100644 --- a/sim/components/icons.tsx +++ b/apps/sim/components/icons.tsx @@ -978,7 +978,7 @@ export function TranslateIcon(props: SVGProps) { - + @@ -988,151 +988,144 @@ export function TranslateIcon(props: SVGProps) { export function SlackIcon(props: SVGProps) { return ( - - - - - - - + + + + + + + ) } export function GithubIcon(props: SVGProps) { return ( - - - + + + ) } - -export function SerperIcon(props: SVGProps){ +export function SerperIcon(props: SVGProps) { return ( - - - - - - - - - - - - - + + + + + + + + + + + + + ) } export function TavilyIcon(props: SVGProps) { return ( - - - - - - - - + + + + + + + + ) } export function ConnectIcon(props: SVGProps) { return ( - - + {...props} + width="24" + height="24" + viewBox="-2 -2 28 28" + fill="none" + xmlns="http://www.w3.org/2000/svg" + > + + ) } @@ -1307,7 +1300,38 @@ export function GoogleSheetsIcon(props: SVGProps) { } export const S3Icon = (props: SVGProps) => ( - + + + + + + + + + + + + + + + + ) export function GoogleIcon(props: SVGProps) { @@ -1315,20 +1339,20 @@ export function GoogleIcon(props: SVGProps) { - - - + d="M43.611,20.083H42V20H24v8h11.303c-1.649,4.657-6.08,8-11.303,8c-6.627,0-12-5.373-12-12 s5.373-12,12-12c3.059,0,5.842,1.154,7.961,3.039l5.657-5.657C34.046,6.053,29.268,4,24,4C12.955,4,4,12.955,4,24s8.955,20,20,20 
s20-8.955,20-20C44,22.659,43.862,21.35,43.611,20.083z" + /> + + + ) } @@ -1336,7 +1360,7 @@ export function GoogleIcon(props: SVGProps) { export function DiscordIcon(props: SVGProps) { return ( - + ) } @@ -1349,9 +1373,9 @@ export function CrunchbaseIcon(props: SVGProps) { width="24" height="24" viewBox="0 0 24 24" - xmlns="http://www.w3.org/2000/svg" - > - + xmlns="http://www.w3.org/2000/svg" + > + ) } @@ -2147,31 +2171,64 @@ l57 -85 -48 -124 c-203 -517 -79 -930 346 -1155 159 -85 441 -71 585 28 l111 export function Mem0Icon(props: SVGProps) { return ( - - + + ) } export function ElevenLabsIcon(props: SVGProps) { return ( - - - + + + ) } export function LinkupIcon(props: SVGProps) { return ( - - + + - - - - - + + + + + ) } @@ -2187,10 +2244,10 @@ export function JiraIcon(props: SVGProps) { focusable="false" aria-hidden="true" > - + /> ) } @@ -2198,41 +2255,42 @@ export function JiraIcon(props: SVGProps) { export function TelegramIcon(props: SVGProps) { return ( - - - - - - - - + + + + + + + + ) } export function ClayIcon(props: SVGProps) { return ( - + - - + + C195.835999,149.785568 198.051407,147.877594 200.266830,145.969620 z" + /> ) } - diff --git a/sim/components/ui/alert-dialog.tsx b/apps/sim/components/ui/alert-dialog.tsx similarity index 100% rename from sim/components/ui/alert-dialog.tsx rename to apps/sim/components/ui/alert-dialog.tsx diff --git a/sim/components/ui/alert.tsx b/apps/sim/components/ui/alert.tsx similarity index 100% rename from sim/components/ui/alert.tsx rename to apps/sim/components/ui/alert.tsx diff --git a/sim/components/ui/badge.tsx b/apps/sim/components/ui/badge.tsx similarity index 100% rename from sim/components/ui/badge.tsx rename to apps/sim/components/ui/badge.tsx diff --git a/sim/components/ui/breadcrumb.tsx b/apps/sim/components/ui/breadcrumb.tsx similarity index 100% rename from sim/components/ui/breadcrumb.tsx rename to apps/sim/components/ui/breadcrumb.tsx diff --git a/sim/components/ui/button.tsx 
b/apps/sim/components/ui/button.tsx similarity index 100% rename from sim/components/ui/button.tsx rename to apps/sim/components/ui/button.tsx diff --git a/sim/components/ui/calendar.tsx b/apps/sim/components/ui/calendar.tsx similarity index 88% rename from sim/components/ui/calendar.tsx rename to apps/sim/components/ui/calendar.tsx index d1713131b..d351a467c 100644 --- a/sim/components/ui/calendar.tsx +++ b/apps/sim/components/ui/calendar.tsx @@ -46,11 +46,15 @@ function Calendar({ className, classNames, showOutsideDays = true, ...props }: C ...classNames, }} components={{ - IconLeft: ({ className, ...props }) => ( - + PreviousMonthButton: ({ ...props }) => ( + ), - IconRight: ({ className, ...props }) => ( - + NextMonthButton: ({ ...props }) => ( + ), }} {...props} diff --git a/sim/components/ui/card.tsx b/apps/sim/components/ui/card.tsx similarity index 100% rename from sim/components/ui/card.tsx rename to apps/sim/components/ui/card.tsx diff --git a/sim/components/ui/checkbox.tsx b/apps/sim/components/ui/checkbox.tsx similarity index 100% rename from sim/components/ui/checkbox.tsx rename to apps/sim/components/ui/checkbox.tsx diff --git a/sim/components/ui/code-block.tsx b/apps/sim/components/ui/code-block.tsx similarity index 100% rename from sim/components/ui/code-block.tsx rename to apps/sim/components/ui/code-block.tsx diff --git a/sim/components/ui/collapsible.tsx b/apps/sim/components/ui/collapsible.tsx similarity index 100% rename from sim/components/ui/collapsible.tsx rename to apps/sim/components/ui/collapsible.tsx diff --git a/sim/components/ui/command.tsx b/apps/sim/components/ui/command.tsx similarity index 97% rename from sim/components/ui/command.tsx rename to apps/sim/components/ui/command.tsx index cbb579bcf..866a1f484 100644 --- a/sim/components/ui/command.tsx +++ b/apps/sim/components/ui/command.tsx @@ -72,6 +72,12 @@ import { cn } from '@/lib/utils' // This file is not typed correctly from shadcn, so we're disabling the type checker // 
@ts-nocheck +// This file is not typed correctly from shadcn, so we're disabling the type checker +// @ts-nocheck + +// This file is not typed correctly from shadcn, so we're disabling the type checker +// @ts-nocheck + const Command = React.forwardRef< React.ElementRef, React.ComponentPropsWithoutRef & { diff --git a/sim/components/ui/copy-button.tsx b/apps/sim/components/ui/copy-button.tsx similarity index 100% rename from sim/components/ui/copy-button.tsx rename to apps/sim/components/ui/copy-button.tsx diff --git a/sim/components/ui/dialog.tsx b/apps/sim/components/ui/dialog.tsx similarity index 100% rename from sim/components/ui/dialog.tsx rename to apps/sim/components/ui/dialog.tsx diff --git a/sim/components/ui/dropdown-menu.tsx b/apps/sim/components/ui/dropdown-menu.tsx similarity index 100% rename from sim/components/ui/dropdown-menu.tsx rename to apps/sim/components/ui/dropdown-menu.tsx diff --git a/sim/components/ui/env-var-dropdown.tsx b/apps/sim/components/ui/env-var-dropdown.tsx similarity index 100% rename from sim/components/ui/env-var-dropdown.tsx rename to apps/sim/components/ui/env-var-dropdown.tsx diff --git a/sim/components/ui/form.tsx b/apps/sim/components/ui/form.tsx similarity index 100% rename from sim/components/ui/form.tsx rename to apps/sim/components/ui/form.tsx diff --git a/sim/components/ui/formatted-text.tsx b/apps/sim/components/ui/formatted-text.tsx similarity index 100% rename from sim/components/ui/formatted-text.tsx rename to apps/sim/components/ui/formatted-text.tsx diff --git a/sim/components/ui/input-otp-form.tsx b/apps/sim/components/ui/input-otp-form.tsx similarity index 92% rename from sim/components/ui/input-otp-form.tsx rename to apps/sim/components/ui/input-otp-form.tsx index 779383e53..c60115163 100644 --- a/sim/components/ui/input-otp-form.tsx +++ b/apps/sim/components/ui/input-otp-form.tsx @@ -1,9 +1,9 @@ 'use client' import { useState } from 'react' +import { Loader2 } from 'lucide-react' import { Button } from 
'./button' import { InputOTP, InputOTPGroup, InputOTPSlot } from './input-otp' -import { Loader2 } from 'lucide-react' interface OTPInputFormProps { onSubmit: (otp: string) => void @@ -54,11 +54,7 @@ export function OTPInputForm({ {error &&

          {error}

          } - ) -} \ No newline at end of file +} diff --git a/sim/components/ui/input-otp.tsx b/apps/sim/components/ui/input-otp.tsx similarity index 100% rename from sim/components/ui/input-otp.tsx rename to apps/sim/components/ui/input-otp.tsx diff --git a/sim/components/ui/input.tsx b/apps/sim/components/ui/input.tsx similarity index 100% rename from sim/components/ui/input.tsx rename to apps/sim/components/ui/input.tsx diff --git a/sim/components/ui/label.tsx b/apps/sim/components/ui/label.tsx similarity index 100% rename from sim/components/ui/label.tsx rename to apps/sim/components/ui/label.tsx diff --git a/sim/components/ui/loading-agent.tsx b/apps/sim/components/ui/loading-agent.tsx similarity index 100% rename from sim/components/ui/loading-agent.tsx rename to apps/sim/components/ui/loading-agent.tsx diff --git a/sim/components/ui/notice.tsx b/apps/sim/components/ui/notice.tsx similarity index 100% rename from sim/components/ui/notice.tsx rename to apps/sim/components/ui/notice.tsx diff --git a/sim/components/ui/popover.tsx b/apps/sim/components/ui/popover.tsx similarity index 100% rename from sim/components/ui/popover.tsx rename to apps/sim/components/ui/popover.tsx diff --git a/sim/components/ui/progress.tsx b/apps/sim/components/ui/progress.tsx similarity index 100% rename from sim/components/ui/progress.tsx rename to apps/sim/components/ui/progress.tsx diff --git a/sim/components/ui/radio-group.tsx b/apps/sim/components/ui/radio-group.tsx similarity index 70% rename from sim/components/ui/radio-group.tsx rename to apps/sim/components/ui/radio-group.tsx index e9bde1793..c4292e320 100644 --- a/sim/components/ui/radio-group.tsx +++ b/apps/sim/components/ui/radio-group.tsx @@ -1,22 +1,15 @@ -"use client" +'use client' -import * as React from "react" -import * as RadioGroupPrimitive from "@radix-ui/react-radio-group" -import { Circle } from "lucide-react" - -import { cn } from "@/lib/utils" +import * as React from 'react' +import * as RadioGroupPrimitive 
from '@radix-ui/react-radio-group' +import { Circle } from 'lucide-react' +import { cn } from '@/lib/utils' const RadioGroup = React.forwardRef< React.ElementRef, React.ComponentPropsWithoutRef >(({ className, ...props }, ref) => { - return ( - - ) + return }) RadioGroup.displayName = RadioGroupPrimitive.Root.displayName @@ -28,7 +21,7 @@ const RadioGroupItem = React.forwardRef< user.id, { onDelete: 'cascade' }), - activeOrganizationId: text('active_organization_id').references(() => organization.id, { onDelete: 'set null' }), + activeOrganizationId: text('active_organization_id').references(() => organization.id, { + onDelete: 'set null', + }), }) export const account = pgTable('account', { @@ -67,8 +69,7 @@ export const workflow = pgTable('workflow', { userId: text('user_id') .notNull() .references(() => user.id, { onDelete: 'cascade' }), - workspaceId: text('workspace_id') - .references(() => workspace.id, { onDelete: 'cascade' }), + workspaceId: text('workspace_id').references(() => workspace.id, { onDelete: 'cascade' }), name: text('name').notNull(), description: text('description'), state: json('state').notNull(), @@ -128,20 +129,20 @@ export const settings = pgTable('settings', { .notNull() .references(() => user.id, { onDelete: 'cascade' }) .unique(), // One settings record per user - + // General settings theme: text('theme').notNull().default('system'), debugMode: boolean('debug_mode').notNull().default(false), autoConnect: boolean('auto_connect').notNull().default(true), autoFillEnvVars: boolean('auto_fill_env_vars').notNull().default(true), - + // Privacy settings telemetryEnabled: boolean('telemetry_enabled').notNull().default(true), telemetryNotifiedUser: boolean('telemetry_notified_user').notNull().default(false), - + // Keep general for future flexible settings general: json('general').notNull().default('{}'), - + updatedAt: timestamp('updated_at').notNull().defaultNow(), }) @@ -240,7 +241,7 @@ export const customTools = pgTable('custom_tools', { 
updatedAt: timestamp('updated_at').notNull().defaultNow(), }) -export const subscription = pgTable("subscription", { +export const subscription = pgTable('subscription', { id: text('id').primaryKey(), plan: text('plan').notNull(), referenceId: text('reference_id').notNull(), @@ -252,43 +253,45 @@ export const subscription = pgTable("subscription", { cancelAtPeriodEnd: boolean('cancel_at_period_end'), seats: integer('seats'), trialStart: timestamp('trial_start'), - trialEnd: timestamp('trial_end') + trialEnd: timestamp('trial_end'), }) -export const chat = pgTable('chat', { - id: text('id').primaryKey(), - workflowId: text('workflow_id') - .notNull() - .references(() => workflow.id, { onDelete: 'cascade' }), - userId: text('user_id') - .notNull() - .references(() => user.id, { onDelete: 'cascade' }), - subdomain: text('subdomain').notNull(), - title: text('title').notNull(), - description: text('description'), - isActive: boolean('is_active').notNull().default(true), - customizations: json('customizations').default('{}'), // For UI customization options - - // Authentication options - authType: text('auth_type').notNull().default('public'), // 'public', 'password', 'email' - password: text('password'), // Stored hashed, populated when authType is 'password' - allowedEmails: json('allowed_emails').default('[]'), // Array of allowed emails or domains when authType is 'email' - - // Output configuration - outputConfigs: json('output_configs').default('[]'), // Array of {blockId, path} objects - - createdAt: timestamp('created_at').notNull().defaultNow(), - updatedAt: timestamp('updated_at').notNull().defaultNow(), -}, -(table) => { - return { - // Ensure subdomains are unique - subdomainIdx: uniqueIndex('subdomain_idx').on(table.subdomain), +export const chat = pgTable( + 'chat', + { + id: text('id').primaryKey(), + workflowId: text('workflow_id') + .notNull() + .references(() => workflow.id, { onDelete: 'cascade' }), + userId: text('user_id') + .notNull() + 
.references(() => user.id, { onDelete: 'cascade' }), + subdomain: text('subdomain').notNull(), + title: text('title').notNull(), + description: text('description'), + isActive: boolean('is_active').notNull().default(true), + customizations: json('customizations').default('{}'), // For UI customization options + + // Authentication options + authType: text('auth_type').notNull().default('public'), // 'public', 'password', 'email' + password: text('password'), // Stored hashed, populated when authType is 'password' + allowedEmails: json('allowed_emails').default('[]'), // Array of allowed emails or domains when authType is 'email' + + // Output configuration + outputConfigs: json('output_configs').default('[]'), // Array of {blockId, path} objects + + createdAt: timestamp('created_at').notNull().defaultNow(), + updatedAt: timestamp('updated_at').notNull().defaultNow(), + }, + (table) => { + return { + // Ensure subdomains are unique + subdomainIdx: uniqueIndex('subdomain_idx').on(table.subdomain), + } } -} ) -export const organization = pgTable("organization", { +export const organization = pgTable('organization', { id: text('id').primaryKey(), name: text('name').notNull(), slug: text('slug').notNull(), @@ -298,23 +301,31 @@ export const organization = pgTable("organization", { updatedAt: timestamp('updated_at').defaultNow().notNull(), }) -export const member = pgTable("member", { +export const member = pgTable('member', { id: text('id').primaryKey(), - userId: text('user_id').notNull().references(() => user.id, { onDelete: 'cascade' }), - organizationId: text('organization_id').notNull().references(() => organization.id, { onDelete: 'cascade' }), + userId: text('user_id') + .notNull() + .references(() => user.id, { onDelete: 'cascade' }), + organizationId: text('organization_id') + .notNull() + .references(() => organization.id, { onDelete: 'cascade' }), role: text('role').notNull(), - createdAt: timestamp('created_at').defaultNow().notNull() + createdAt: 
timestamp('created_at').defaultNow().notNull(), }) -export const invitation = pgTable("invitation", { +export const invitation = pgTable('invitation', { id: text('id').primaryKey(), email: text('email').notNull(), - inviterId: text('inviter_id').notNull().references(() => user.id, { onDelete: 'cascade' }), - organizationId: text('organization_id').notNull().references(() => organization.id, { onDelete: 'cascade' }), + inviterId: text('inviter_id') + .notNull() + .references(() => user.id, { onDelete: 'cascade' }), + organizationId: text('organization_id') + .notNull() + .references(() => organization.id, { onDelete: 'cascade' }), role: text('role').notNull(), status: text('status').notNull(), expiresAt: timestamp('expires_at').notNull(), - createdAt: timestamp('created_at').defaultNow().notNull() + createdAt: timestamp('created_at').defaultNow().notNull(), }) export const workspace = pgTable('workspace', { @@ -347,4 +358,4 @@ export const workspaceMember = pgTable( userIdIdx: uniqueIndex('user_workspace_idx').on(table.userId, table.workspaceId), } } -) \ No newline at end of file +) diff --git a/sim/drizzle.config.ts b/apps/sim/drizzle.config.ts similarity index 100% rename from sim/drizzle.config.ts rename to apps/sim/drizzle.config.ts diff --git a/sim/executor/__test-utils__/mock-dependencies.ts b/apps/sim/executor/__test-utils__/mock-dependencies.ts similarity index 100% rename from sim/executor/__test-utils__/mock-dependencies.ts rename to apps/sim/executor/__test-utils__/mock-dependencies.ts diff --git a/sim/executor/__test-utils__/test-executor.ts b/apps/sim/executor/__test-utils__/test-executor.ts similarity index 100% rename from sim/executor/__test-utils__/test-executor.ts rename to apps/sim/executor/__test-utils__/test-executor.ts diff --git a/sim/executor/handlers/agent/agent-handler.test.ts b/apps/sim/executor/handlers/agent/agent-handler.test.ts similarity index 95% rename from sim/executor/handlers/agent/agent-handler.test.ts rename to 
apps/sim/executor/handlers/agent/agent-handler.test.ts index 4e5067cc5..3450f58ff 100644 --- a/sim/executor/handlers/agent/agent-handler.test.ts +++ b/apps/sim/executor/handlers/agent/agent-handler.test.ts @@ -14,21 +14,21 @@ vi.mock('@/lib/environment', () => ({ isProd: vi.fn().mockReturnValue(false), isDev: vi.fn().mockReturnValue(true), isTest: vi.fn().mockReturnValue(false), - getCostMultiplier: vi.fn().mockReturnValue(1) + getCostMultiplier: vi.fn().mockReturnValue(1), })) vi.mock('@/providers/utils', () => ({ getProviderFromModel: vi.fn().mockReturnValue('mock-provider'), transformBlockTool: vi.fn(), - getBaseModelProviders: vi.fn().mockReturnValue({ openai: {}, anthropic: {} }) + getBaseModelProviders: vi.fn().mockReturnValue({ openai: {}, anthropic: {} }), })) vi.mock('@/blocks', () => ({ - getAllBlocks: vi.fn().mockReturnValue([]) + getAllBlocks: vi.fn().mockReturnValue([]), })) vi.mock('@/tools', () => ({ - executeTool: vi.fn() + executeTool: vi.fn(), })) global.fetch = vi.fn() @@ -96,7 +96,7 @@ describe('AgentBlockHandler', () => { if (name === 'Content-Type') return 'application/json' if (name === 'X-Execution-Data') return null return null - } + }, }, json: () => Promise.resolve({ @@ -222,7 +222,7 @@ describe('AgentBlockHandler', () => { if (name === 'Content-Type') return 'application/json' if (name === 'X-Execution-Data') return null return null - } + }, }, json: () => Promise.resolve({ @@ -660,7 +660,7 @@ describe('AgentBlockHandler', () => { if (name === 'Content-Type') return 'application/json' if (name === 'X-Execution-Data') return null return null - } + }, }, json: () => Promise.resolve({ @@ -701,7 +701,7 @@ describe('AgentBlockHandler', () => { if (name === 'Content-Type') return 'application/json' if (name === 'X-Execution-Data') return null return null - } + }, }, json: () => Promise.resolve({ @@ -767,7 +767,7 @@ describe('AgentBlockHandler', () => { read: vi.fn().mockResolvedValue({ done: true, value: undefined }), }), } - + 
mockFetch.mockImplementationOnce(() => { return Promise.resolve({ ok: true, @@ -776,7 +776,7 @@ describe('AgentBlockHandler', () => { if (name === 'Content-Type') return 'text/event-stream' if (name === 'X-Execution-Data') return null return null - } + }, }, body: mockStreamBody, }) @@ -791,12 +791,12 @@ describe('AgentBlockHandler', () => { mockContext.stream = true mockContext.selectedOutputIds = [mockBlock.id] - + const result = await handler.execute(mockBlock, inputs, mockContext) - + expect(result).toHaveProperty('stream') expect(result).toHaveProperty('execution') - + expect((result as StreamingExecution).execution).toHaveProperty('success', true) expect((result as StreamingExecution).execution).toHaveProperty('output') expect((result as StreamingExecution).execution.output).toHaveProperty('response') @@ -809,7 +809,7 @@ describe('AgentBlockHandler', () => { read: vi.fn().mockResolvedValue({ done: true, value: undefined }), }), } - + const mockExecutionData = { success: true, output: { @@ -817,17 +817,24 @@ describe('AgentBlockHandler', () => { content: '', model: 'mock-model', tokens: { prompt: 10, completion: 20, total: 30 }, - } + }, }, logs: [ - { blockId: 'some-id', blockType: 'agent', startedAt: new Date().toISOString(), endedAt: new Date().toISOString(), durationMs: 100, success: true } + { + blockId: 'some-id', + blockType: 'agent', + startedAt: new Date().toISOString(), + endedAt: new Date().toISOString(), + durationMs: 100, + success: true, + }, ], metadata: { startTime: new Date().toISOString(), duration: 100, - } + }, } - + mockFetch.mockImplementationOnce(() => { return Promise.resolve({ ok: true, @@ -836,7 +843,7 @@ describe('AgentBlockHandler', () => { if (name === 'Content-Type') return 'text/event-stream' if (name === 'X-Execution-Data') return JSON.stringify(mockExecutionData) return null - } + }, }, body: mockStreamBody, }) @@ -851,12 +858,12 @@ describe('AgentBlockHandler', () => { mockContext.stream = true mockContext.selectedOutputIds = 
[mockBlock.id] - + const result = await handler.execute(mockBlock, inputs, mockContext) - + expect(result).toHaveProperty('stream') expect(result).toHaveProperty('execution') - + expect((result as StreamingExecution).execution.success).toBe(true) expect((result as StreamingExecution).execution.output.response.model).toBe('mock-model') const logs = (result as StreamingExecution).execution.logs @@ -870,33 +877,34 @@ describe('AgentBlockHandler', () => { const mockStreamObj = new ReadableStream({ start(controller) { controller.close() - } + }, }) mockFetch.mockImplementationOnce(() => { return Promise.resolve({ ok: true, headers: { - get: (name: string) => name === 'Content-Type' ? 'application/json' : null + get: (name: string) => (name === 'Content-Type' ? 'application/json' : null), }, - json: () => Promise.resolve({ - stream: {}, // Serialized stream placeholder - execution: { - success: true, - output: { - response: { - content: 'Test streaming content', - model: 'gpt-4o', - tokens: { prompt: 10, completion: 5, total: 15 }, - } + json: () => + Promise.resolve({ + stream: {}, // Serialized stream placeholder + execution: { + success: true, + output: { + response: { + content: 'Test streaming content', + model: 'gpt-4o', + tokens: { prompt: 10, completion: 5, total: 15 }, + }, + }, + logs: [], + metadata: { + startTime: new Date().toISOString(), + duration: 150, + }, }, - logs: [], - metadata: { - startTime: new Date().toISOString(), - duration: 150 - } - } - }) + }), }) }) @@ -909,14 +917,16 @@ describe('AgentBlockHandler', () => { mockContext.stream = true mockContext.selectedOutputIds = [mockBlock.id] - + const result = await handler.execute(mockBlock, inputs, mockContext) - + expect(result).toHaveProperty('stream') expect(result).toHaveProperty('execution') - + expect((result as StreamingExecution).execution.success).toBe(true) - expect((result as StreamingExecution).execution.output.response.content).toBe('Test streaming content') + expect((result as 
StreamingExecution).execution.output.response.content).toBe( + 'Test streaming content' + ) expect((result as StreamingExecution).execution.output.response.model).toBe('gpt-4o') }) }) diff --git a/sim/executor/handlers/agent/agent-handler.ts b/apps/sim/executor/handlers/agent/agent-handler.ts similarity index 90% rename from sim/executor/handlers/agent/agent-handler.ts rename to apps/sim/executor/handlers/agent/agent-handler.ts index 184d1bd4a..374342d22 100644 --- a/sim/executor/handlers/agent/agent-handler.ts +++ b/apps/sim/executor/handlers/agent/agent-handler.ts @@ -4,7 +4,7 @@ import { BlockOutput } from '@/blocks/types' import { getProviderFromModel, transformBlockTool } from '@/providers/utils' import { SerializedBlock } from '@/serializer/types' import { executeTool } from '@/tools' -import { getToolAsync, getTool } from '@/tools/utils' +import { getTool, getToolAsync } from '@/tools/utils' import { BlockHandler, ExecutionContext, StreamingExecution } from '../../types' const logger = createLogger('AgentBlockHandler') @@ -150,38 +150,41 @@ export class AgentBlockHandler implements BlockHandler { ) ).filter((t: any): t is NonNullable => t !== null) : [] - + // Check if streaming is requested and this block is selected for streaming - const isBlockSelectedForOutput = context.selectedOutputIds?.some(outputId => { - // First check for direct match (if the entire outputId is the blockId) - if (outputId === block.id) { - logger.info(`Direct match found for block ${block.id} in selected outputs`) - return true - } - - // Then try parsing the blockId from the blockId_path format - const firstUnderscoreIndex = outputId.indexOf('_') - if (firstUnderscoreIndex !== -1) { - const blockId = outputId.substring(0, firstUnderscoreIndex) - const isMatch = blockId === block.id - if (isMatch) { - logger.info(`Path match found for block ${block.id} in selected outputs (from ${outputId})`) + const isBlockSelectedForOutput = + context.selectedOutputIds?.some((outputId) => { + // 
First check for direct match (if the entire outputId is the blockId) + if (outputId === block.id) { + logger.info(`Direct match found for block ${block.id} in selected outputs`) + return true } - return isMatch - } - return false - }) ?? false - + + // Then try parsing the blockId from the blockId_path format + const firstUnderscoreIndex = outputId.indexOf('_') + if (firstUnderscoreIndex !== -1) { + const blockId = outputId.substring(0, firstUnderscoreIndex) + const isMatch = blockId === block.id + if (isMatch) { + logger.info( + `Path match found for block ${block.id} in selected outputs (from ${outputId})` + ) + } + return isMatch + } + return false + }) ?? false + // Check if this block has any outgoing connections - const hasOutgoingConnections = context.edges?.some(edge => edge.source === block.id) ?? false - + const hasOutgoingConnections = context.edges?.some((edge) => edge.source === block.id) ?? false + // Determine if we should use streaming for this block - const shouldUseStreaming = context.stream && - isBlockSelectedForOutput && - !hasOutgoingConnections - + const shouldUseStreaming = context.stream && isBlockSelectedForOutput && !hasOutgoingConnections + if (shouldUseStreaming) { - logger.info(`Block ${block.id} will use streaming response (selected for output with no outgoing connections)`) + logger.info( + `Block ${block.id} will use streaming response (selected for output with no outgoing connections)` + ) } // Debug request before sending to provider @@ -245,19 +248,19 @@ export class AgentBlockHandler implements BlockHandler { const contentType = response.headers.get('Content-Type') if (contentType?.includes('text/event-stream')) { logger.info(`Received streaming response for block ${block.id}`) - + // Ensure we have a valid body stream if (!response.body) { throw new Error(`No response body in streaming response for block ${block.id}`) } - + // Check if we have execution data in the header const executionDataHeader = 
response.headers.get('X-Execution-Data') if (executionDataHeader) { try { // Parse the execution data from the header const executionData = JSON.parse(executionDataHeader) - + // Add block-specific data to the execution logs if needed if (executionData && executionData.logs) { for (const log of executionData.logs) { @@ -266,7 +269,7 @@ export class AgentBlockHandler implements BlockHandler { if (!log.blockType && block.metadata?.id) log.blockType = block.metadata.id } } - + // Add block metadata to the execution data if missing if (executionData.output?.response) { // Ensure model and block info is set @@ -279,15 +282,15 @@ export class AgentBlockHandler implements BlockHandler { if (!executionData.blockId) { executionData.blockId = block.id } - + // Add explicit streaming flag to make it easier to identify streaming executions executionData.isStreaming = true } - + // Return both the stream and the execution data as separate properties const streamingExecution: StreamingExecution = { stream: response.body, - execution: executionData + execution: executionData, } return streamingExecution } catch (error) { @@ -295,7 +298,7 @@ export class AgentBlockHandler implements BlockHandler { // Continue with just the stream if there's an error } } - + // No execution data in header, just return the stream // Create a minimal StreamingExecution with empty execution data const minimalExecution: StreamingExecution = { @@ -306,34 +309,38 @@ export class AgentBlockHandler implements BlockHandler { logs: [], metadata: { duration: 0, - startTime: new Date().toISOString() - } - } + startTime: new Date().toISOString(), + }, + }, } return minimalExecution } - + // Check if we have a combined response with both stream and execution data const result = await response.json() - + if (result && typeof result === 'object' && 'stream' in result && 'execution' in result) { logger.info(`Received combined streaming response for block ${block.id}`) - + // Get the stream as a ReadableStream (need 
to convert from serialized format) const stream = new ReadableStream({ start(controller) { // Since stream was serialized as JSON, we need to reconstruct it // For now, we'll just use a placeholder message const encoder = new TextEncoder() - controller.enqueue(encoder.encode('Stream data cannot be serialized as JSON. You will need to return a proper stream.')) + controller.enqueue( + encoder.encode( + 'Stream data cannot be serialized as JSON. You will need to return a proper stream.' + ) + ) controller.close() - } + }, }) - + // Return both in a format the executor can handle const streamingExecution: StreamingExecution = { stream, - execution: result.execution + execution: result.execution, } return streamingExecution } diff --git a/sim/executor/handlers/api/api-handler.test.ts b/apps/sim/executor/handlers/api/api-handler.test.ts similarity index 99% rename from sim/executor/handlers/api/api-handler.test.ts rename to apps/sim/executor/handlers/api/api-handler.test.ts index 009e8fdd5..e77e0735c 100644 --- a/sim/executor/handlers/api/api-handler.test.ts +++ b/apps/sim/executor/handlers/api/api-handler.test.ts @@ -2,10 +2,10 @@ import '../../__test-utils__/mock-dependencies' import { beforeEach, describe, expect, it, Mock, vi } from 'vitest' import { SerializedBlock } from '@/serializer/types' import { executeTool } from '@/tools' +import { ToolConfig } from '@/tools/types' import { getTool } from '@/tools/utils' import { ExecutionContext } from '../../types' import { ApiBlockHandler } from './api-handler' -import { ToolConfig } from '@/tools/types' const mockGetTool = vi.mocked(getTool) const mockExecuteTool = executeTool as Mock @@ -55,8 +55,8 @@ describe('ApiBlockHandler', () => { url: 'https://example.com/api', method: 'POST', headers: () => ({ 'Content-Type': 'application/json' }), - body: (params) => params - } + body: (params) => params, + }, } // Reset mocks using vi diff --git a/sim/executor/handlers/api/api-handler.ts 
b/apps/sim/executor/handlers/api/api-handler.ts similarity index 100% rename from sim/executor/handlers/api/api-handler.ts rename to apps/sim/executor/handlers/api/api-handler.ts diff --git a/sim/executor/handlers/condition/condition-handler.test.ts b/apps/sim/executor/handlers/condition/condition-handler.test.ts similarity index 99% rename from sim/executor/handlers/condition/condition-handler.test.ts rename to apps/sim/executor/handlers/condition/condition-handler.test.ts index 695f798d2..97d8ed664 100644 --- a/sim/executor/handlers/condition/condition-handler.test.ts +++ b/apps/sim/executor/handlers/condition/condition-handler.test.ts @@ -101,7 +101,7 @@ describe('ConditionBlockHandler', () => { ], ]), blockLogs: [], - metadata: {}, + metadata: { duration: 0 }, environmentVariables: {}, // Now set the context's env vars decisions: { router: new Map(), condition: new Map() }, loopIterations: new Map(), @@ -109,6 +109,7 @@ describe('ConditionBlockHandler', () => { executedBlocks: new Set([mockSourceBlock.id]), activeExecutionPath: new Set(), workflow: mockWorkflow as SerializedWorkflow, + completedLoops: new Set(), } // Reset mocks using vi diff --git a/sim/executor/handlers/condition/condition-handler.ts b/apps/sim/executor/handlers/condition/condition-handler.ts similarity index 100% rename from sim/executor/handlers/condition/condition-handler.ts rename to apps/sim/executor/handlers/condition/condition-handler.ts diff --git a/sim/executor/handlers/evaluator/evaluator-handler.test.ts b/apps/sim/executor/handlers/evaluator/evaluator-handler.test.ts similarity index 99% rename from sim/executor/handlers/evaluator/evaluator-handler.test.ts rename to apps/sim/executor/handlers/evaluator/evaluator-handler.test.ts index e499042c7..8fa7b7d18 100644 --- a/sim/executor/handlers/evaluator/evaluator-handler.test.ts +++ b/apps/sim/executor/handlers/evaluator/evaluator-handler.test.ts @@ -51,7 +51,7 @@ describe('EvaluatorBlockHandler', () => { // Default mock implementations 
mockGetProviderFromModel.mockReturnValue('openai') - + // Set up fetch mock to return a successful response mockFetch.mockImplementation(() => { return Promise.resolve({ @@ -99,14 +99,14 @@ describe('EvaluatorBlockHandler', () => { expect(mockGetProviderFromModel).toHaveBeenCalledWith('gpt-4o') expect(mockFetch).toHaveBeenCalledWith( - expect.any(String), + expect.any(String), expect.objectContaining({ method: 'POST', headers: expect.any(Object), body: expect.any(String), }) ) - + // Verify the request body contains the expected data const fetchCallArgs = mockFetch.mock.calls[0] const requestBody = JSON.parse(fetchCallArgs[1].body) @@ -123,11 +123,11 @@ describe('EvaluatorBlockHandler', () => { }, required: ['score1', 'score2'], additionalProperties: false, - } + }, }), temperature: 0.1, }) - + expect(result).toEqual(expectedOutput) }) @@ -137,7 +137,7 @@ describe('EvaluatorBlockHandler', () => { content: JSON.stringify(contentObj), metrics: [{ name: 'clarity', description: 'Clarity score', range: { min: 1, max: 5 } }], } - + mockFetch.mockImplementationOnce(() => { return Promise.resolve({ ok: true, @@ -169,7 +169,7 @@ describe('EvaluatorBlockHandler', () => { { name: 'completeness', description: 'Data completeness', range: { min: 0, max: 1 } }, ], } - + mockFetch.mockImplementationOnce(() => { return Promise.resolve({ ok: true, @@ -198,7 +198,7 @@ describe('EvaluatorBlockHandler', () => { content: 'Test content', metrics: [{ name: 'quality', description: 'Quality score', range: { min: 1, max: 10 } }], } - + mockFetch.mockImplementationOnce(() => { return Promise.resolve({ ok: true, @@ -223,7 +223,7 @@ describe('EvaluatorBlockHandler', () => { content: 'Test content', metrics: [{ name: 'score', description: 'Score', range: { min: 0, max: 5 } }], } - + mockFetch.mockImplementationOnce(() => { return Promise.resolve({ ok: true, @@ -251,7 +251,7 @@ describe('EvaluatorBlockHandler', () => { { name: 'fluency', description: 'Flu', range: { min: 0, max: 1 } }, ], } - + 
mockFetch.mockImplementationOnce(() => { return Promise.resolve({ ok: true, @@ -276,7 +276,7 @@ describe('EvaluatorBlockHandler', () => { content: 'Test', metrics: [{ name: 'CamelCaseScore', description: 'Desc', range: { min: 0, max: 10 } }], } - + mockFetch.mockImplementationOnce(() => { return Promise.resolve({ ok: true, @@ -304,7 +304,7 @@ describe('EvaluatorBlockHandler', () => { { name: 'missingScore', description: 'Desc2', range: { min: 0, max: 5 } }, ], } - + mockFetch.mockImplementationOnce(() => { return Promise.resolve({ ok: true, @@ -324,10 +324,10 @@ describe('EvaluatorBlockHandler', () => { expect((result as any).response.presentscore).toBe(4) expect((result as any).response.missingscore).toBe(0) }) - + it('should handle server error responses', async () => { const inputs = { content: 'Test error handling.' } - + // Override fetch mock to return an error mockFetch.mockImplementationOnce(() => { return Promise.resolve({ diff --git a/sim/executor/handlers/evaluator/evaluator-handler.ts b/apps/sim/executor/handlers/evaluator/evaluator-handler.ts similarity index 98% rename from sim/executor/handlers/evaluator/evaluator-handler.ts rename to apps/sim/executor/handlers/evaluator/evaluator-handler.ts index bc8975a09..69f988322 100644 --- a/sim/executor/handlers/evaluator/evaluator-handler.ts +++ b/apps/sim/executor/handlers/evaluator/evaluator-handler.ts @@ -104,7 +104,7 @@ export class EvaluatorBlockHandler implements BlockHandler { try { const baseUrl = process.env.NEXT_PUBLIC_APP_URL || '' const url = new URL('/api/providers', baseUrl) - + // Make sure we force JSON output in the request const providerRequest = { provider: providerId, @@ -114,14 +114,15 @@ export class EvaluatorBlockHandler implements BlockHandler { context: JSON.stringify([ { role: 'user', - content: 'Please evaluate the content provided in the system prompt. Return ONLY a valid JSON with metric scores.', + content: + 'Please evaluate the content provided in the system prompt. 
Return ONLY a valid JSON with metric scores.', }, ]), temperature: inputs.temperature || 0, apiKey: inputs.apiKey, workflowId: context.workflowId, } - + const response = await fetch(url.toString(), { method: 'POST', headers: { diff --git a/sim/executor/handlers/function/function-handler.test.ts b/apps/sim/executor/handlers/function/function-handler.test.ts similarity index 98% rename from sim/executor/handlers/function/function-handler.test.ts rename to apps/sim/executor/handlers/function/function-handler.test.ts index cdd8edb54..bf2b5e4d3 100644 --- a/sim/executor/handlers/function/function-handler.test.ts +++ b/apps/sim/executor/handlers/function/function-handler.test.ts @@ -30,13 +30,14 @@ describe('FunctionBlockHandler', () => { workflowId: 'test-workflow-id', blockStates: new Map(), blockLogs: [], - metadata: {}, + metadata: { duration: 0 }, environmentVariables: {}, decisions: { router: new Map(), condition: new Map() }, loopIterations: new Map(), loopItems: new Map(), executedBlocks: new Set(), activeExecutionPath: new Set(), + completedLoops: new Set(), } // Reset mocks using vi diff --git a/sim/executor/handlers/function/function-handler.ts b/apps/sim/executor/handlers/function/function-handler.ts similarity index 100% rename from sim/executor/handlers/function/function-handler.ts rename to apps/sim/executor/handlers/function/function-handler.ts diff --git a/sim/executor/handlers/generic/generic-handler.test.ts b/apps/sim/executor/handlers/generic/generic-handler.test.ts similarity index 98% rename from sim/executor/handlers/generic/generic-handler.test.ts rename to apps/sim/executor/handlers/generic/generic-handler.test.ts index 2ea9e28da..f5f9e3120 100644 --- a/sim/executor/handlers/generic/generic-handler.test.ts +++ b/apps/sim/executor/handlers/generic/generic-handler.test.ts @@ -3,10 +3,10 @@ import { beforeEach, describe, expect, it, Mock, vi } from 'vitest' import { BlockOutput } from '@/blocks/types' import { SerializedBlock } from 
'@/serializer/types' import { executeTool } from '@/tools' +import { ToolConfig } from '@/tools/types' import { getTool } from '@/tools/utils' import { ExecutionContext } from '../../types' import { GenericBlockHandler } from './generic-handler' -import { ToolConfig } from '@/tools/types' const mockGetTool = vi.mocked(getTool) const mockExecuteTool = executeTool as Mock @@ -54,13 +54,13 @@ describe('GenericBlockHandler', () => { url: 'https://example.com/api', method: 'POST', headers: () => ({ 'Content-Type': 'application/json' }), - body: (params) => params - } + body: (params) => params, + }, } // Reset mocks using vi vi.clearAllMocks() - + // Set up mockGetTool to return mockTool mockGetTool.mockImplementation((toolId) => { if (toolId === 'some_custom_tool') { @@ -98,7 +98,7 @@ describe('GenericBlockHandler', () => { it('should throw error if the associated tool is not found', async () => { const inputs = { param1: 'value' } - + // Override mock to return undefined for this test mockGetTool.mockImplementation(() => undefined) diff --git a/sim/executor/handlers/generic/generic-handler.ts b/apps/sim/executor/handlers/generic/generic-handler.ts similarity index 100% rename from sim/executor/handlers/generic/generic-handler.ts rename to apps/sim/executor/handlers/generic/generic-handler.ts diff --git a/sim/executor/handlers/index.ts b/apps/sim/executor/handlers/index.ts similarity index 100% rename from sim/executor/handlers/index.ts rename to apps/sim/executor/handlers/index.ts diff --git a/sim/executor/handlers/router/router-handler.test.ts b/apps/sim/executor/handlers/router/router-handler.test.ts similarity index 97% rename from sim/executor/handlers/router/router-handler.test.ts rename to apps/sim/executor/handlers/router/router-handler.test.ts index 8657f3056..d4dc1a59d 100644 --- a/sim/executor/handlers/router/router-handler.test.ts +++ b/apps/sim/executor/handlers/router/router-handler.test.ts @@ -82,7 +82,7 @@ describe('RouterBlockHandler', () => { // 
Default mock implementations mockGetProviderFromModel.mockReturnValue('openai') mockGenerateRouterPrompt.mockReturnValue('Generated System Prompt') - + // Set up fetch mock to return a successful response mockFetch.mockImplementation(() => { return Promise.resolve({ @@ -118,9 +118,9 @@ describe('RouterBlockHandler', () => { type: 'target', title: 'Option A', description: 'Choose A', - subBlocks: { + subBlocks: { p: 'a', - systemPrompt: '' + systemPrompt: '', }, currentState: undefined, }, @@ -129,9 +129,9 @@ describe('RouterBlockHandler', () => { type: 'target', title: 'Option B', description: 'Choose B', - subBlocks: { + subBlocks: { p: 'b', - systemPrompt: '' + systemPrompt: '', }, currentState: undefined, }, @@ -155,14 +155,14 @@ describe('RouterBlockHandler', () => { expect(mockGenerateRouterPrompt).toHaveBeenCalledWith(inputs.prompt, expectedTargetBlocks) expect(mockGetProviderFromModel).toHaveBeenCalledWith('gpt-4o') expect(mockFetch).toHaveBeenCalledWith( - expect.any(String), + expect.any(String), expect.objectContaining({ method: 'POST', headers: expect.any(Object), body: expect.any(String), }) ) - + // Verify the request body contains the expected data const fetchCallArgs = mockFetch.mock.calls[0] const requestBody = JSON.parse(fetchCallArgs[1].body) @@ -173,7 +173,7 @@ describe('RouterBlockHandler', () => { context: JSON.stringify([{ role: 'user', content: 'Choose the best option.' 
}]), temperature: 0.5, }) - + expect(result).toEqual(expectedOutput) }) @@ -190,7 +190,7 @@ describe('RouterBlockHandler', () => { it('should throw error if LLM response is not a valid target block ID', async () => { const inputs = { prompt: 'Test' } - + // Override fetch mock to return an invalid block ID mockFetch.mockImplementationOnce(() => { return Promise.resolve({ @@ -217,18 +217,18 @@ describe('RouterBlockHandler', () => { await handler.execute(mockBlock, inputs, mockContext) expect(mockGetProviderFromModel).toHaveBeenCalledWith('gpt-4o') - + const fetchCallArgs = mockFetch.mock.calls[0] const requestBody = JSON.parse(fetchCallArgs[1].body) expect(requestBody).toMatchObject({ - model: 'gpt-4o', - temperature: 0 + model: 'gpt-4o', + temperature: 0, }) }) - + it('should handle server error responses', async () => { const inputs = { prompt: 'Test error handling.' } - + // Override fetch mock to return an error mockFetch.mockImplementationOnce(() => { return Promise.resolve({ diff --git a/sim/executor/handlers/router/router-handler.ts b/apps/sim/executor/handlers/router/router-handler.ts similarity index 92% rename from sim/executor/handlers/router/router-handler.ts rename to apps/sim/executor/handlers/router/router-handler.ts index 2bdd5cd00..a7a28057f 100644 --- a/sim/executor/handlers/router/router-handler.ts +++ b/apps/sim/executor/handlers/router/router-handler.ts @@ -40,7 +40,7 @@ export class RouterBlockHandler implements BlockHandler { try { const baseUrl = process.env.NEXT_PUBLIC_APP_URL || '' const url = new URL('/api/providers', baseUrl) - + // Create the provider request with proper message formatting const messages = [{ role: 'user', content: routerConfig.prompt }] const systemPrompt = generateRouterPrompt(routerConfig.prompt, targetBlocks) @@ -53,7 +53,7 @@ export class RouterBlockHandler implements BlockHandler { apiKey: routerConfig.apiKey, workflowId: context.workflowId, } - + const response = await fetch(url.toString(), { method: 'POST', 
headers: { @@ -77,13 +77,14 @@ export class RouterBlockHandler implements BlockHandler { } const result = await response.json() - + const chosenBlockId = result.content.trim().toLowerCase() const chosenBlock = targetBlocks?.find((b) => b.id === chosenBlockId) if (!chosenBlock) { - logger.error(`Invalid routing decision. Response content: "${result.content}", available blocks:`, - targetBlocks?.map(b => ({ id: b.id, title: b.title })) || [] + logger.error( + `Invalid routing decision. Response content: "${result.content}", available blocks:`, + targetBlocks?.map((b) => ({ id: b.id, title: b.title })) || [] ) throw new Error(`Invalid routing decision: ${chosenBlockId}`) } @@ -128,21 +129,20 @@ export class RouterBlockHandler implements BlockHandler { if (!targetBlock) { throw new Error(`Target block ${conn.target} not found`) } - + // Extract system prompt for agent blocks let systemPrompt = '' if (targetBlock.metadata?.id === 'agent') { // Try to get system prompt from different possible locations - systemPrompt = targetBlock.config?.params?.systemPrompt || - targetBlock.inputs?.systemPrompt || - '' - + systemPrompt = + targetBlock.config?.params?.systemPrompt || targetBlock.inputs?.systemPrompt || '' + // If system prompt is still not found, check if we can extract it from inputs if (!systemPrompt && targetBlock.inputs) { systemPrompt = targetBlock.inputs.systemPrompt || '' } } - + return { id: targetBlock.id, type: targetBlock.metadata?.id, @@ -150,7 +150,7 @@ export class RouterBlockHandler implements BlockHandler { description: targetBlock.metadata?.description, subBlocks: { ...targetBlock.config.params, - systemPrompt: systemPrompt + systemPrompt: systemPrompt, }, currentState: context.blockStates.get(targetBlock.id)?.output, } diff --git a/sim/executor/index.test.ts b/apps/sim/executor/index.test.ts similarity index 98% rename from sim/executor/index.test.ts rename to apps/sim/executor/index.test.ts index e0813b3a9..6fa2a8e68 100644 --- 
a/sim/executor/index.test.ts +++ b/apps/sim/executor/index.test.ts @@ -387,13 +387,20 @@ describe('Executor', () => { const result = await executor.execute('test-workflow-id') // Verify the result has the expected structure - expect(result).toHaveProperty('success') - expect(result).toHaveProperty('output') - expect(result.output).toHaveProperty('response') + // Check if result is a StreamingExecution or ExecutionResult + if ('success' in result) { + expect(result).toHaveProperty('success') + expect(result).toHaveProperty('output') + expect(result.output).toHaveProperty('response') - // Our mocked implementation results in a false success value - // In real usage, this would be true for successful executions - expect(typeof result.success).toBe('boolean') + // Our mocked implementation results in a false success value + // In real usage, this would be true for successful executions + expect(typeof result.success).toBe('boolean') + } else { + // Handle StreamingExecution case + expect(result).toHaveProperty('stream') + expect(typeof result.stream).toBe('object') + } }) }) diff --git a/sim/executor/index.ts b/apps/sim/executor/index.ts similarity index 94% rename from sim/executor/index.ts rename to apps/sim/executor/index.ts index e0c3adf77..13ec98b8e 100644 --- a/sim/executor/index.ts +++ b/apps/sim/executor/index.ts @@ -35,13 +35,13 @@ function trackWorkflowTelemetry(eventName: string, data: Record) { // Add timestamp and sanitize the data to avoid circular references const safeData = { ...data, - timestamp: Date.now() + timestamp: Date.now(), } - + // Track the event through the global telemetry function window.__SIM_TRACK_EVENT(eventName, { category: 'workflow', - ...safeData + ...safeData, }) } } @@ -63,18 +63,20 @@ export class Executor { private actualWorkflow: SerializedWorkflow constructor( - private workflowParam: SerializedWorkflow | { - workflow: SerializedWorkflow, - currentBlockStates?: Record, - envVarValues?: Record, - workflowInput?: any, - 
workflowVariables?: Record, - contextExtensions?: { - stream?: boolean, - selectedOutputIds?: string[], - edges?: Array<{source: string, target: string}> - } - }, + private workflowParam: + | SerializedWorkflow + | { + workflow: SerializedWorkflow + currentBlockStates?: Record + envVarValues?: Record + workflowInput?: any + workflowVariables?: Record + contextExtensions?: { + stream?: boolean + selectedOutputIds?: string[] + edges?: Array<{ source: string; target: string }> + } + }, private initialBlockStates: Record = {}, private environmentVariables: Record = {}, workflowInput?: any, @@ -88,16 +90,16 @@ export class Executor { this.environmentVariables = options.envVarValues || {} this.workflowInput = options.workflowInput || {} this.workflowVariables = options.workflowVariables || {} - + // Store context extensions for streaming and output selection if (options.contextExtensions) { this.contextExtensions = options.contextExtensions - + if (this.contextExtensions.stream) { logger.info('Executor initialized with streaming enabled', { hasSelectedOutputIds: Array.isArray(this.contextExtensions.selectedOutputIds), - selectedOutputCount: Array.isArray(this.contextExtensions.selectedOutputIds) - ? this.contextExtensions.selectedOutputIds.length + selectedOutputCount: Array.isArray(this.contextExtensions.selectedOutputIds) + ? 
this.contextExtensions.selectedOutputIds.length : 0, selectedOutputIds: this.contextExtensions.selectedOutputIds || [], }) @@ -105,7 +107,7 @@ export class Executor { } } else { this.actualWorkflow = workflowParam - + if (workflowInput) { this.workflowInput = workflowInput logger.info('[Executor] Using workflow input:', JSON.stringify(this.workflowInput, null, 2)) @@ -154,7 +156,7 @@ export class Executor { workflowId, blockCount: this.actualWorkflow.blocks.length, connectionCount: this.actualWorkflow.connections.length, - startTime: startTime.toISOString() + startTime: startTime.toISOString(), }) this.validateWorkflow() @@ -208,66 +210,80 @@ export class Executor { hasMoreLayers = false } else { const outputs = await this.executeLayer(nextLayer, context) - + // Check if we got a StreamingExecution response from any block - const streamingOutput = outputs.find(output => - typeof output === 'object' && output !== null && - 'stream' in output && 'execution' in output + const streamingOutput = outputs.find( + (output) => + typeof output === 'object' && + output !== null && + 'stream' in output && + 'execution' in output ) - + if (streamingOutput) { // This is a combined response with both stream and execution data logger.info('Found combined stream+execution response from block') - + // Incorporate the execution data from the block into our context const executionData = streamingOutput.execution - + // Add any logs from the execution data to our context if (executionData.logs && Array.isArray(executionData.logs)) { context.blockLogs.push(...executionData.logs) } - + // Add proper console entry for the streaming block // This ensures identical formatting between streamed and non-streamed outputs if (executionData.output) { - const blockLog = executionData.logs?.find((log: BlockLog) => log.blockId === executionData.blockId) + const blockLog = executionData.logs?.find( + (log: BlockLog) => log.blockId === executionData.blockId + ) const consoleStore = 
useConsoleStore.getState() - + // Create a complete console entry with the full output structure, not the raw streaming object const consoleEntry = { output: executionData.output, // Use just the output, not the whole streaming structure durationMs: blockLog?.durationMs || executionData.metadata?.duration || 0, - startedAt: blockLog?.startedAt || executionData.metadata?.startTime || new Date().toISOString(), - endedAt: blockLog?.endedAt || executionData.metadata?.endTime || new Date().toISOString(), + startedAt: + blockLog?.startedAt || + executionData.metadata?.startTime || + new Date().toISOString(), + endedAt: + blockLog?.endedAt || + executionData.metadata?.endTime || + new Date().toISOString(), workflowId: context.workflowId, - timestamp: blockLog?.startedAt || executionData.metadata?.startTime || new Date().toISOString(), + timestamp: + blockLog?.startedAt || + executionData.metadata?.startTime || + new Date().toISOString(), blockId: executionData.blockId, blockName: executionData.blockName || blockLog?.blockName || 'Agent Block', - blockType: executionData.blockType || blockLog?.blockType || 'agent' + blockType: executionData.blockType || blockLog?.blockType || 'agent', } - + // Add to console const newEntry = consoleStore.addConsole(consoleEntry) - + // Save the entryId for potential updates when stream completes const consoleEntryId = newEntry?.id - + // Set up a stream completion handler to update the console with final content if (consoleEntryId && 'stream' in streamingOutput) { // Clone the stream so we don't consume the original one const originalStream = streamingOutput.stream const [contentStream, returnStream] = originalStream.tee() - + // Replace the original stream with our cloned version that will be returned streamingOutput.stream = returnStream - + // Create a reader to process the cloned stream for content collection const reader = contentStream.getReader() const decoder = new TextDecoder() - let fullContent = ''; - + let fullContent = '' + // 
Process the stream in the background to collect the full content - (async () => { + ;(async () => { try { while (true) { const { done, value } = await reader.read() @@ -281,20 +297,22 @@ export class Executor { ...executionData.output, response: { ...executionData.output.response, - content: fullContent - } + content: fullContent, + }, } - + // Update the console UI with the final content consoleStore.updateConsole(consoleEntryId, { output: updatedOutput }) - - // Update the execution data itself with the final content + + // Update the execution data itself with the final content // so that when logs are persisted, they have the complete content executionData.output.response.content = fullContent // If there's a block log for this execution, update it with the final content if (executionData.blockId) { - const blockLog = context.blockLogs.find(log => log.blockId === executionData.blockId) + const blockLog = context.blockLogs.find( + (log) => log.blockId === executionData.blockId + ) if (blockLog?.output?.response) { blockLog.output.response.content = fullContent } @@ -306,7 +324,7 @@ export class Executor { })() } } - + // Build a complete execution result with our context's logs const execution: ExecutionResult & { isStreaming: boolean } = { success: executionData.success !== false, @@ -324,21 +342,21 @@ export class Executor { }, isStreaming: true, } - + // Add block metadata to logs if missing if (context.blockLogs.length > 0) { for (const log of context.blockLogs) { if (!log.output) log.output = { response: {} } - + // For blocks matching the streaming block, ensure we add response and content properly if (log.blockId === executionData.blockId) { if (!log.output.response) log.output.response = {} - + // Add the output structure, preferring direct response content if available if (executionData.output?.response) { // Copy all properties from executionData response Object.assign(log.output.response, executionData.output.response) - + // For streaming, we may not 
have content yet, so we store a placeholder // that will be updated when the stream completes if (!log.output.response.content && executionData.output.response.content) { @@ -348,7 +366,7 @@ export class Executor { } } } - + // Return a properly formed StreamingExecution object return { stream: streamingOutput.stream, @@ -358,11 +376,19 @@ export class Executor { if (outputs.length > 0) { // Filter out StreamingExecution objects (already handled above) - const normalizedOutputs = outputs.filter(output => - !(typeof output === 'object' && output !== null && 'stream' in output && 'execution' in output) + const normalizedOutputs = outputs.filter( + (output) => + !( + typeof output === 'object' && + output !== null && + 'stream' in output && + 'execution' in output + ) ) if (normalizedOutputs.length > 0) { - finalOutput = normalizedOutputs[normalizedOutputs.length - 1] as NormalizedBlockOutput + finalOutput = normalizedOutputs[ + normalizedOutputs.length - 1 + ] as NormalizedBlockOutput } } @@ -392,7 +418,7 @@ export class Executor { executedBlockCount: context.executedBlocks.size, startTime: startTime.toISOString(), endTime: endTime.toISOString(), - success: true + success: true, }) return { @@ -418,7 +444,7 @@ export class Executor { duration: new Date().getTime() - startTime.getTime(), error: this.extractErrorMessage(error), executedBlockCount: context.executedBlocks.size, - blockLogs: context.blockLogs.length + blockLogs: context.blockLogs.length, }) return { @@ -513,7 +539,9 @@ export class Executor { * @throws Error if workflow validation fails */ private validateWorkflow(): void { - const starterBlock = this.actualWorkflow.blocks.find((block) => block.metadata?.id === 'starter') + const starterBlock = this.actualWorkflow.blocks.find( + (block) => block.metadata?.id === 'starter' + ) if (!starterBlock || !starterBlock.enabled) { throw new Error('Workflow must have an enabled starter block') } @@ -605,7 +633,9 @@ export class Executor { } } - const starterBlock = 
this.actualWorkflow.blocks.find((block) => block.metadata?.id === 'starter') + const starterBlock = this.actualWorkflow.blocks.find( + (block) => block.metadata?.id === 'starter' + ) if (starterBlock) { // Initialize the starter block with the workflow input try { @@ -984,7 +1014,7 @@ export class Executor { blockType: block.metadata?.id || 'unknown', blockName: block.metadata?.name || 'Unnamed Block', inputSize: Object.keys(inputs).length, - startTime: new Date().toISOString() + startTime: new Date().toISOString(), }) // Find the appropriate handler @@ -1040,7 +1070,7 @@ export class Executor { blockType: block.metadata?.id || 'unknown', blockName: block.metadata?.name || 'Unnamed Block', durationMs: Math.round(executionTime), - success: true + success: true, }) return output @@ -1128,7 +1158,7 @@ export class Executor { blockName: block.metadata?.name || 'Unnamed Block', durationMs: blockLog.durationMs, errorType: error.name || 'Error', - errorMessage: this.extractErrorMessage(error) + errorMessage: this.extractErrorMessage(error), }) throw new Error(errorMessage) diff --git a/sim/executor/loops.ts b/apps/sim/executor/loops.ts similarity index 100% rename from sim/executor/loops.ts rename to apps/sim/executor/loops.ts diff --git a/sim/executor/path.ts b/apps/sim/executor/path.ts similarity index 100% rename from sim/executor/path.ts rename to apps/sim/executor/path.ts diff --git a/sim/executor/resolver.test.ts b/apps/sim/executor/resolver.test.ts similarity index 100% rename from sim/executor/resolver.test.ts rename to apps/sim/executor/resolver.test.ts diff --git a/sim/executor/resolver.ts b/apps/sim/executor/resolver.ts similarity index 97% rename from sim/executor/resolver.ts rename to apps/sim/executor/resolver.ts index 0d05b43b1..fbbfc2540 100644 --- a/sim/executor/resolver.ts +++ b/apps/sim/executor/resolver.ts @@ -347,31 +347,35 @@ export class InputResolver { ): string { // Skip resolution for API block body content that looks like XML if ( - 
currentBlock.metadata?.id === 'api' && - typeof value === 'string' && - ( - // Check if this looks like XML content - (value.includes('')) - ) + currentBlock.metadata?.id === 'api' && + typeof value === 'string' && + // Check if this looks like XML content + (value.includes('') ) { - return value; + return value } const blockMatches = value.match(/<([^>]+)>/g) if (!blockMatches) return value // If we're in an API block body, check each match to see if it looks like XML rather than a reference - if (currentBlock.metadata?.id === 'api' && blockMatches.some(match => { - const innerContent = match.slice(1, -1); - // Patterns that suggest this is XML, not a block reference: - return innerContent.includes(':') || // namespaces like soap:Envelope - innerContent.includes('=') || // attributes like xmlns="http://..." - innerContent.includes(' ') || // any space indicates attributes - innerContent.includes('/') || // self-closing tags - !innerContent.includes('.'); // block refs always have dots - })) { - return value; // Likely XML content, return unchanged + if ( + currentBlock.metadata?.id === 'api' && + blockMatches.some((match) => { + const innerContent = match.slice(1, -1) + // Patterns that suggest this is XML, not a block reference: + return ( + innerContent.includes(':') || // namespaces like soap:Envelope + innerContent.includes('=') || // attributes like xmlns="http://..." 
+ innerContent.includes(' ') || // any space indicates attributes + innerContent.includes('/') || // self-closing tags + !innerContent.includes('.') + ) // block refs always have dots + }) + ) { + return value // Likely XML content, return unchanged } let resolvedValue = value @@ -389,10 +393,10 @@ export class InputResolver { const path = match.slice(1, -1) const [blockRef, ...pathParts] = path.split('.') - + // Skip XML-like tags that have no path parts (not a valid block reference) if (pathParts.length === 0 || blockRef.includes(':') || blockRef.includes(' ')) { - continue; + continue } // Special case for "start" references diff --git a/sim/executor/types.ts b/apps/sim/executor/types.ts similarity index 98% rename from sim/executor/types.ts rename to apps/sim/executor/types.ts index 8c8ca580a..fc16a7956 100644 --- a/sim/executor/types.ts +++ b/apps/sim/executor/types.ts @@ -100,11 +100,11 @@ export interface ExecutionContext { activeExecutionPath: Set // Set of block IDs in the current execution path workflow?: SerializedWorkflow // Reference to the workflow being executed - + // Streaming support and output selection stream?: boolean // Whether to use streaming responses when available selectedOutputIds?: string[] // IDs of blocks selected for streaming output - edges?: Array<{source: string, target: string}> // Workflow edge connections + edges?: Array<{ source: string; target: string }> // Workflow edge connections } /** diff --git a/sim/hooks/use-debounce.ts b/apps/sim/hooks/use-debounce.ts similarity index 100% rename from sim/hooks/use-debounce.ts rename to apps/sim/hooks/use-debounce.ts diff --git a/sim/instrumentation-client.ts b/apps/sim/instrumentation-client.ts similarity index 82% rename from sim/instrumentation-client.ts rename to apps/sim/instrumentation-client.ts index dbc717aaa..41ab84957 100644 --- a/sim/instrumentation-client.ts +++ b/apps/sim/instrumentation-client.ts @@ -1,25 +1,23 @@ /** * Sim Studio Telemetry - Client-side Instrumentation 
- * + * * This file initializes client-side telemetry when the app loads in the browser. * It respects the user's telemetry preferences stored in localStorage. - * + * */ - // This file configures the initialization of Sentry on the client. // The added config here will be used whenever a users loads a page in their browser. // https://docs.sentry.io/platforms/javascript/guides/nextjs/ - import { - BrowserClient, - getCurrentScope, - makeFetchTransport, - defaultStackParser, breadcrumbsIntegration, - dedupeIntegration, - linkedErrorsIntegration, + BrowserClient, captureRouterTransitionStart, -} from "@sentry/nextjs" + dedupeIntegration, + defaultStackParser, + getCurrentScope, + linkedErrorsIntegration, + makeFetchTransport, +} from '@sentry/nextjs' // Only in production if (typeof window !== 'undefined' && process.env.NODE_ENV === 'production') { @@ -28,14 +26,10 @@ if (typeof window !== 'undefined' && process.env.NODE_ENV === 'production') { environment: process.env.NODE_ENV || 'development', transport: makeFetchTransport, stackParser: defaultStackParser, - integrations: [ - breadcrumbsIntegration(), - dedupeIntegration(), - linkedErrorsIntegration(), - ], + integrations: [breadcrumbsIntegration(), dedupeIntegration(), linkedErrorsIntegration()], beforeSend(event) { if (event.request && typeof event.request === 'object') { - (event.request as any).ip = null + ;(event.request as any).ip = null } return event }, @@ -45,9 +39,8 @@ if (typeof window !== 'undefined' && process.env.NODE_ENV === 'production') { client.init() } -export const onRouterTransitionStart = process.env.NODE_ENV === 'production' - ? captureRouterTransitionStart - : () => {} +export const onRouterTransitionStart = + process.env.NODE_ENV === 'production' ? 
captureRouterTransitionStart : () => {} if (typeof window !== 'undefined') { const TELEMETRY_STATUS_KEY = 'simstudio-telemetry-status' @@ -73,47 +66,51 @@ if (typeof window !== 'undefined') { function safeSerialize(obj: any): any { if (obj === null || obj === undefined) return null if (typeof obj !== 'object') return obj - + if (Array.isArray(obj)) { - return obj.map(item => safeSerialize(item)) + return obj.map((item) => safeSerialize(item)) } - + const result: Record = {} - + for (const key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { const value = obj[key] - if (value === undefined || value === null || typeof value === 'function' || typeof value === 'symbol') { + if ( + value === undefined || + value === null || + typeof value === 'function' || + typeof value === 'symbol' + ) { continue } - + try { result[key] = safeSerialize(value) } catch (e) { try { result[key] = String(value) - } catch (e2) { - } + } catch (e2) {} } } } - + return result } - (window as any).__SIM_TELEMETRY_ENABLED = telemetryEnabled; - (window as any).__SIM_TRACK_EVENT = (eventName: string, properties?: any) => { + ;(window as any).__SIM_TELEMETRY_ENABLED = telemetryEnabled + ;(window as any).__SIM_TRACK_EVENT = (eventName: string, properties?: any) => { if (!telemetryEnabled) return - + const safeProps = properties || {} - + const payload = { category: 'feature_usage', action: eventName || 'unknown_event', timestamp: Date.now(), - ...safeSerialize(safeProps) + ...safeSerialize(safeProps), } - + fetch('/api/telemetry', { method: 'POST', headers: { 'Content-Type': 'application/json' }, @@ -138,23 +135,24 @@ if (typeof window !== 'undefined') { window.addEventListener('load', () => { performance.mark('sim-studio-loaded') performance.measure('page-load', 'sim-studio-init', 'sim-studio-loaded') - + if (typeof PerformanceObserver !== 'undefined') { const lcpObserver = new PerformanceObserver((list) => { const entries = list.getEntries() - - entries.forEach(entry => { - const 
value = entry.entryType === 'largest-contentful-paint' - ? (entry as any).startTime - : (entry as any).value || 0 - + + entries.forEach((entry) => { + const value = + entry.entryType === 'largest-contentful-paint' + ? (entry as any).startTime + : (entry as any).value || 0 + // Ensure we have non-null values for all fields const metric = { name: entry.name || 'unknown', value: value || 0, - entryType: entry.entryType || 'unknown' + entryType: entry.entryType || 'unknown', } - + if (telemetryEnabled && telemetryConfig?.clientSide?.enabled) { const safePayload = { category: 'performance', @@ -162,9 +160,9 @@ if (typeof window !== 'undefined') { label: metric.name, value: metric.value, entryType: metric.entryType, - timestamp: Date.now() + timestamp: Date.now(), } - + fetch('/api/telemetry', { method: 'POST', headers: { 'Content-Type': 'application/json' }, @@ -174,18 +172,18 @@ if (typeof window !== 'undefined') { }) } }) - + lcpObserver.disconnect() }) - + const clsObserver = new PerformanceObserver((list) => { const entries = list.getEntries() let clsValue = 0 - - entries.forEach(entry => { + + entries.forEach((entry) => { clsValue += (entry as any).value || 0 }) - + if (telemetryEnabled && telemetryConfig?.clientSide?.enabled) { const safePayload = { category: 'performance', @@ -193,9 +191,9 @@ if (typeof window !== 'undefined') { label: 'CLS', value: clsValue || 0, entryType: 'layout-shift', - timestamp: Date.now() + timestamp: Date.now(), } - + fetch('/api/telemetry', { method: 'POST', headers: { 'Content-Type': 'application/json' }, @@ -204,23 +202,23 @@ if (typeof window !== 'undefined') { // Silently fail if sending metrics fails }) } - + clsObserver.disconnect() }) - + const fidObserver = new PerformanceObserver((list) => { const entries = list.getEntries() - - entries.forEach(entry => { + + entries.forEach((entry) => { const processingStart = (entry as any).processingStart || 0 const startTime = (entry as any).startTime || 0 - + const metric = { name: 
entry.name || 'unknown', value: processingStart - startTime, - entryType: entry.entryType || 'unknown' + entryType: entry.entryType || 'unknown', } - + if (telemetryEnabled && telemetryConfig?.clientSide?.enabled) { const safePayload = { category: 'performance', @@ -228,9 +226,9 @@ if (typeof window !== 'undefined') { label: 'FID', value: metric.value, entryType: metric.entryType, - timestamp: Date.now() + timestamp: Date.now(), } - + fetch('/api/telemetry', { method: 'POST', headers: { 'Content-Type': 'application/json' }, @@ -240,10 +238,10 @@ if (typeof window !== 'undefined') { }) } }) - + fidObserver.disconnect() }) - + lcpObserver.observe({ type: 'largest-contentful-paint', buffered: true }) clsObserver.observe({ type: 'layout-shift', buffered: true }) fidObserver.observe({ type: 'first-input', buffered: true }) @@ -256,18 +254,18 @@ if (typeof window !== 'undefined') { message: event.error?.message || 'Unknown error', stack: event.error?.stack?.split('\n')[0] || '', url: window.location.pathname, - timestamp: Date.now() + timestamp: Date.now(), } - + const safePayload = { category: 'error', action: 'client_error', label: errorDetails.message, stack: errorDetails.stack, url: errorDetails.url, - timestamp: errorDetails.timestamp + timestamp: errorDetails.timestamp, } - + fetch('/api/telemetry', { method: 'POST', headers: { 'Content-Type': 'application/json' }, @@ -278,4 +276,4 @@ if (typeof window !== 'undefined') { } }) } -} \ No newline at end of file +} diff --git a/sim/instrumentation.ts b/apps/sim/instrumentation.ts similarity index 76% rename from sim/instrumentation.ts rename to apps/sim/instrumentation.ts index 10b12ff43..5a92bdd7f 100644 --- a/sim/instrumentation.ts +++ b/apps/sim/instrumentation.ts @@ -1,22 +1,21 @@ /** * Sim Studio Telemetry - Server-side Instrumentation - * + * * This file can be customized in forked repositories: * - Set TELEMETRY_ENDPOINT env var to your collector * - Modify exporter configuration as needed - * + * * Please 
maintain ethical telemetry practices if modified. */ - // This file enables OpenTelemetry instrumentation for Next.js // See: https://nextjs.org/docs/app/building-your-application/optimizing/open-telemetry // Set experimental.instrumentationHook = true in next.config.ts to enable this - import { createLogger } from '@/lib/logs/console-logger' -const Sentry = process.env.NODE_ENV === 'production' - ? require('@sentry/nextjs') - : { captureRequestError: () => {} } +const Sentry = + process.env.NODE_ENV === 'production' + ? require('@sentry/nextjs') + : { captureRequestError: () => {} } const logger = createLogger('OtelInstrumentation') @@ -55,7 +54,7 @@ export async function register() { let telemetryConfig try { // Use dynamic import instead of require for ES modules - telemetryConfig = (await import('./telemetry.config.js')).default + telemetryConfig = (await import('./telemetry.config.ts')).default } catch (e) { telemetryConfig = DEFAULT_TELEMETRY_CONFIG } @@ -70,45 +69,54 @@ export async function register() { const { SemanticResourceAttributes } = await import('@opentelemetry/semantic-conventions') const { BatchSpanProcessor } = await import('@opentelemetry/sdk-trace-node') const { OTLPTraceExporter } = await import('@opentelemetry/exporter-trace-otlp-http') - + const exporter = new OTLPTraceExporter({ url: telemetryConfig.endpoint, }) - + const spanProcessor = new BatchSpanProcessor(exporter, { - maxQueueSize: telemetryConfig.batchSettings?.maxQueueSize || DEFAULT_TELEMETRY_CONFIG.batchSettings.maxQueueSize, - maxExportBatchSize: telemetryConfig.batchSettings?.maxExportBatchSize || DEFAULT_TELEMETRY_CONFIG.batchSettings.maxExportBatchSize, - scheduledDelayMillis: telemetryConfig.batchSettings?.scheduledDelayMillis || DEFAULT_TELEMETRY_CONFIG.batchSettings.scheduledDelayMillis, - exportTimeoutMillis: telemetryConfig.batchSettings?.exportTimeoutMillis || DEFAULT_TELEMETRY_CONFIG.batchSettings.exportTimeoutMillis, + maxQueueSize: + 
telemetryConfig.batchSettings?.maxQueueSize || + DEFAULT_TELEMETRY_CONFIG.batchSettings.maxQueueSize, + maxExportBatchSize: + telemetryConfig.batchSettings?.maxExportBatchSize || + DEFAULT_TELEMETRY_CONFIG.batchSettings.maxExportBatchSize, + scheduledDelayMillis: + telemetryConfig.batchSettings?.scheduledDelayMillis || + DEFAULT_TELEMETRY_CONFIG.batchSettings.scheduledDelayMillis, + exportTimeoutMillis: + telemetryConfig.batchSettings?.exportTimeoutMillis || + DEFAULT_TELEMETRY_CONFIG.batchSettings.exportTimeoutMillis, }) - + const configResource = resourceFromAttributes({ [SemanticResourceAttributes.SERVICE_NAME]: telemetryConfig.serviceName, [SemanticResourceAttributes.SERVICE_VERSION]: telemetryConfig.serviceVersion, [SemanticResourceAttributes.DEPLOYMENT_ENVIRONMENT]: process.env.NODE_ENV, }) - + const sdk = new NodeSDK({ resource: configResource, spanProcessors: [spanProcessor], }) - + sdk.start() - + const shutdownHandler = async () => { - await sdk.shutdown() + await sdk + .shutdown() .then(() => logger.info('OpenTelemetry SDK shut down successfully')) - .catch(err => logger.error('Error shutting down OpenTelemetry SDK', err)) + .catch((err) => logger.error('Error shutting down OpenTelemetry SDK', err)) } - + process.on('SIGTERM', shutdownHandler) process.on('SIGINT', shutdownHandler) - + logger.info('OpenTelemetry instrumentation initialized for server-side telemetry') } catch (error) { logger.error('Failed to initialize OpenTelemetry instrumentation', error) } } -} +} -export const onRequestError = Sentry.captureRequestError \ No newline at end of file +export const onRequestError = Sentry.captureRequestError diff --git a/sim/lib/auth-client.ts b/apps/sim/lib/auth-client.ts similarity index 80% rename from sim/lib/auth-client.ts rename to apps/sim/lib/auth-client.ts index 9aa82d764..ff72437bc 100644 --- a/sim/lib/auth-client.ts +++ b/apps/sim/lib/auth-client.ts @@ -1,5 +1,5 @@ -import { emailOTPClient, genericOAuthClient } from 'better-auth/client/plugins' 
import { stripeClient } from '@better-auth/stripe/client' +import { emailOTPClient, genericOAuthClient } from 'better-auth/client/plugins' import { organizationClient } from 'better-auth/client/plugins' import { createAuthClient } from 'better-auth/react' import { isProd } from '@/lib/environment' @@ -16,7 +16,7 @@ export function getBaseURL() { } else if (process.env.NODE_ENV === 'development') { baseURL = process.env.BETTER_AUTH_URL } - + return baseURL } @@ -26,11 +26,13 @@ export const client = createAuthClient({ emailOTPClient(), genericOAuthClient(), // Only include Stripe client in production - ...(isProd ? [ - stripeClient({ - subscription: true // Enable subscription management - }) - ] : []), + ...(isProd + ? [ + stripeClient({ + subscription: true, // Enable subscription management + }), + ] + : []), organizationClient(), ], }) @@ -42,18 +44,20 @@ export const useSubscription = () => { if (!isProd) { return { list: async () => ({ data: [] }), - upgrade: async () => ({ error: { message: "Subscriptions are disabled in development mode" } }), + upgrade: async () => ({ + error: { message: 'Subscriptions are disabled in development mode' }, + }), cancel: async () => ({ data: null }), - restore: async () => ({ data: null }) + restore: async () => ({ data: null }), } } - + // In production, use the real implementation return { list: client.subscription?.list, upgrade: client.subscription?.upgrade, cancel: client.subscription?.cancel, - restore: client.subscription?.restore + restore: client.subscription?.restore, } } diff --git a/sim/lib/auth.ts b/apps/sim/lib/auth.ts similarity index 71% rename from sim/lib/auth.ts rename to apps/sim/lib/auth.ts index 05dcc5be6..d2dbd2ef0 100644 --- a/sim/lib/auth.ts +++ b/apps/sim/lib/auth.ts @@ -1,21 +1,21 @@ import { headers } from 'next/headers' +import { stripe } from '@better-auth/stripe' import { betterAuth } from 'better-auth' import { drizzleAdapter } from 'better-auth/adapters/drizzle' import { nextCookies } from 
'better-auth/next-js' import { emailOTP, genericOAuth, organization } from 'better-auth/plugins' -import { stripe } from '@better-auth/stripe' -import Stripe from 'stripe' +import { and, eq } from 'drizzle-orm' import { Resend } from 'resend' +import Stripe from 'stripe' import { getEmailSubject, + renderInvitationEmail, renderOTPEmail, renderPasswordResetEmail, - renderInvitationEmail, } from '@/components/emails/render-email' import { createLogger } from '@/lib/logs/console-logger' import { db } from '@/db' import * as schema from '@/db/schema' -import { eq, and } from 'drizzle-orm' const logger = createLogger('Auth') @@ -23,12 +23,15 @@ const isProd = process.env.NODE_ENV === 'production' // Only initialize Stripe if the key is provided // This allows local development without a Stripe account -const validStripeKey = process.env.STRIPE_SECRET_KEY && process.env.STRIPE_SECRET_KEY.trim() !== '' && process.env.STRIPE_SECRET_KEY !== 'placeholder' +const validStripeKey = + process.env.STRIPE_SECRET_KEY && + process.env.STRIPE_SECRET_KEY.trim() !== '' && + process.env.STRIPE_SECRET_KEY !== 'placeholder' let stripeClient = null if (validStripeKey) { stripeClient = new Stripe(process.env.STRIPE_SECRET_KEY || '', { - apiVersion: "2025-02-24.acacia", + apiVersion: '2025-02-24.acacia', }) } @@ -69,36 +72,37 @@ export const auth = betterAuth({ session: { create: { before: async (session) => { - try { + try { // Find the first organization this user is a member of - const members = await db.select() + const members = await db + .select() .from(schema.member) .where(eq(schema.member.userId, session.userId)) - .limit(1); - + .limit(1) + if (members.length > 0) { - logger.info('Found organization for user', { - userId: session.userId, - organizationId: members[0].organizationId - }); - + logger.info('Found organization for user', { + userId: session.userId, + organizationId: members[0].organizationId, + }) + return { data: { ...session, - activeOrganizationId: 
members[0].organizationId - } - }; + activeOrganizationId: members[0].organizationId, + }, + } } else { - logger.info('No organizations found for user', { userId: session.userId }); - return { data: session }; + logger.info('No organizations found for user', { userId: session.userId }) + return { data: session } } } catch (error) { - logger.error('Error setting active organization', { error, userId: session.userId }); - return { data: session }; + logger.error('Error setting active organization', { error, userId: session.userId }) + return { data: session } } - } - } - } + }, + }, + }, }, account: { accountLinking: { @@ -481,12 +485,7 @@ export const auth = betterAuth({ authorizationUrl: 'https://auth.atlassian.com/authorize', tokenUrl: 'https://auth.atlassian.com/oauth/token', userInfoUrl: 'https://api.atlassian.com/me', - scopes: [ - 'read:page:confluence', - 'write:page:confluence', - 'read:me', - 'offline_access', - ], + scopes: ['read:page:confluence', 'write:page:confluence', 'read:me', 'offline_access'], responseType: 'code', pkce: true, accessType: 'offline', @@ -546,16 +545,16 @@ export const auth = betterAuth({ 'read:issue-type:jira', 'read:me', 'offline_access', - 'read:issue-meta:jira', - 'read:issue-security-level:jira', - 'read:issue.vote:jira', + 'read:issue-meta:jira', + 'read:issue-security-level:jira', + 'read:issue.vote:jira', 'read:issue.changelog:jira', 'read:avatar:jira', 'read:issue:jira', 'read:status:jira', 'read:user:jira', 'read:field-configuration:jira', - 'read:issue-details:jira' + 'read:issue-details:jira', ], responseType: 'code', pkce: true, @@ -669,233 +668,246 @@ export const auth = betterAuth({ ], }), // Only include the Stripe plugin in production - ...(isProd && stripeClient ? 
[ - stripe({ - stripeClient, - stripeWebhookSecret: process.env.STRIPE_WEBHOOK_SECRET || '', - createCustomerOnSignUp: true, - onCustomerCreate: async ({ customer, stripeCustomer, user }, request) => { - logger.info('Stripe customer created', { - customerId: customer.id, - userId: user.id - }) - }, - subscription: { - enabled: true, - plans: [ - { - name: 'free', - priceId: process.env.STRIPE_FREE_PRICE_ID || '', - limits: { - cost: process.env.FREE_TIER_COST_LIMIT ? parseInt(process.env.FREE_TIER_COST_LIMIT) : 5, - sharingEnabled: 0, - multiplayerEnabled: 0, - workspaceCollaborationEnabled: 0 - } + ...(isProd && stripeClient + ? [ + stripe({ + stripeClient, + stripeWebhookSecret: process.env.STRIPE_WEBHOOK_SECRET || '', + createCustomerOnSignUp: true, + onCustomerCreate: async ({ customer, stripeCustomer, user }, request) => { + logger.info('Stripe customer created', { + customerId: customer.id, + userId: user.id, + }) }, - { - name: 'pro', - priceId: process.env.STRIPE_PRO_PRICE_ID || '', - limits: { - cost: process.env.PRO_TIER_COST_LIMIT ? parseInt(process.env.PRO_TIER_COST_LIMIT) : 20, - sharingEnabled: 1, - multiplayerEnabled: 0, - workspaceCollaborationEnabled: 0 - } - }, - { - name: 'team', - priceId: process.env.STRIPE_TEAM_PRICE_ID || '', - limits: { - cost: process.env.TEAM_TIER_COST_LIMIT ? 
parseInt(process.env.TEAM_TIER_COST_LIMIT) : 40, // $40 per seat - sharingEnabled: 1, - multiplayerEnabled: 1, - workspaceCollaborationEnabled: 1 - } - } - ], - authorizeReference: async ({ user, referenceId, action }) => { - // User can always manage their own subscriptions - if (referenceId === user.id) { - return true - } - - // Check if referenceId is an organizationId the user has admin rights to - const members = await db.select() - .from(schema.member) - .where( - and( - eq(schema.member.userId, user.id), - eq(schema.member.organizationId, referenceId) - ) - ) - - const member = members[0] - - // Allow if the user is an owner or admin of the organization - return member?.role === 'owner' || member?.role === 'admin' - }, - getCheckoutSessionParams: async ({ user, plan, subscription }, request) => { - if (plan.name === 'team') { - return { - params: { - allow_promotion_codes: true, - line_items: [ - { - price: plan.priceId, - quantity: subscription?.seats || 1, - adjustable_quantity: { - enabled: true, - minimum: 1, - maximum: 50 - } - } - ] + subscription: { + enabled: true, + plans: [ + { + name: 'free', + priceId: process.env.STRIPE_FREE_PRICE_ID || '', + limits: { + cost: process.env.FREE_TIER_COST_LIMIT + ? parseInt(process.env.FREE_TIER_COST_LIMIT) + : 5, + sharingEnabled: 0, + multiplayerEnabled: 0, + workspaceCollaborationEnabled: 0, + }, + }, + { + name: 'pro', + priceId: process.env.STRIPE_PRO_PRICE_ID || '', + limits: { + cost: process.env.PRO_TIER_COST_LIMIT + ? parseInt(process.env.PRO_TIER_COST_LIMIT) + : 20, + sharingEnabled: 1, + multiplayerEnabled: 0, + workspaceCollaborationEnabled: 0, + }, + }, + { + name: 'team', + priceId: process.env.STRIPE_TEAM_PRICE_ID || '', + limits: { + cost: process.env.TEAM_TIER_COST_LIMIT + ? 
parseInt(process.env.TEAM_TIER_COST_LIMIT) + : 40, // $40 per seat + sharingEnabled: 1, + multiplayerEnabled: 1, + workspaceCollaborationEnabled: 1, + }, + }, + ], + authorizeReference: async ({ user, referenceId, action }) => { + // User can always manage their own subscriptions + if (referenceId === user.id) { + return true } - } - } - - return { - params: { - allow_promotion_codes: true - } - } - }, - onSubscriptionComplete: async ({ - event, - stripeSubscription, - subscription - }: { - event: Stripe.Event - stripeSubscription: Stripe.Subscription - subscription: any - }) => { - logger.info('Subscription created', { - subscriptionId: subscription.id, - referenceId: subscription.referenceId, - plan: subscription.plan, - status: subscription.status - }) - }, - onSubscriptionUpdate: async ({ - event, - subscription - }: { - event: Stripe.Event - subscription: any - }) => { - logger.info('Subscription updated', { - subscriptionId: subscription.id, - status: subscription.status - }) - }, - onSubscriptionDeleted: async ({ - event, - stripeSubscription, - subscription - }: { - event: Stripe.Event - stripeSubscription: Stripe.Subscription - subscription: any - }) => { - logger.info('Subscription deleted', { - subscriptionId: subscription.id, - referenceId: subscription.referenceId - }) - }, - }, - }), - // Add organization plugin as a separate entry in the plugins array - organization({ - // Allow team plan subscribers to create organizations - allowUserToCreateOrganization: async (user) => { - // Get subscription data - const dbSubscriptions = await db.select() - .from(schema.subscription) - .where(eq(schema.subscription.referenceId, user.id)) - - // Check if user has active team subscription - const hasTeamPlan = dbSubscriptions.some( - sub => (sub.status === 'active') && sub.plan === 'team' - ) - - return hasTeamPlan - }, - // Set a fixed membership limit of 50, but the actual limit will be enforced in the invitation flow - membershipLimit: 50, - // Validate seat 
limits before sending invitations - beforeInvite: async ({ organization }: { organization: { id: string } }) => { - // Get subscription for this organization - const subscriptions = await db.select() - .from(schema.subscription) - .where( - and( - eq(schema.subscription.referenceId, organization.id), - eq(schema.subscription.status, 'active') + + // Check if referenceId is an organizationId the user has admin rights to + const members = await db + .select() + .from(schema.member) + .where( + and( + eq(schema.member.userId, user.id), + eq(schema.member.organizationId, referenceId) + ) + ) + + const member = members[0] + + // Allow if the user is an owner or admin of the organization + return member?.role === 'owner' || member?.role === 'admin' + }, + getCheckoutSessionParams: async ({ user, plan, subscription }, request) => { + if (plan.name === 'team') { + return { + params: { + allow_promotion_codes: true, + line_items: [ + { + price: plan.priceId, + quantity: subscription?.seats || 1, + adjustable_quantity: { + enabled: true, + minimum: 1, + maximum: 50, + }, + }, + ], + }, + } + } + + return { + params: { + allow_promotion_codes: true, + }, + } + }, + onSubscriptionComplete: async ({ + event, + stripeSubscription, + subscription, + }: { + event: Stripe.Event + stripeSubscription: Stripe.Subscription + subscription: any + }) => { + logger.info('Subscription created', { + subscriptionId: subscription.id, + referenceId: subscription.referenceId, + plan: subscription.plan, + status: subscription.status, + }) + }, + onSubscriptionUpdate: async ({ + event, + subscription, + }: { + event: Stripe.Event + subscription: any + }) => { + logger.info('Subscription updated', { + subscriptionId: subscription.id, + status: subscription.status, + }) + }, + onSubscriptionDeleted: async ({ + event, + stripeSubscription, + subscription, + }: { + event: Stripe.Event + stripeSubscription: Stripe.Subscription + subscription: any + }) => { + logger.info('Subscription deleted', { + 
subscriptionId: subscription.id, + referenceId: subscription.referenceId, + }) + }, + }, + }), + // Add organization plugin as a separate entry in the plugins array + organization({ + // Allow team plan subscribers to create organizations + allowUserToCreateOrganization: async (user) => { + // Get subscription data + const dbSubscriptions = await db + .select() + .from(schema.subscription) + .where(eq(schema.subscription.referenceId, user.id)) + + // Check if user has active team subscription + const hasTeamPlan = dbSubscriptions.some( + (sub) => sub.status === 'active' && sub.plan === 'team' ) - ) - - const teamSubscription = subscriptions.find(sub => sub.plan === 'team') - - if (!teamSubscription) { - throw new Error('No active team subscription for this organization') - } - - // Count current members + pending invitations - const members = await db.select() - .from(schema.member) - .where(eq(schema.member.organizationId, organization.id)) - - const pendingInvites = await db.select() - .from(schema.invitation) - .where( - and( - eq(schema.invitation.organizationId, organization.id), - eq(schema.invitation.status, 'pending') - ) - ) - - const totalCount = members.length + pendingInvites.length - const seatLimit = teamSubscription.seats || 1 - - if (totalCount >= seatLimit) { - throw new Error(`Organization has reached its seat limit of ${seatLimit}`) - } - }, - sendInvitationEmail: async (data: any) => { - try { - const { invitation, organization, inviter } = data - - const inviteUrl = `${process.env.NEXT_PUBLIC_APP_URL}/invite/${invitation.id}` - const inviterName = inviter.user?.name || 'A team member' - - const html = await renderInvitationEmail( - inviterName, - organization.name, - inviteUrl, - invitation.email - ) - - await resend.emails.send({ - from: 'Sim Studio ', - to: invitation.email, - subject: `${inviterName} has invited you to join ${organization.name} on Sim Studio`, - html, - }) - } catch (error) { - logger.error('Error sending invitation email', 
{ error }) - } - }, - organizationCreation: { - afterCreate: async ({ organization, member, user }) => { - logger.info('Organization created', { - organizationId: organization.id, - creatorId: user.id - }) - } - }, - }) - ] : []), + + return hasTeamPlan + }, + // Set a fixed membership limit of 50, but the actual limit will be enforced in the invitation flow + membershipLimit: 50, + // Validate seat limits before sending invitations + beforeInvite: async ({ organization }: { organization: { id: string } }) => { + // Get subscription for this organization + const subscriptions = await db + .select() + .from(schema.subscription) + .where( + and( + eq(schema.subscription.referenceId, organization.id), + eq(schema.subscription.status, 'active') + ) + ) + + const teamSubscription = subscriptions.find((sub) => sub.plan === 'team') + + if (!teamSubscription) { + throw new Error('No active team subscription for this organization') + } + + // Count current members + pending invitations + const members = await db + .select() + .from(schema.member) + .where(eq(schema.member.organizationId, organization.id)) + + const pendingInvites = await db + .select() + .from(schema.invitation) + .where( + and( + eq(schema.invitation.organizationId, organization.id), + eq(schema.invitation.status, 'pending') + ) + ) + + const totalCount = members.length + pendingInvites.length + const seatLimit = teamSubscription.seats || 1 + + if (totalCount >= seatLimit) { + throw new Error(`Organization has reached its seat limit of ${seatLimit}`) + } + }, + sendInvitationEmail: async (data: any) => { + try { + const { invitation, organization, inviter } = data + + const inviteUrl = `${process.env.NEXT_PUBLIC_APP_URL}/invite/${invitation.id}` + const inviterName = inviter.user?.name || 'A team member' + + const html = await renderInvitationEmail( + inviterName, + organization.name, + inviteUrl, + invitation.email + ) + + await resend.emails.send({ + from: 'Sim Studio ', + to: invitation.email, + 
subject: `${inviterName} has invited you to join ${organization.name} on Sim Studio`, + html, + }) + } catch (error) { + logger.error('Error sending invitation email', { error }) + } + }, + organizationCreation: { + afterCreate: async ({ organization, member, user }) => { + logger.info('Organization created', { + organizationId: organization.id, + creatorId: user.id, + }) + }, + }, + }), + ] + : []), ], pages: { signIn: '/login', diff --git a/sim/lib/environment.ts b/apps/sim/lib/environment.ts similarity index 89% rename from sim/lib/environment.ts rename to apps/sim/lib/environment.ts index c8d02aeff..1bb0f9949 100644 --- a/sim/lib/environment.ts +++ b/apps/sim/lib/environment.ts @@ -26,7 +26,5 @@ export const isHosted = process.env.NEXT_PUBLIC_APP_URL === 'https://www.simstud * Get cost multiplier based on environment */ export function getCostMultiplier(): number { - return isProd - ? parseFloat(process.env.COST_MULTIPLIER!) || 1 - : 1 -} \ No newline at end of file + return isProd ? parseFloat(process.env.COST_MULTIPLIER!) 
|| 1 : 1 +} diff --git a/sim/lib/file-parsers/csv-parser.ts b/apps/sim/lib/file-parsers/csv-parser.ts similarity index 100% rename from sim/lib/file-parsers/csv-parser.ts rename to apps/sim/lib/file-parsers/csv-parser.ts diff --git a/sim/lib/file-parsers/docx-parser.ts b/apps/sim/lib/file-parsers/docx-parser.ts similarity index 100% rename from sim/lib/file-parsers/docx-parser.ts rename to apps/sim/lib/file-parsers/docx-parser.ts diff --git a/sim/lib/file-parsers/index.test.ts b/apps/sim/lib/file-parsers/index.test.ts similarity index 100% rename from sim/lib/file-parsers/index.test.ts rename to apps/sim/lib/file-parsers/index.test.ts diff --git a/sim/lib/file-parsers/index.ts b/apps/sim/lib/file-parsers/index.ts similarity index 100% rename from sim/lib/file-parsers/index.ts rename to apps/sim/lib/file-parsers/index.ts diff --git a/sim/lib/file-parsers/pdf-parser.ts b/apps/sim/lib/file-parsers/pdf-parser.ts similarity index 100% rename from sim/lib/file-parsers/pdf-parser.ts rename to apps/sim/lib/file-parsers/pdf-parser.ts diff --git a/sim/lib/file-parsers/raw-pdf-parser.ts b/apps/sim/lib/file-parsers/raw-pdf-parser.ts similarity index 100% rename from sim/lib/file-parsers/raw-pdf-parser.ts rename to apps/sim/lib/file-parsers/raw-pdf-parser.ts diff --git a/sim/lib/file-parsers/types.ts b/apps/sim/lib/file-parsers/types.ts similarity index 100% rename from sim/lib/file-parsers/types.ts rename to apps/sim/lib/file-parsers/types.ts diff --git a/sim/lib/freestyle.ts b/apps/sim/lib/freestyle.ts similarity index 100% rename from sim/lib/freestyle.ts rename to apps/sim/lib/freestyle.ts diff --git a/sim/lib/generate-chat-title.ts b/apps/sim/lib/generate-chat-title.ts similarity index 80% rename from sim/lib/generate-chat-title.ts rename to apps/sim/lib/generate-chat-title.ts index 46b84d86e..4cbe56899 100644 --- a/sim/lib/generate-chat-title.ts +++ b/apps/sim/lib/generate-chat-title.ts @@ -14,18 +14,19 @@ export async function generateChatTitle(message: string): Promise 
try { const openai = new OpenAI({ apiKey }) - + const response = await openai.chat.completions.create({ model: 'gpt-3.5-turbo', messages: [ { role: 'system', - content: 'Generate a very short title (3-5 words max) for a chat that starts with this message. The title should be concise and descriptive.' + content: + 'Generate a very short title (3-5 words max) for a chat that starts with this message. The title should be concise and descriptive.', }, { role: 'user', - content: message - } + content: message, + }, ], max_tokens: 20, temperature: 0.7, @@ -37,4 +38,4 @@ export async function generateChatTitle(message: string): Promise console.error('Error generating chat title:', error) return null } -} \ No newline at end of file +} diff --git a/sim/lib/logs/console-logger.ts b/apps/sim/lib/logs/console-logger.ts similarity index 100% rename from sim/lib/logs/console-logger.ts rename to apps/sim/lib/logs/console-logger.ts diff --git a/sim/lib/logs/execution-logger.ts b/apps/sim/lib/logs/execution-logger.ts similarity index 96% rename from sim/lib/logs/execution-logger.ts rename to apps/sim/lib/logs/execution-logger.ts index 9b2c33d04..7d4af2155 100644 --- a/sim/lib/logs/execution-logger.ts +++ b/apps/sim/lib/logs/execution-logger.ts @@ -1,12 +1,12 @@ import { eq, sql } from 'drizzle-orm' import { v4 as uuidv4 } from 'uuid' +import { getCostMultiplier } from '@/lib/environment' import { createLogger } from '@/lib/logs/console-logger' import { db } from '@/db' import { userStats, workflow, workflowLogs } from '@/db/schema' import { ExecutionResult as ExecutorResult } from '@/executor/types' -import { stripCustomToolPrefix } from '../workflows/utils' -import { getCostMultiplier } from '@/lib/environment' import { calculateCost } from '@/providers/utils' +import { stripCustomToolPrefix } from '../workflows/utils' const logger = createLogger('ExecutionLogger') @@ -110,52 +110,56 @@ export async function persistExecutionLogs( hasToolCalls: !!log.output.toolCalls, hasResponse: 
!!log.output.response, }) - + // FIRST PASS - Check if this is a no-tool scenario with tokens data not propagated // In some cases, the token data from the streaming callback doesn't properly get into // the agent block response. This ensures we capture it. - if (log.output.response && - (!log.output.response.tokens?.completion || log.output.response.tokens.completion === 0) && - (!log.output.response.toolCalls || !log.output.response.toolCalls.list || log.output.response.toolCalls.list.length === 0)) { - + if ( + log.output.response && + (!log.output.response.tokens?.completion || + log.output.response.tokens.completion === 0) && + (!log.output.response.toolCalls || + !log.output.response.toolCalls.list || + log.output.response.toolCalls.list.length === 0) + ) { // Check if output response has providerTiming - this indicates it's a streaming response if (log.output.response.providerTiming) { logger.debug('Processing streaming response without tool calls for token extraction', { blockId: log.blockId, hasTokens: !!log.output.response.tokens, - hasProviderTiming: !!log.output.response.providerTiming - }); - + hasProviderTiming: !!log.output.response.providerTiming, + }) + // Only for no-tool streaming cases, extract content length and estimate token count - const contentLength = log.output.response.content?.length || 0; + const contentLength = log.output.response.content?.length || 0 if (contentLength > 0) { // Estimate completion tokens based on content length as a fallback - const estimatedCompletionTokens = Math.ceil(contentLength / 4); - const promptTokens = log.output.response.tokens?.prompt || 8; - + const estimatedCompletionTokens = Math.ceil(contentLength / 4) + const promptTokens = log.output.response.tokens?.prompt || 8 + // Update the tokens object log.output.response.tokens = { prompt: promptTokens, completion: estimatedCompletionTokens, - total: promptTokens + estimatedCompletionTokens - }; - + total: promptTokens + estimatedCompletionTokens, + } + // 
Update cost information using the provider's cost model - const model = log.output.response.model || 'gpt-4o'; - const costInfo = calculateCost(model, promptTokens, estimatedCompletionTokens); + const model = log.output.response.model || 'gpt-4o' + const costInfo = calculateCost(model, promptTokens, estimatedCompletionTokens) log.output.response.cost = { input: costInfo.input, output: costInfo.output, total: costInfo.total, - pricing: costInfo.pricing - }; - + pricing: costInfo.pricing, + } + logger.debug('Updated token information for streaming no-tool response', { blockId: log.blockId, contentLength, estimatedCompletionTokens, - tokens: log.output.response.tokens - }); + tokens: log.output.response.tokens, + }) } } } @@ -166,12 +170,14 @@ export async function persistExecutionLogs( logger.debug('Found streaming response with executionData', { blockId: log.blockId, hasExecutionData: !!log.output.executionData, - executionDataKeys: log.output.executionData ? Object.keys(log.output.executionData) : [], + executionDataKeys: log.output.executionData + ? Object.keys(log.output.executionData) + : [], }) // Extract the executionData and use it as our primary source of information const executionData = log.output.executionData - + // If executionData has output with response, use that as our response // This is especially important for streaming responses where the final content // is set in the executionData structure by the executor @@ -428,12 +434,12 @@ export async function persistExecutionLogs( // Case 5: Look in executionData.output.response for streaming responses else if (log.output.executionData?.output?.response?.toolCalls) { const toolCallsObj = log.output.executionData.output.response.toolCalls - const list = Array.isArray(toolCallsObj) ? toolCallsObj : (toolCallsObj.list || []) - + const list = Array.isArray(toolCallsObj) ? 
toolCallsObj : toolCallsObj.list || [] + logger.debug('Found toolCalls in executionData output response', { count: list.length, }) - + // Log raw timing data for debugging list.forEach((tc: any, idx: number) => { logger.debug(`executionData toolCalls ${idx} raw timing data:`, { @@ -445,7 +451,7 @@ export async function persistExecutionLogs( argumentKeys: tc.arguments ? Object.keys(tc.arguments) : undefined, }) }) - + toolCallData = list.map((toolCall: any) => { // Extract timing info - try various formats that providers might use const duration = extractDuration(toolCall) @@ -454,7 +460,7 @@ export async function persistExecutionLogs( blockStartTime ? new Date(blockStartTime) : undefined, blockEndTime ? new Date(blockEndTime) : undefined ) - + return { name: toolCall.name, duration: duration, @@ -567,8 +573,8 @@ export async function persistExecutionLogs( level: log.success ? 'info' : 'error', message: log.success ? `Block ${log.blockName || log.blockId} (${log.blockType || 'unknown'}): ${ - log.output?.response?.content || - log.output?.executionData?.output?.response?.content || + log.output?.response?.content || + log.output?.executionData?.output?.response?.content || JSON.stringify(log.output?.response || {}) }` : `Block ${log.blockName || log.blockId} (${log.blockType || 'unknown'}): ${log.error || 'Failed'}`, @@ -639,11 +645,15 @@ export async function persistExecutionLogs( // If result has a direct cost field (for streaming responses completed with calculated cost), // use that as a safety check to ensure we have cost data - if (result.metadata && 'cost' in result.metadata && (!workflowMetadata.cost || workflowMetadata.cost.total <= 0)) { + if ( + result.metadata && + 'cost' in result.metadata && + (!workflowMetadata.cost || workflowMetadata.cost.total <= 0) + ) { const resultCost = (result.metadata as any).cost workflowMetadata.cost = { model: primaryModel, - total: typeof resultCost === 'number' ? 
resultCost : (resultCost?.total || 0), + total: typeof resultCost === 'number' ? resultCost : resultCost?.total || 0, input: resultCost?.input || 0, output: resultCost?.output || 0, tokens: { @@ -745,7 +755,9 @@ export async function persistExecutionError( } // Helper functions for trigger-specific messages -function getTriggerSuccessMessage(triggerType: 'api' | 'webhook' | 'schedule' | 'manual' | 'chat'): string { +function getTriggerSuccessMessage( + triggerType: 'api' | 'webhook' | 'schedule' | 'manual' | 'chat' +): string { switch (triggerType) { case 'api': return 'API workflow executed successfully' @@ -762,7 +774,9 @@ function getTriggerSuccessMessage(triggerType: 'api' | 'webhook' | 'schedule' | } } -function getTriggerErrorPrefix(triggerType: 'api' | 'webhook' | 'schedule' | 'manual' | 'chat'): string { +function getTriggerErrorPrefix( + triggerType: 'api' | 'webhook' | 'schedule' | 'manual' | 'chat' +): string { switch (triggerType) { case 'api': return 'API workflow' diff --git a/sim/lib/logs/trace-spans.ts b/apps/sim/lib/logs/trace-spans.ts similarity index 90% rename from sim/lib/logs/trace-spans.ts rename to apps/sim/lib/logs/trace-spans.ts index fed654708..ad8b57068 100644 --- a/sim/lib/logs/trace-spans.ts +++ b/apps/sim/lib/logs/trace-spans.ts @@ -80,18 +80,18 @@ export function buildTraceSpans(result: ExecutionResult): { // Ensure we have valid startTime and endTime let segmentStart: number let segmentEnd: number - + // Handle different time formats - some providers use ISO strings, some use timestamps if (typeof segment.startTime === 'string') { try { segmentStart = new Date(segment.startTime).getTime() } catch (e) { - segmentStart = segmentStartTime + (index * 1000) // Fallback offset + segmentStart = segmentStartTime + index * 1000 // Fallback offset } } else { segmentStart = segment.startTime } - + if (typeof segment.endTime === 'string') { try { segmentEnd = new Date(segment.endTime).getTime() @@ -101,21 +101,26 @@ export function 
buildTraceSpans(result: ExecutionResult): { } else { segmentEnd = segment.endTime } - + // For streaming responses, make sure our timing is valid if (isNaN(segmentStart) || isNaN(segmentEnd) || segmentEnd < segmentStart) { // Use fallback values - segmentStart = segmentStartTime + (index * 1000) + segmentStart = segmentStartTime + index * 1000 segmentEnd = segmentStart + (segment.duration || 1000) } - + const childSpan: TraceSpan = { id: `${spanId}-segment-${index}`, name: segment.name || `${segment.type} operation`, startTime: new Date(segmentStart).toISOString(), endTime: new Date(segmentEnd).toISOString(), - duration: segment.duration || (segmentEnd - segmentStart), - type: segment.type === 'model' ? 'model' : segment.type === 'tool' ? 'tool' : 'processing', + duration: segment.duration || segmentEnd - segmentStart, + type: + segment.type === 'model' + ? 'model' + : segment.type === 'tool' + ? 'tool' + : 'processing', status: 'success', children: [], } @@ -123,7 +128,7 @@ export function buildTraceSpans(result: ExecutionResult): { // Add any additional metadata if (segment.type === 'tool' && typeof segment.name === 'string') { // Add as a custom attribute using type assertion - (childSpan as any).toolName = segment.name + ;(childSpan as any).toolName = segment.name } children.push(childSpan) @@ -192,53 +197,55 @@ export function buildTraceSpans(result: ExecutionResult): { // 3. 
Streaming response formats with executionData // Check all possible paths for toolCalls - let toolCallsList = null; - - // Wrap extraction in try-catch to handle unexpected toolCalls formats + let toolCallsList = null + + // Wrap extraction in try-catch to handle unexpected toolCalls formats try { if (log.output?.response?.toolCalls?.list) { // Standard format with list property - toolCallsList = log.output.response.toolCalls.list; + toolCallsList = log.output.response.toolCalls.list } else if (Array.isArray(log.output?.response?.toolCalls)) { // Direct array format - toolCallsList = log.output.response.toolCalls; + toolCallsList = log.output.response.toolCalls } else if (log.output?.executionData?.output?.response?.toolCalls) { // Streaming format with executionData - const tcObj = log.output.executionData.output.response.toolCalls; - toolCallsList = Array.isArray(tcObj) ? tcObj : (tcObj.list || []); + const tcObj = log.output.executionData.output.response.toolCalls + toolCallsList = Array.isArray(tcObj) ? tcObj : tcObj.list || [] } - + // Validate that toolCallsList is actually an array before processing if (toolCallsList && !Array.isArray(toolCallsList)) { - console.warn(`toolCallsList is not an array: ${typeof toolCallsList}`); - toolCallsList = []; + console.warn(`toolCallsList is not an array: ${typeof toolCallsList}`) + toolCallsList = [] } } catch (error) { - console.error(`Error extracting toolCalls: ${error}`); - toolCallsList = []; // Set to empty array as fallback + console.error(`Error extracting toolCalls: ${error}`) + toolCallsList = [] // Set to empty array as fallback } if (toolCallsList && toolCallsList.length > 0) { - span.toolCalls = toolCallsList.map((tc: any) => { - // Add null check for each tool call - if (!tc) return null; - - try { - return { - name: stripCustomToolPrefix(tc.name || 'unnamed-tool'), - duration: tc.duration || 0, - startTime: tc.startTime || log.startedAt, - endTime: tc.endTime || log.endedAt, - status: tc.error ? 
'error' : 'success', - input: tc.arguments || tc.input, - output: tc.result || tc.output, - error: tc.error, - }; - } catch (tcError) { - console.error(`Error processing tool call: ${tcError}`); - return null; - } - }).filter(Boolean); // Remove any null entries from failed processing + span.toolCalls = toolCallsList + .map((tc: any) => { + // Add null check for each tool call + if (!tc) return null + + try { + return { + name: stripCustomToolPrefix(tc.name || 'unnamed-tool'), + duration: tc.duration || 0, + startTime: tc.startTime || log.startedAt, + endTime: tc.endTime || log.endedAt, + status: tc.error ? 'error' : 'success', + input: tc.arguments || tc.input, + output: tc.result || tc.output, + error: tc.error, + } + } catch (tcError) { + console.error(`Error processing tool call: ${tcError}`) + return null + } + }) + .filter(Boolean) // Remove any null entries from failed processing } } diff --git a/sim/lib/mailer.ts b/apps/sim/lib/mailer.ts similarity index 86% rename from sim/lib/mailer.ts rename to apps/sim/lib/mailer.ts index 640ae66a3..94559319f 100644 --- a/sim/lib/mailer.ts +++ b/apps/sim/lib/mailer.ts @@ -95,7 +95,7 @@ export async function sendBatchEmails({ logger.info('Batch emails not sent (Resend not configured):', { emailCount: emails.length, }) - + // Create mock results for each email emails.forEach(() => { results.push({ @@ -104,7 +104,7 @@ export async function sendBatchEmails({ data: { id: 'mock-email-id' }, }) }) - + return { success: true, message: 'Batch email logging successful (Resend not configured)', @@ -114,7 +114,7 @@ export async function sendBatchEmails({ } // Prepare emails for batch sending - const batchEmails = emails.map(email => ({ + const batchEmails = emails.map((email) => ({ from: `Sim Studio <${email.from || senderEmail}>`, to: email.to, subject: email.subject, @@ -125,26 +125,28 @@ export async function sendBatchEmails({ // Process in chunks of 50 to be safe const BATCH_SIZE = 50 let allSuccessful = true - - const delay = 
(ms: number) => new Promise(resolve => setTimeout(resolve, ms)) - + + const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)) + let rateDelay = 500 - + for (let i = 0; i < batchEmails.length; i += BATCH_SIZE) { if (i > 0) { logger.info(`Rate limit protection: Waiting ${rateDelay}ms before sending next batch`) await delay(rateDelay) } - + const batch = batchEmails.slice(i, i + BATCH_SIZE) - + try { - logger.info(`Sending batch ${Math.floor(i/BATCH_SIZE) + 1} of ${Math.ceil(batchEmails.length/BATCH_SIZE)} (${batch.length} emails)`) + logger.info( + `Sending batch ${Math.floor(i / BATCH_SIZE) + 1} of ${Math.ceil(batchEmails.length / BATCH_SIZE)} (${batch.length} emails)` + ) const response = await resend.batch.send(batch) - + if (response.error) { logger.error('Resend batch API error:', response.error) - + // Add failure results for this batch batch.forEach(() => { results.push({ @@ -152,7 +154,7 @@ export async function sendBatchEmails({ message: response.error?.message || 'Failed to send batch email', }) }) - + allSuccessful = false } else if (response.data) { if (Array.isArray(response.data)) { @@ -176,31 +178,33 @@ export async function sendBatchEmails({ } } catch (error) { logger.error('Error sending batch emails:', error) - + // Check if it's a rate limit error - if (error instanceof Error && - (error.message.toLowerCase().includes('rate') || - error.message.toLowerCase().includes('too many') || - error.message.toLowerCase().includes('429'))) { + if ( + error instanceof Error && + (error.message.toLowerCase().includes('rate') || + error.message.toLowerCase().includes('too many') || + error.message.toLowerCase().includes('429')) + ) { logger.warn('Rate limit exceeded, increasing delay and retrying...') - + // Wait a bit longer and try again with this batch await delay(rateDelay * 5) - + try { - logger.info(`Retrying batch ${Math.floor(i/BATCH_SIZE) + 1} with longer delay`) + logger.info(`Retrying batch ${Math.floor(i / BATCH_SIZE) + 1} 
with longer delay`) const retryResponse = await resend.batch.send(batch) - + if (retryResponse.error) { logger.error('Retry failed with error:', retryResponse.error) - + batch.forEach(() => { results.push({ success: false, message: retryResponse.error?.message || 'Failed to send batch email after retry', }) }) - + allSuccessful = false } else if (retryResponse.data) { if (Array.isArray(retryResponse.data)) { @@ -220,21 +224,24 @@ export async function sendBatchEmails({ }) }) } - + // Increase the standard delay since we hit a rate limit logger.info('Increasing delay between batches after rate limit hit') rateDelay = rateDelay * 2 } } catch (retryError) { logger.error('Retry also failed:', retryError) - + batch.forEach(() => { results.push({ success: false, - message: retryError instanceof Error ? retryError.message : 'Failed to send email even after retry', + message: + retryError instanceof Error + ? retryError.message + : 'Failed to send email even after retry', }) }) - + allSuccessful = false } } else { @@ -245,7 +252,7 @@ export async function sendBatchEmails({ message: error instanceof Error ? error.message : 'Failed to send batch email', }) }) - + allSuccessful = false } } @@ -253,11 +260,11 @@ export async function sendBatchEmails({ return { success: allSuccessful, - message: allSuccessful - ? 'All batch emails sent successfully' + message: allSuccessful + ? 
'All batch emails sent successfully' : 'Some batch emails failed to send', results, - data: { count: results.filter(r => r.success).length }, + data: { count: results.filter((r) => r.success).length }, } } catch (error) { logger.error('Error in batch email sending:', error) diff --git a/sim/lib/oauth.ts b/apps/sim/lib/oauth.ts similarity index 99% rename from sim/lib/oauth.ts rename to apps/sim/lib/oauth.ts index 14ae7130d..4e53f07a4 100644 --- a/sim/lib/oauth.ts +++ b/apps/sim/lib/oauth.ts @@ -9,10 +9,10 @@ import { GoogleDriveIcon, GoogleIcon, GoogleSheetsIcon, + JiraIcon, NotionIcon, SupabaseIcon, xIcon, - JiraIcon, } from '@/components/icons' import { createLogger } from '@/lib/logs/console-logger' @@ -42,7 +42,7 @@ export type OAuthService = | 'airtable' | 'notion' | 'jira' - + // Define the interface for OAuth provider configuration export interface OAuthProviderConfig { id: OAuthProvider @@ -188,12 +188,7 @@ export const OAUTH_PROVIDERS: Record = { providerId: 'confluence', icon: (props) => ConfluenceIcon(props), baseProviderIcon: (props) => ConfluenceIcon(props), - scopes: [ - 'read:page:confluence', - 'write:page:confluence', - 'read:me', - 'offline_access', - ], + scopes: ['read:page:confluence', 'write:page:confluence', 'read:me', 'offline_access'], }, }, defaultService: 'confluence', @@ -210,7 +205,8 @@ export const OAUTH_PROVIDERS: Record = { providerId: 'jira', icon: (props) => JiraIcon(props), baseProviderIcon: (props) => JiraIcon(props), - scopes: [ 'read:jira-user', + scopes: [ + 'read:jira-user', 'read:jira-work', 'write:jira-work', 'read:project:jira', diff --git a/sim/lib/redis.ts b/apps/sim/lib/redis.ts similarity index 100% rename from sim/lib/redis.ts rename to apps/sim/lib/redis.ts diff --git a/sim/lib/schedules/utils.test.ts b/apps/sim/lib/schedules/utils.test.ts similarity index 98% rename from sim/lib/schedules/utils.test.ts rename to apps/sim/lib/schedules/utils.test.ts index d365e9142..8a9822ccc 100644 --- 
a/sim/lib/schedules/utils.test.ts +++ b/apps/sim/lib/schedules/utils.test.ts @@ -5,12 +5,12 @@ import { beforeEach, describe, expect, it, vi } from 'vitest' import { BlockState, calculateNextRunTime, + createDateWithTimezone, generateCronExpression, getScheduleTimeValues, getSubBlockValue, parseCronToHumanReadable, parseTimeString, - createDateWithTimezone, } from './utils' describe('Schedule Utilities', () => { @@ -455,7 +455,7 @@ describe('Schedule Utilities', () => { '14:00', // 2:00 PM 'UTC' ) - expect(date.toISOString()).toBe('2025-04-21T14:00:00.000Z'); + expect(date.toISOString()).toBe('2025-04-21T14:00:00.000Z') }) it('should correctly handle America/Los_Angeles (UTC-7 during DST)', () => { @@ -466,7 +466,7 @@ describe('Schedule Utilities', () => { 'America/Los_Angeles' ) // 2:00 PM PDT should be 21:00 UTC (14 + 7) - expect(date.toISOString()).toBe('2025-04-21T21:00:00.000Z'); + expect(date.toISOString()).toBe('2025-04-21T21:00:00.000Z') }) it('should correctly handle America/Los_Angeles (UTC-8 outside DST)', () => { @@ -477,7 +477,7 @@ describe('Schedule Utilities', () => { 'America/Los_Angeles' ) // 2:00 PM PST should be 22:00 UTC (14 + 8) - expect(date.toISOString()).toBe('2025-01-10T22:00:00.000Z'); + expect(date.toISOString()).toBe('2025-01-10T22:00:00.000Z') }) it('should correctly handle America/New_York (UTC-4 during DST)', () => { @@ -488,7 +488,7 @@ describe('Schedule Utilities', () => { 'America/New_York' ) // 10:30 AM EDT should be 14:30 UTC (10.5 + 4) - expect(date.toISOString()).toBe('2025-06-15T14:30:00.000Z'); + expect(date.toISOString()).toBe('2025-06-15T14:30:00.000Z') }) it('should correctly handle America/New_York (UTC-5 outside DST)', () => { @@ -499,7 +499,7 @@ describe('Schedule Utilities', () => { 'America/New_York' ) // 10:30 AM EST should be 15:30 UTC (10.5 + 5) - expect(date.toISOString()).toBe('2025-12-20T15:30:00.000Z'); + expect(date.toISOString()).toBe('2025-12-20T15:30:00.000Z') }) it('should correctly handle Europe/London 
(UTC+1 during DST)', () => { @@ -510,7 +510,7 @@ describe('Schedule Utilities', () => { 'Europe/London' ) // 9:15 AM BST should be 08:15 UTC (9.25 - 1) - expect(date.toISOString()).toBe('2025-08-05T08:15:00.000Z'); + expect(date.toISOString()).toBe('2025-08-05T08:15:00.000Z') }) it('should correctly handle Europe/London (UTC+0 outside DST)', () => { @@ -521,7 +521,7 @@ describe('Schedule Utilities', () => { 'Europe/London' ) // 9:15 AM GMT should be 09:15 UTC (9.25 - 0) - expect(date.toISOString()).toBe('2025-02-10T09:15:00.000Z'); + expect(date.toISOString()).toBe('2025-02-10T09:15:00.000Z') }) it('should correctly handle Asia/Tokyo (UTC+9)', () => { @@ -532,18 +532,14 @@ describe('Schedule Utilities', () => { 'Asia/Tokyo' ) // 5:00 PM JST should be 08:00 UTC (17 - 9) - expect(date.toISOString()).toBe('2025-07-01T08:00:00.000Z'); + expect(date.toISOString()).toBe('2025-07-01T08:00:00.000Z') }) it('should handle date object input', () => { // Using a Date object that represents midnight UTC on the target day - const dateInput = new Date(Date.UTC(2025, 3, 21)); // April 21, 2025 - const date = createDateWithTimezone( - dateInput, - '14:00', - 'America/Los_Angeles' - ) - expect(date.toISOString()).toBe('2025-04-21T21:00:00.000Z'); + const dateInput = new Date(Date.UTC(2025, 3, 21)) // April 21, 2025 + const date = createDateWithTimezone(dateInput, '14:00', 'America/Los_Angeles') + expect(date.toISOString()).toBe('2025-04-21T21:00:00.000Z') }) it('should handle time crossing midnight due to timezone offset', () => { @@ -555,7 +551,7 @@ describe('Schedule Utilities', () => { 'Australia/Sydney' ) // 1:00 AM AEDT on Oct 15th should be 14:00 UTC on Oct 14th (1 - 11 = -10 -> previous day 14:00) - expect(date.toISOString()).toBe('2025-10-14T14:00:00.000Z'); + expect(date.toISOString()).toBe('2025-10-14T14:00:00.000Z') }) }) }) diff --git a/sim/lib/schedules/utils.ts b/apps/sim/lib/schedules/utils.ts similarity index 91% rename from sim/lib/schedules/utils.ts rename to 
apps/sim/lib/schedules/utils.ts index 77318c986..6fa935d47 100644 --- a/sim/lib/schedules/utils.ts +++ b/apps/sim/lib/schedules/utils.ts @@ -1,5 +1,5 @@ -import { formatDateTime } from '@/lib/utils' import { createLogger } from '@/lib/logs/console-logger' +import { formatDateTime } from '@/lib/utils' const logger = createLogger('ScheduleUtils') @@ -64,7 +64,7 @@ export function getScheduleTimeValues(starterBlock: BlockState): { } { // Extract schedule time (common field that can override others) const scheduleTime = getSubBlockValue(starterBlock, 'scheduleTime') - + // Extract schedule start date const scheduleStartAt = getSubBlockValue(starterBlock, 'scheduleStartAt') @@ -134,7 +134,9 @@ export function createDateWithTimezone( // 2. Create a tentative UTC Date object using the target date and time components // This assumes, for a moment, that the target H:M were meant for UTC. - const tentativeUTCDate = new Date(Date.UTC(year, monthIndex, day, targetHours, targetMinutes, 0)) + const tentativeUTCDate = new Date( + Date.UTC(year, monthIndex, day, targetHours, targetMinutes, 0) + ) // 3. If the target timezone is UTC, we're done. if (timezone === 'UTC') { @@ -155,7 +157,8 @@ export function createDateWithTimezone( }) const parts = formatter.formatToParts(tentativeUTCDate) - const getPart = (type: Intl.DateTimeFormatPartTypes) => parts.find(p => p.type === type)?.value + const getPart = (type: Intl.DateTimeFormatPartTypes) => + parts.find((p) => p.type === type)?.value const formattedYear = parseInt(getPart('year') || '0', 10) const formattedMonth = parseInt(getPart('month') || '0', 10) // 1-based @@ -179,14 +182,7 @@ export function createDateWithTimezone( // that resulted from the tentative UTC date. This difference represents the offset // needed to adjust the UTC time. // Create the intended local time as a UTC timestamp for comparison purposes. 
- const intendedLocalTimeAsUTC = Date.UTC( - year, - monthIndex, - day, - targetHours, - targetMinutes, - 0 - ) + const intendedLocalTimeAsUTC = Date.UTC(year, monthIndex, day, targetHours, targetMinutes, 0) // The offset needed for UTC time is the difference between the intended local time // and the actual local time (when both are represented as UTC timestamps). @@ -197,20 +193,21 @@ export function createDateWithTimezone( const finalDate = new Date(finalUTCTimeMilliseconds) return finalDate - } catch (e) { - logger.error("Error creating date with timezone:", e, { dateInput, timeStr, timezone }) + logger.error('Error creating date with timezone:', e, { dateInput, timeStr, timezone }) // Fallback to a simple UTC interpretation on error try { - const baseDate = typeof dateInput === 'string' ? new Date(dateInput) : new Date(dateInput) - const [hours, minutes] = parseTimeString(timeStr) - const year = baseDate.getUTCFullYear() - const monthIndex = baseDate.getUTCMonth() - const day = baseDate.getUTCDate() - return new Date(Date.UTC(year, monthIndex, day, hours, minutes, 0)) + const baseDate = typeof dateInput === 'string' ? new Date(dateInput) : new Date(dateInput) + const [hours, minutes] = parseTimeString(timeStr) + const year = baseDate.getUTCFullYear() + const monthIndex = baseDate.getUTCMonth() + const day = baseDate.getUTCDate() + return new Date(Date.UTC(year, monthIndex, day, hours, minutes, 0)) } catch (fallbackError) { - logger.error("Error during fallback date creation:", fallbackError) - throw new Error(`Failed to create date with timezone (${timezone}): ${fallbackError instanceof Error ? fallbackError.message : String(fallbackError)}`) + logger.error('Error during fallback date creation:', fallbackError) + throw new Error( + `Failed to create date with timezone (${timezone}): ${fallbackError instanceof Error ? 
fallbackError.message : String(fallbackError)}` + ) } } } @@ -245,7 +242,10 @@ export function generateCronExpression( } case 'custom': { - const cronExpression = getSubBlockValue(scheduleValues as unknown as BlockState, 'cronExpression') + const cronExpression = getSubBlockValue( + scheduleValues as unknown as BlockState, + 'cronExpression' + ) if (!cronExpression) { throw new Error('No cron expression provided for custom schedule') } @@ -271,29 +271,31 @@ export function calculateNextRunTime( ): Date { // Get timezone (default to UTC) const timezone = scheduleValues.timezone || 'UTC' - + // Get the current time let baseDate = new Date() - + // If we have both a start date and time, use them together with timezone awareness if (scheduleValues.scheduleStartAt && scheduleValues.scheduleTime) { try { - logger.info(`Creating date with: startAt=${scheduleValues.scheduleStartAt}, time=${scheduleValues.scheduleTime}, timezone=${timezone}`) - + logger.info( + `Creating date with: startAt=${scheduleValues.scheduleStartAt}, time=${scheduleValues.scheduleTime}, timezone=${timezone}` + ) + const combinedDate = createDateWithTimezone( scheduleValues.scheduleStartAt, scheduleValues.scheduleTime, timezone ) - + logger.info(`Combined date result: ${combinedDate.toISOString()}`) - + // If the combined date is in the future, use it as our next run time if (combinedDate > baseDate) { return combinedDate } } catch (e) { - logger.error("Error combining scheduled date and time:", e) + logger.error('Error combining scheduled date and time:', e) } } // If only scheduleStartAt is set (without scheduleTime), parse it directly @@ -301,13 +303,13 @@ export function calculateNextRunTime( try { // Check if the date string already includes time information const startAtStr = scheduleValues.scheduleStartAt - const hasTimeComponent = startAtStr.includes('T') && - (startAtStr.includes(':') || startAtStr.includes('.')) - + const hasTimeComponent = + startAtStr.includes('T') && 
(startAtStr.includes(':') || startAtStr.includes('.')) + if (hasTimeComponent) { // If the string already has time info, parse it directly but with timezone awareness const startDate = new Date(startAtStr) - + // If it's a UTC ISO string (ends with Z), use it directly if (startAtStr.endsWith('Z') && timezone === 'UTC') { if (startDate > baseDate) { @@ -317,15 +319,15 @@ export function calculateNextRunTime( // For non-UTC dates or when timezone isn't UTC, we need to interpret it in the specified timezone // Extract time from the date string (crude but effective for ISO format) const timeMatch = startAtStr.match(/T(\d{2}:\d{2})/) - const timeStr = timeMatch ? timeMatch[1] : "00:00" - + const timeStr = timeMatch ? timeMatch[1] : '00:00' + // Use our timezone-aware function with the extracted time const tzAwareDate = createDateWithTimezone( startAtStr.split('T')[0], // Just the date part - timeStr, // Time extracted from string + timeStr, // Time extracted from string timezone ) - + if (tzAwareDate > baseDate) { return tzAwareDate } @@ -334,24 +336,24 @@ export function calculateNextRunTime( // If no time component in the string, use midnight in the specified timezone const startDate = createDateWithTimezone( scheduleValues.scheduleStartAt, - "00:00", // Use midnight in the specified timezone + '00:00', // Use midnight in the specified timezone timezone ) - + if (startDate > baseDate) { return startDate } } } catch (e) { - logger.error("Error parsing scheduleStartAt:", e) + logger.error('Error parsing scheduleStartAt:', e) } } - + // If we have a scheduleTime (but no future scheduleStartAt), use it for today const scheduleTimeOverride = scheduleValues.scheduleTime ? 
parseTimeString(scheduleValues.scheduleTime) : null - + // Create next run date based on the current date const nextRun = new Date(baseDate) @@ -394,7 +396,7 @@ export function calculateNextRunTime( if (nextRun <= now) { nextRun.setMinutes(nextRun.getMinutes() + minutesInterval) } - + return nextRun } @@ -409,7 +411,7 @@ export function calculateNextRunTime( if (nextRun <= new Date()) { nextRun.setHours(nextRun.getHours() + 1) } - + return nextRun } @@ -423,7 +425,7 @@ export function calculateNextRunTime( if (nextRun <= new Date()) { nextRun.setDate(nextRun.getDate() + 1) } - + return nextRun } @@ -437,7 +439,7 @@ export function calculateNextRunTime( while (nextRun.getDay() !== scheduleValues.weeklyDay || nextRun <= new Date()) { nextRun.setDate(nextRun.getDate() + 1) } - + return nextRun } @@ -453,7 +455,7 @@ export function calculateNextRunTime( if (nextRun <= new Date()) { nextRun.setMonth(nextRun.getMonth() + 1) } - + return nextRun } diff --git a/sim/lib/storage.ts b/apps/sim/lib/storage.ts similarity index 100% rename from sim/lib/storage.ts rename to apps/sim/lib/storage.ts diff --git a/sim/lib/subscription.ts b/apps/sim/lib/subscription.ts similarity index 73% rename from sim/lib/subscription.ts rename to apps/sim/lib/subscription.ts index 5fc2749f2..c2b681f08 100644 --- a/sim/lib/subscription.ts +++ b/apps/sim/lib/subscription.ts @@ -1,9 +1,9 @@ import { eq } from 'drizzle-orm' +import { isProd } from '@/lib/environment' +import { createLogger } from '@/lib/logs/console-logger' import { db } from '@/db' import * as schema from '@/db/schema' import { client } from './auth-client' -import { createLogger } from '@/lib/logs/console-logger' -import { isProd } from '@/lib/environment' const logger = createLogger('Subscription') @@ -16,43 +16,49 @@ export async function isProPlan(userId: string): Promise { if (!isProd) { return true } - + // First check organizations the user belongs to (prioritize org subscriptions) - const memberships = await db.select() + 
const memberships = await db + .select() .from(schema.member) .where(eq(schema.member.userId, userId)) - + // Check each organization for active Pro or Team subscriptions for (const membership of memberships) { - const orgSubscriptions = await db.select() + const orgSubscriptions = await db + .select() .from(schema.subscription) .where(eq(schema.subscription.referenceId, membership.organizationId)) - + const orgHasProPlan = orgSubscriptions.some( - sub => (sub.status === 'active') && (sub.plan === 'pro' || sub.plan === 'team') + (sub) => sub.status === 'active' && (sub.plan === 'pro' || sub.plan === 'team') ) - + if (orgHasProPlan) { - logger.info('User has pro plan via organization', { userId, orgId: membership.organizationId }) + logger.info('User has pro plan via organization', { + userId, + orgId: membership.organizationId, + }) return true } } - + // If no org subscriptions, check direct subscriptions - const directSubscriptions = await db.select() + const directSubscriptions = await db + .select() .from(schema.subscription) .where(eq(schema.subscription.referenceId, userId)) - + // Find active pro subscription (either Pro or Team plan) const hasDirectProPlan = directSubscriptions.some( - sub => (sub.status === 'active') && (sub.plan === 'pro' || sub.plan === 'team') + (sub) => sub.status === 'active' && (sub.plan === 'pro' || sub.plan === 'team') ) - + if (hasDirectProPlan) { logger.info('User has direct pro plan', { userId }) return true } - + return false } catch (error) { logger.error('Error checking pro plan status', { error, userId }) @@ -69,42 +75,45 @@ export async function isTeamPlan(userId: string): Promise { if (!isProd) { return true } - + // First check organizations the user belongs to (prioritize org subscriptions) - const memberships = await db.select() + const memberships = await db + .select() .from(schema.member) .where(eq(schema.member.userId, userId)) - + // Check each organization for active Team subscriptions for (const membership of 
memberships) { - const orgSubscriptions = await db.select() + const orgSubscriptions = await db + .select() .from(schema.subscription) .where(eq(schema.subscription.referenceId, membership.organizationId)) - + const orgHasTeamPlan = orgSubscriptions.some( - sub => (sub.status === 'active') && sub.plan === 'team' + (sub) => sub.status === 'active' && sub.plan === 'team' ) - + if (orgHasTeamPlan) { return true } } - - // If no org subscriptions found, check direct subscriptions - const directSubscriptions = await db.select() + + // If no org subscriptions found, check direct subscriptions + const directSubscriptions = await db + .select() .from(schema.subscription) .where(eq(schema.subscription.referenceId, userId)) - + // Find active team subscription const hasDirectTeamPlan = directSubscriptions.some( - sub => (sub.status === 'active') && sub.plan === 'team' + (sub) => sub.status === 'active' && sub.plan === 'team' ) - + if (hasDirectTeamPlan) { logger.info('User has direct team plan', { userId }) return true } - + return false } catch (error) { logger.error('Error checking team plan status', { error, userId }) @@ -121,66 +130,68 @@ export async function hasExceededCostLimit(userId: string): Promise { if (!isProd) { return false } - + // Get user's direct subscription const { data: directSubscriptions } = await client.subscription.list({ - query: { referenceId: userId } + query: { referenceId: userId }, }) - + // Find active direct subscription - const activeDirectSubscription = directSubscriptions?.find( - sub => sub.status === 'active' - ) - + const activeDirectSubscription = directSubscriptions?.find((sub) => sub.status === 'active') + // Get organizations the user belongs to - const memberships = await db.select() + const memberships = await db + .select() .from(schema.member) .where(eq(schema.member.userId, userId)) - + let highestCostLimit = 0 - + // Check cost limit from direct subscription if (activeDirectSubscription && typeof 
activeDirectSubscription.limits?.cost === 'number') { highestCostLimit = activeDirectSubscription.limits.cost } - + // Check cost limits from organization subscriptions for (const membership of memberships) { const { data: orgSubscriptions } = await client.subscription.list({ - query: { referenceId: membership.organizationId } + query: { referenceId: membership.organizationId }, }) - - const activeOrgSubscription = orgSubscriptions?.find( - sub => sub.status === 'active' - ) - - if (activeOrgSubscription && - typeof activeOrgSubscription.limits?.cost === 'number' && - activeOrgSubscription.limits.cost > highestCostLimit) { + + const activeOrgSubscription = orgSubscriptions?.find((sub) => sub.status === 'active') + + if ( + activeOrgSubscription && + typeof activeOrgSubscription.limits?.cost === 'number' && + activeOrgSubscription.limits.cost > highestCostLimit + ) { highestCostLimit = activeOrgSubscription.limits.cost } } - + // If no subscription found, use default free tier limit if (highestCostLimit === 0) { - highestCostLimit = process.env.FREE_TIER_COST_LIMIT - ? parseFloat(process.env.FREE_TIER_COST_LIMIT) + highestCostLimit = process.env.FREE_TIER_COST_LIMIT + ? 
parseFloat(process.env.FREE_TIER_COST_LIMIT) : 5 } - + logger.info('User cost limit from subscription', { userId, costLimit: highestCostLimit }) - + // Get user's actual usage from the database - const statsRecords = await db.select().from(schema.userStats).where(eq(schema.userStats.userId, userId)) - + const statsRecords = await db + .select() + .from(schema.userStats) + .where(eq(schema.userStats.userId, userId)) + if (statsRecords.length === 0) { // No usage yet, so they haven't exceeded the limit return false } - + // Get the current cost and compare with the limit const currentCost = parseFloat(statsRecords[0].totalCost.toString()) - + return currentCost >= highestCostLimit } catch (error) { logger.error('Error checking cost limit', { error, userId }) @@ -197,41 +208,38 @@ export async function isSharingEnabled(userId: string): Promise { if (!isProd) { return true } - + // Check direct subscription const { data: directSubscriptions } = await client.subscription.list({ - query: { referenceId: userId } + query: { referenceId: userId }, }) - - const activeDirectSubscription = directSubscriptions?.find( - sub => sub.status === 'active' - ) - + + const activeDirectSubscription = directSubscriptions?.find((sub) => sub.status === 'active') + // If user has direct pro/team subscription with sharing enabled if (activeDirectSubscription && activeDirectSubscription.limits?.sharingEnabled) { return true } - + // Check organizations the user belongs to - const memberships = await db.select() + const memberships = await db + .select() .from(schema.member) .where(eq(schema.member.userId, userId)) - + // Check each organization for a subscription with sharing enabled for (const membership of memberships) { const { data: orgSubscriptions } = await client.subscription.list({ - query: { referenceId: membership.organizationId } + query: { referenceId: membership.organizationId }, }) - - const activeOrgSubscription = orgSubscriptions?.find( - sub => sub.status === 'active' - ) - + 
+ const activeOrgSubscription = orgSubscriptions?.find((sub) => sub.status === 'active') + if (activeOrgSubscription && activeOrgSubscription.limits?.sharingEnabled) { return true } } - + return false } catch (error) { logger.error('Error checking sharing permission', { error, userId }) @@ -248,41 +256,38 @@ export async function isMultiplayerEnabled(userId: string): Promise { if (!isProd) { return true } - + // Check direct subscription const { data: directSubscriptions } = await client.subscription.list({ - query: { referenceId: userId } + query: { referenceId: userId }, }) - - const activeDirectSubscription = directSubscriptions?.find( - sub => sub.status === 'active' - ) - + + const activeDirectSubscription = directSubscriptions?.find((sub) => sub.status === 'active') + // If user has direct team subscription with multiplayer enabled if (activeDirectSubscription && activeDirectSubscription.limits?.multiplayerEnabled) { return true } - + // Check organizations the user belongs to - const memberships = await db.select() + const memberships = await db + .select() .from(schema.member) .where(eq(schema.member.userId, userId)) - + // Check each organization for a subscription with multiplayer enabled for (const membership of memberships) { const { data: orgSubscriptions } = await client.subscription.list({ - query: { referenceId: membership.organizationId } + query: { referenceId: membership.organizationId }, }) - - const activeOrgSubscription = orgSubscriptions?.find( - sub => sub.status === 'active' - ) - + + const activeOrgSubscription = orgSubscriptions?.find((sub) => sub.status === 'active') + if (activeOrgSubscription && activeOrgSubscription.limits?.multiplayerEnabled) { return true } } - + return false } catch (error) { logger.error('Error checking multiplayer permission', { error, userId }) @@ -299,44 +304,44 @@ export async function isWorkspaceCollaborationEnabled(userId: string): Promise sub.status === 'active' - ) - + + const activeDirectSubscription = 
directSubscriptions?.find((sub) => sub.status === 'active') + // If user has direct team subscription with workspace collaboration enabled - if (activeDirectSubscription && activeDirectSubscription.limits?.workspaceCollaborationEnabled) { + if ( + activeDirectSubscription && + activeDirectSubscription.limits?.workspaceCollaborationEnabled + ) { return true } - + // Check organizations the user belongs to - const memberships = await db.select() + const memberships = await db + .select() .from(schema.member) .where(eq(schema.member.userId, userId)) - + // Check each organization for a subscription with workspace collaboration enabled for (const membership of memberships) { const { data: orgSubscriptions } = await client.subscription.list({ - query: { referenceId: membership.organizationId } + query: { referenceId: membership.organizationId }, }) - - const activeOrgSubscription = orgSubscriptions?.find( - sub => sub.status === 'active' - ) - + + const activeOrgSubscription = orgSubscriptions?.find((sub) => sub.status === 'active') + if (activeOrgSubscription && activeOrgSubscription.limits?.workspaceCollaborationEnabled) { return true } } - + return false } catch (error) { logger.error('Error checking workspace collaboration permission', { error, userId }) return false // Be conservative in case of error } -} \ No newline at end of file +} diff --git a/sim/lib/telemetry.ts b/apps/sim/lib/telemetry.ts similarity index 68% rename from sim/lib/telemetry.ts rename to apps/sim/lib/telemetry.ts index 59c1b7b37..5793ef404 100644 --- a/sim/lib/telemetry.ts +++ b/apps/sim/lib/telemetry.ts @@ -1,16 +1,15 @@ /** * Sim Studio Telemetry - * + * * This file can be customized in forked repositories: - * - Set TELEMETRY_ENDPOINT in telemetry.config.js to your collector + * - Set TELEMETRY_ENDPOINT in telemetry.config.ts to your collector * - Modify allowed event categories as needed * - Edit disclosure text to match your privacy policy - * + * * Please maintain ethical telemetry 
practices if modified. */ - -import { createLogger } from '@/lib/logs/console-logger' import { diag, DiagConsoleLogger, DiagLogLevel } from '@opentelemetry/api' +import { createLogger } from '@/lib/logs/console-logger' diag.setLogger(new DiagConsoleLogger(), DiagLogLevel.ERROR) @@ -47,12 +46,12 @@ export function getTelemetryStatus(): TelemetryStatus { if (typeof window === 'undefined') { return { enabled: true, notifiedUser: false } } - + try { if (process.env.NEXT_TELEMETRY_DISABLED === '1') { return { enabled: false, notifiedUser: true } } - + const stored = localStorage.getItem(TELEMETRY_STATUS_KEY) return stored ? JSON.parse(stored) : { enabled: true, notifiedUser: false } } catch (error) { @@ -68,10 +67,10 @@ export function setTelemetryStatus(status: TelemetryStatus): void { if (typeof window === 'undefined') { return } - + try { localStorage.setItem(TELEMETRY_STATUS_KEY, JSON.stringify(status)) - + if (status.enabled && !telemetryInitialized) { initializeClientTelemetry() } @@ -96,7 +95,7 @@ export function disableTelemetry(): void { if (currentStatus.enabled) { trackEvent('consent', 'opt_out') } - + setTelemetryStatus({ enabled: false, notifiedUser: true }) logger.info('Telemetry disabled') } @@ -109,15 +108,15 @@ export function enableTelemetry(): void { logger.info('Telemetry disabled by environment variable, cannot enable') return } - + const currentStatus = getTelemetryStatus() if (!currentStatus.enabled) { trackEvent('consent', 'opt_in') } - + setTelemetryStatus({ enabled: true, notifiedUser: true }) logger.info('Telemetry enabled') - + if (!telemetryInitialized) { initializeClientTelemetry() } @@ -132,89 +131,117 @@ function initializeClientTelemetry(): void { if (typeof window === 'undefined' || telemetryInitialized) { return } - + try { - const clientSideEnabled = (window as any).__SIM_STUDIO_TELEMETRY_CONFIG?.clientSide?.enabled !== false - + const clientSideEnabled = + (window as any).__SIM_STUDIO_TELEMETRY_CONFIG?.clientSide?.enabled !== false + 
if (!clientSideEnabled) { logger.info('Client-side telemetry disabled in configuration') return } - + if (process.env.NODE_ENV === 'production') { trackEvent('page_view', window.location.pathname) - + if (typeof window.history !== 'undefined') { const originalPushState = window.history.pushState - window.history.pushState = function(...args) { + window.history.pushState = function (...args) { const result = originalPushState.apply(this, args) trackEvent('page_view', window.location.pathname) return result } } - + if (typeof window.performance !== 'undefined') { window.addEventListener('load', () => { setTimeout(() => { if (performance.getEntriesByType) { - const navigationTiming = performance.getEntriesByType('navigation')[0] as PerformanceNavigationTiming + const navigationTiming = performance.getEntriesByType( + 'navigation' + )[0] as PerformanceNavigationTiming if (navigationTiming) { - trackEvent('performance', 'page_load', window.location.pathname, - navigationTiming.loadEventEnd - navigationTiming.startTime) + trackEvent( + 'performance', + 'page_load', + window.location.pathname, + navigationTiming.loadEventEnd - navigationTiming.startTime + ) } - - const lcpEntries = performance.getEntriesByType('paint') - .filter(entry => entry.name === 'largest-contentful-paint') + + const lcpEntries = performance + .getEntriesByType('paint') + .filter((entry) => entry.name === 'largest-contentful-paint') if (lcpEntries.length > 0) { - trackEvent('performance', 'largest_contentful_paint', - window.location.pathname, lcpEntries[0].startTime) + trackEvent( + 'performance', + 'largest_contentful_paint', + window.location.pathname, + lcpEntries[0].startTime + ) } } }, 0) }) } - - document.addEventListener('click', (e) => { - let target = e.target as HTMLElement | null - let telemetryAction = null - - while (target && !telemetryAction) { - telemetryAction = target.getAttribute('data-telemetry') - if (!telemetryAction) { - target = target.parentElement + + 
document.addEventListener( + 'click', + (e) => { + let target = e.target as HTMLElement | null + let telemetryAction = null + + while (target && !telemetryAction) { + telemetryAction = target.getAttribute('data-telemetry') + if (!telemetryAction) { + target = target.parentElement + } } - } - - if (telemetryAction) { - trackEvent('feature_usage', telemetryAction) - } - }, { passive: true }) - - document.addEventListener('submit', (e) => { - const form = e.target as HTMLFormElement - const telemetryAction = form.getAttribute('data-telemetry') - if (telemetryAction) { - trackEvent('feature_usage', telemetryAction) - } - }, { passive: true }) - - window.addEventListener('error', (event) => { - const errorDetails = { - message: event.error?.message || 'Unknown error', - stack: event.error?.stack?.split('\n')[0] || '', - url: window.location.pathname, - } - trackEvent('error', 'client_error', errorDetails.message) - }, { passive: true }) - - window.addEventListener('unhandledrejection', (event) => { - const errorDetails = { - message: event.reason?.message || String(event.reason) || 'Unhandled promise rejection', - url: window.location.pathname, - } - trackEvent('error', 'unhandled_rejection', errorDetails.message) - }, { passive: true }) - + + if (telemetryAction) { + trackEvent('feature_usage', telemetryAction) + } + }, + { passive: true } + ) + + document.addEventListener( + 'submit', + (e) => { + const form = e.target as HTMLFormElement + const telemetryAction = form.getAttribute('data-telemetry') + if (telemetryAction) { + trackEvent('feature_usage', telemetryAction) + } + }, + { passive: true } + ) + + window.addEventListener( + 'error', + (event) => { + const errorDetails = { + message: event.error?.message || 'Unknown error', + stack: event.error?.stack?.split('\n')[0] || '', + url: window.location.pathname, + } + trackEvent('error', 'client_error', errorDetails.message) + }, + { passive: true } + ) + + window.addEventListener( + 'unhandledrejection', + (event) 
=> { + const errorDetails = { + message: event.reason?.message || String(event.reason) || 'Unhandled promise rejection', + url: window.location.pathname, + } + trackEvent('error', 'unhandled_rejection', errorDetails.message) + }, + { passive: true } + ) + logger.info('Enhanced client-side telemetry initialized') telemetryInitialized = true } @@ -226,11 +253,16 @@ function initializeClientTelemetry(): void { /** * Track a telemetry event */ -export async function trackEvent(category: string, action: string, label?: string, value?: number): Promise { +export async function trackEvent( + category: string, + action: string, + label?: string, + value?: number +): Promise { const status = getTelemetryStatus() - + if (!status.enabled) return - + try { if (process.env.NODE_ENV === 'production') { await fetch('/api/telemetry', { @@ -256,4 +288,4 @@ export async function trackEvent(category: string, action: string, label?: strin } catch (error) { logger.error('Failed to track telemetry event', error) } -} \ No newline at end of file +} diff --git a/sim/lib/uploads/s3-client.test.ts b/apps/sim/lib/uploads/s3-client.test.ts similarity index 91% rename from sim/lib/uploads/s3-client.test.ts rename to apps/sim/lib/uploads/s3-client.test.ts index 441ec8a0c..046e55834 100644 --- a/sim/lib/uploads/s3-client.test.ts +++ b/apps/sim/lib/uploads/s3-client.test.ts @@ -5,38 +5,38 @@ */ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' import { - S3Client, - PutObjectCommand, + DeleteObjectCommand, GetObjectCommand, - DeleteObjectCommand + PutObjectCommand, + S3Client, } from '@aws-sdk/client-s3' import { getSignedUrl } from '@aws-sdk/s3-request-presigner' import { - uploadToS3, - getPresignedUrl, - downloadFromS3, deleteFromS3, + downloadFromS3, + FileInfo, + getPresignedUrl, s3Client, - FileInfo + uploadToS3, } from './s3-client' // Mock AWS SDK vi.mock('@aws-sdk/client-s3', () => { const mockSend = vi.fn() const mockS3Client = vi.fn().mockImplementation(() => ({ 
- send: mockSend + send: mockSend, })) return { S3Client: mockS3Client, PutObjectCommand: vi.fn(), GetObjectCommand: vi.fn(), - DeleteObjectCommand: vi.fn() + DeleteObjectCommand: vi.fn(), } }) vi.mock('@aws-sdk/s3-request-presigner', () => ({ - getSignedUrl: vi.fn().mockResolvedValue('https://example.com/presigned-url') + getSignedUrl: vi.fn().mockResolvedValue('https://example.com/presigned-url'), })) // Mock date for predictable timestamps @@ -44,7 +44,7 @@ vi.mock('./setup', () => ({ S3_CONFIG: { bucket: 'test-bucket', region: 'test-region', - } + }, })) // Mock logger @@ -53,8 +53,8 @@ vi.mock('@/lib/logs/console-logger', () => ({ info: vi.fn(), error: vi.fn(), warn: vi.fn(), - debug: vi.fn() - }) + debug: vi.fn(), + }), })) describe('S3 Client', () => { @@ -79,7 +79,7 @@ describe('S3 Client', () => { it('should upload a file to S3 and return file info', async () => { // Mock S3 client send method to return an appropriate type vi.mocked(s3Client.send).mockResolvedValueOnce({ - $metadata: { httpStatusCode: 200 } + $metadata: { httpStatusCode: 200 }, } as any) const testFile = Buffer.from('test file content') @@ -97,8 +97,8 @@ describe('S3 Client', () => { ContentType: contentType, Metadata: { originalName: fileName, - uploadedAt: expect.any(String) - } + uploadedAt: expect.any(String), + }, }) expect(s3Client.send).toHaveBeenCalledTimes(1) @@ -109,13 +109,13 @@ describe('S3 Client', () => { key: expect.stringContaining('test-file.txt'), name: fileName, size: fileSize, - type: contentType + type: contentType, }) }) it('should handle spaces in filenames', async () => { vi.mocked(s3Client.send).mockResolvedValueOnce({ - $metadata: { httpStatusCode: 200 } + $metadata: { httpStatusCode: 200 }, } as any) const testFile = Buffer.from('test file content') @@ -131,7 +131,7 @@ describe('S3 Client', () => { it('should use provided size if available', async () => { vi.mocked(s3Client.send).mockResolvedValueOnce({ - $metadata: { httpStatusCode: 200 } + $metadata: { 
httpStatusCode: 200 }, } as any) const testFile = Buffer.from('test file content') @@ -165,14 +165,10 @@ describe('S3 Client', () => { expect(GetObjectCommand).toHaveBeenCalledWith({ Bucket: 'test-bucket', - Key: key + Key: key, }) - expect(getSignedUrl).toHaveBeenCalledWith( - s3Client, - expect.any(Object), - { expiresIn } - ) + expect(getSignedUrl).toHaveBeenCalledWith(s3Client, expect.any(Object), { expiresIn }) expect(url).toBe('https://example.com/presigned-url') }) @@ -212,12 +208,12 @@ describe('S3 Client', () => { callback() } return mockStream - }) + }), } vi.mocked(s3Client.send).mockResolvedValueOnce({ Body: mockStream, - $metadata: { httpStatusCode: 200 } + $metadata: { httpStatusCode: 200 }, } as any) const key = 'test-file.txt' @@ -225,12 +221,14 @@ describe('S3 Client', () => { expect(GetObjectCommand).toHaveBeenCalledWith({ Bucket: 'test-bucket', - Key: key + Key: key, }) expect(s3Client.send).toHaveBeenCalledTimes(1) expect(result).toBeInstanceOf(Buffer) - expect(Buffer.concat([Buffer.from('chunk1'), Buffer.from('chunk2')]).toString()).toEqual(result.toString()) + expect(Buffer.concat([Buffer.from('chunk1'), Buffer.from('chunk2')]).toString()).toEqual( + result.toString() + ) }) it('should handle stream errors', async () => { @@ -240,12 +238,12 @@ describe('S3 Client', () => { callback(new Error('Stream error')) } return mockStream - }) + }), } vi.mocked(s3Client.send).mockResolvedValueOnce({ Body: mockStream, - $metadata: { httpStatusCode: 200 } + $metadata: { httpStatusCode: 200 }, } as any) const key = 'test-file.txt' @@ -264,7 +262,7 @@ describe('S3 Client', () => { describe('deleteFromS3', () => { it('should delete a file from S3', async () => { vi.mocked(s3Client.send).mockResolvedValueOnce({ - $metadata: { httpStatusCode: 200 } + $metadata: { httpStatusCode: 200 }, } as any) const key = 'test-file.txt' @@ -272,7 +270,7 @@ describe('S3 Client', () => { expect(DeleteObjectCommand).toHaveBeenCalledWith({ Bucket: 'test-bucket', - Key: key + 
Key: key, }) expect(s3Client.send).toHaveBeenCalledTimes(1) @@ -298,4 +296,4 @@ describe('S3 Client', () => { // So instead of checking constructor call, check that mocked client exists }) }) -}) \ No newline at end of file +}) diff --git a/sim/lib/uploads/s3-client.ts b/apps/sim/lib/uploads/s3-client.ts similarity index 72% rename from sim/lib/uploads/s3-client.ts rename to apps/sim/lib/uploads/s3-client.ts index 8422981c8..ffa39cf6c 100644 --- a/sim/lib/uploads/s3-client.ts +++ b/apps/sim/lib/uploads/s3-client.ts @@ -1,4 +1,9 @@ -import { S3Client, PutObjectCommand, GetObjectCommand, DeleteObjectCommand } from '@aws-sdk/client-s3' +import { + DeleteObjectCommand, + GetObjectCommand, + PutObjectCommand, + S3Client, +} from '@aws-sdk/client-s3' import { getSignedUrl } from '@aws-sdk/s3-request-presigner' import { S3_CONFIG } from './setup' @@ -7,19 +12,19 @@ export const s3Client = new S3Client({ region: S3_CONFIG.region || '', credentials: { accessKeyId: process.env.AWS_ACCESS_KEY_ID || '', - secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY || '' - } + secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY || '', + }, }) /** * File information structure */ export interface FileInfo { - path: string // Path to access the file - key: string // S3 key or local filename - name: string // Original filename - size: number // File size in bytes - type: string // MIME type + path: string // Path to access the file + key: string // S3 key or local filename + name: string // Original filename + size: number // File size in bytes + type: string // MIME type } /** @@ -31,8 +36,8 @@ export interface FileInfo { * @returns Object with file information */ export async function uploadToS3( - file: Buffer, - fileName: string, + file: Buffer, + fileName: string, contentType: string, size?: number ): Promise { @@ -40,29 +45,31 @@ export async function uploadToS3( // Use a simple timestamp without directory structure const safeFileName = fileName.replace(/\s+/g, '-') // Replace spaces 
with hyphens const uniqueKey = `${Date.now()}-${safeFileName}` - + // Upload the file to S3 - await s3Client.send(new PutObjectCommand({ - Bucket: S3_CONFIG.bucket, - Key: uniqueKey, - Body: file, - ContentType: contentType, - // Add some useful metadata - Metadata: { - originalName: fileName, - uploadedAt: new Date().toISOString() - } - })) + await s3Client.send( + new PutObjectCommand({ + Bucket: S3_CONFIG.bucket, + Key: uniqueKey, + Body: file, + ContentType: contentType, + // Add some useful metadata + Metadata: { + originalName: fileName, + uploadedAt: new Date().toISOString(), + }, + }) + ) // Create a path for API to serve the file const servePath = `/api/files/serve/s3/${encodeURIComponent(uniqueKey)}` - + return { path: servePath, key: uniqueKey, name: fileName, size: size ?? file.length, - type: contentType + type: contentType, } } @@ -75,9 +82,9 @@ export async function uploadToS3( export async function getPresignedUrl(key: string, expiresIn = 3600) { const command = new GetObjectCommand({ Bucket: S3_CONFIG.bucket, - Key: key + Key: key, }) - + return getSignedUrl(s3Client, command, { expiresIn }) } @@ -89,12 +96,12 @@ export async function getPresignedUrl(key: string, expiresIn = 3600) { export async function downloadFromS3(key: string) { const command = new GetObjectCommand({ Bucket: S3_CONFIG.bucket, - Key: key + Key: key, }) - + const response = await s3Client.send(command) const stream = response.Body as any - + // Convert stream to buffer return new Promise((resolve, reject) => { const chunks: Buffer[] = [] @@ -109,8 +116,10 @@ export async function downloadFromS3(key: string) { * @param key S3 object key */ export async function deleteFromS3(key: string) { - await s3Client.send(new DeleteObjectCommand({ - Bucket: S3_CONFIG.bucket, - Key: key - })) -} \ No newline at end of file + await s3Client.send( + new DeleteObjectCommand({ + Bucket: S3_CONFIG.bucket, + Key: key, + }) + ) +} diff --git a/sim/lib/uploads/setup.server.ts 
b/apps/sim/lib/uploads/setup.server.ts similarity index 96% rename from sim/lib/uploads/setup.server.ts rename to apps/sim/lib/uploads/setup.server.ts index 8f026eed4..9dba0b0b9 100644 --- a/sim/lib/uploads/setup.server.ts +++ b/apps/sim/lib/uploads/setup.server.ts @@ -1,5 +1,5 @@ -import { ensureUploadsDirectory, USE_S3_STORAGE } from './setup' import { createLogger } from '@/lib/logs/console-logger' +import { ensureUploadsDirectory, USE_S3_STORAGE } from './setup' const logger = createLogger('UploadsSetup') @@ -7,7 +7,7 @@ const logger = createLogger('UploadsSetup') if (typeof process !== 'undefined') { // Log storage mode logger.info(`Storage mode: ${USE_S3_STORAGE ? 'S3' : 'Local'}`) - + if (USE_S3_STORAGE) { // Verify AWS credentials if (!process.env.AWS_ACCESS_KEY_ID || !process.env.AWS_SECRET_ACCESS_KEY) { @@ -28,4 +28,4 @@ if (typeof process !== 'undefined') { } } -export default ensureUploadsDirectory \ No newline at end of file +export default ensureUploadsDirectory diff --git a/sim/lib/uploads/setup.ts b/apps/sim/lib/uploads/setup.ts similarity index 99% rename from sim/lib/uploads/setup.ts rename to apps/sim/lib/uploads/setup.ts index 09963a6b1..0541c605f 100644 --- a/sim/lib/uploads/setup.ts +++ b/apps/sim/lib/uploads/setup.ts @@ -39,4 +39,4 @@ export async function ensureUploadsDirectory() { logger.error('Failed to create uploads directory:', error) return false } -} \ No newline at end of file +} diff --git a/sim/lib/urls/utils.ts b/apps/sim/lib/urls/utils.ts similarity index 98% rename from sim/lib/urls/utils.ts rename to apps/sim/lib/urls/utils.ts index e817b1883..708012425 100644 --- a/sim/lib/urls/utils.ts +++ b/apps/sim/lib/urls/utils.ts @@ -6,18 +6,18 @@ export function getBaseUrl(): string { if (typeof window !== 'undefined') { return window.location.origin } - + const baseUrl = process.env.NEXT_PUBLIC_APP_URL if (baseUrl) { if (baseUrl.startsWith('http://') || baseUrl.startsWith('https://')) { return baseUrl } - + const isProd = 
process.env.NODE_ENV === 'production' const protocol = isProd ? 'https://' : 'http://' return `${protocol}${baseUrl}` } - + return 'http://localhost:3000' } @@ -33,4 +33,4 @@ export function getBaseDomain(): string { const isProd = process.env.NODE_ENV === 'production' return isProd ? 'simstudio.ai' : 'localhost:3000' } -} \ No newline at end of file +} diff --git a/sim/lib/usage-monitor.ts b/apps/sim/lib/usage-monitor.ts similarity index 73% rename from sim/lib/usage-monitor.ts rename to apps/sim/lib/usage-monitor.ts index d57e74a01..e9dfa9900 100644 --- a/sim/lib/usage-monitor.ts +++ b/apps/sim/lib/usage-monitor.ts @@ -1,9 +1,9 @@ -import { isProPlan, isTeamPlan } from './subscription' -import { createLogger } from './logs/console-logger' -import { db } from '@/db' import { eq } from 'drizzle-orm' -import { userStats, member, organization as organizationTable, subscription } from '@/db/schema' import { isProd } from '@/lib/environment' +import { db } from '@/db' +import { member, organization as organizationTable, subscription, userStats } from '@/db/schema' +import { createLogger } from './logs/console-logger' +import { isProPlan, isTeamPlan } from './subscription' const logger = createLogger('UsageMonitor') @@ -25,50 +25,49 @@ interface UsageData { async function getTeamSeats(userId: string): Promise { try { // First check if user is part of an organization with a team subscription - const memberships = await db.select() - .from(member) - .where(eq(member.userId, userId)) - .limit(1) - + const memberships = await db.select().from(member).where(eq(member.userId, userId)).limit(1) + if (memberships.length > 0) { const orgId = memberships[0].organizationId - + // Check for organization's team subscription - const orgSubscriptions = await db.select() + const orgSubscriptions = await db + .select() .from(subscription) .where(eq(subscription.referenceId, orgId)) - + const teamSubscription = orgSubscriptions.find( - sub => (sub.status === 'active' && sub.plan === 
'team') + (sub) => sub.status === 'active' && sub.plan === 'team' ) - + if (teamSubscription?.seats) { - logger.info('Found organization team subscription with seats', { - userId, + logger.info('Found organization team subscription with seats', { + userId, orgId, - seats: teamSubscription.seats + seats: teamSubscription.seats, }) return teamSubscription.seats } } - + // If no organization team subscription, check for personal team subscription - const userSubscriptions = await db.select() + const userSubscriptions = await db + .select() .from(subscription) .where(eq(subscription.referenceId, userId)) - + const teamSubscription = userSubscriptions.find( - sub => (sub.status === 'active' && sub.plan === 'team') + (sub) => sub.status === 'active' && sub.plan === 'team' ) - + if (teamSubscription?.seats) { - logger.info('Found personal team subscription with seats', { - userId, - seats: teamSubscription.seats + logger.info('Found personal team subscription with seats', { + userId, + seats: teamSubscription.seats, }) return teamSubscription.seats } - + // Default to 10 seats if we know they're on a team plan but couldn't get seats info return 10 } catch (error) { @@ -85,121 +84,116 @@ async function getTeamSeats(userId: string): Promise { export async function checkUsageStatus(userId: string): Promise { try { logger.info('Starting usage status check for user', { userId }) - + // In development, always return permissive limits if (!isProd) { // Get actual usage from the database for display purposes const statsRecords = await db.select().from(userStats).where(eq(userStats.userId, userId)) - const currentUsage = statsRecords.length > 0 - ? parseFloat(statsRecords[0].totalCost.toString()) - : 0 - + const currentUsage = + statsRecords.length > 0 ? 
parseFloat(statsRecords[0].totalCost.toString()) : 0 + return { percentUsed: Math.min(Math.round((currentUsage / 1000) * 100), 100), isWarning: false, isExceeded: false, currentUsage, - limit: 1000 + limit: 1000, } } - + // Production environment - check real subscription limits - + // Get user's subscription details const isPro = await isProPlan(userId) const isTeam = await isTeamPlan(userId) - + logger.info('User subscription status', { userId, isPro, isTeam }) - + // Determine the limit based on subscription type let limit: number - + if (isTeam) { // For team plans, get the number of seats and multiply by per-seat limit const teamSeats = await getTeamSeats(userId) - const perSeatLimit = process.env.TEAM_TIER_COST_LIMIT - ? parseFloat(process.env.TEAM_TIER_COST_LIMIT) + const perSeatLimit = process.env.TEAM_TIER_COST_LIMIT + ? parseFloat(process.env.TEAM_TIER_COST_LIMIT) : 40 - + limit = perSeatLimit * teamSeats - - logger.info('Using team plan limit', { - userId, + + logger.info('Using team plan limit', { + userId, seats: teamSeats, perSeatLimit, - totalLimit: limit + totalLimit: limit, }) } else if (isPro) { // Pro plan has a fixed limit - limit = process.env.PRO_TIER_COST_LIMIT - ? parseFloat(process.env.PRO_TIER_COST_LIMIT) - : 20 - + limit = process.env.PRO_TIER_COST_LIMIT ? parseFloat(process.env.PRO_TIER_COST_LIMIT) : 20 + logger.info('Using pro plan limit', { userId, limit }) } else { // Free tier limit - limit = process.env.FREE_TIER_COST_LIMIT - ? parseFloat(process.env.FREE_TIER_COST_LIMIT) - : 5 - + limit = process.env.FREE_TIER_COST_LIMIT ? 
parseFloat(process.env.FREE_TIER_COST_LIMIT) : 5 + logger.info('Using free tier limit', { userId, limit }) } - + // Get actual usage from the database const statsRecords = await db.select().from(userStats).where(eq(userStats.userId, userId)) - + // If no stats record exists, create a default one if (statsRecords.length === 0) { logger.info('No usage stats found for user', { userId, limit }) - + return { percentUsed: 0, isWarning: false, isExceeded: false, currentUsage: 0, - limit + limit, } } - + // Get the current cost from the user stats const currentUsage = parseFloat(statsRecords[0].totalCost.toString()) - + // Calculate percentage used const percentUsed = Math.min(Math.round((currentUsage / limit) * 100), 100) - + // Check if usage exceeds threshold or limit const isWarning = percentUsed >= WARNING_THRESHOLD && percentUsed < 100 const isExceeded = currentUsage >= limit - - logger.info('Final usage statistics', { - userId, - currentUsage, - limit, - percentUsed, - isWarning, - isExceeded + + logger.info('Final usage statistics', { + userId, + currentUsage, + limit, + percentUsed, + isWarning, + isExceeded, }) - + return { percentUsed, isWarning, isExceeded, currentUsage, - limit + limit, } } catch (error) { - logger.error('Error checking usage status', { - error: error instanceof Error ? { message: error.message, stack: error.stack } : error, - userId + logger.error('Error checking usage status', { + error: error instanceof Error ? 
{ message: error.message, stack: error.stack } : error, + userId, }) - + // Return default values in case of error return { percentUsed: 0, isWarning: false, isExceeded: false, currentUsage: 0, - limit: 0 + limit: 0, } } } @@ -214,42 +208,48 @@ export async function checkAndNotifyUsage(userId: string): Promise { if (!isProd) { return } - + const usageData = await checkUsageStatus(userId) - + if (usageData.isExceeded) { // User has exceeded their limit - logger.warn('User has exceeded usage limits', { - userId, - usage: usageData.currentUsage, - limit: usageData.limit - }) - - // Dispatch event to show a UI notification - if (typeof window !== 'undefined') { - window.dispatchEvent(new CustomEvent('usage-exceeded', { - detail: { usageData } - })) - } - } else if (usageData.isWarning) { - // User is approaching their limit - logger.info('User approaching usage limits', { + logger.warn('User has exceeded usage limits', { userId, usage: usageData.currentUsage, limit: usageData.limit, - percent: usageData.percentUsed }) - + // Dispatch event to show a UI notification if (typeof window !== 'undefined') { - window.dispatchEvent(new CustomEvent('usage-warning', { - detail: { usageData } - })) - + window.dispatchEvent( + new CustomEvent('usage-exceeded', { + detail: { usageData }, + }) + ) + } + } else if (usageData.isWarning) { + // User is approaching their limit + logger.info('User approaching usage limits', { + userId, + usage: usageData.currentUsage, + limit: usageData.limit, + percent: usageData.percentUsed, + }) + + // Dispatch event to show a UI notification + if (typeof window !== 'undefined') { + window.dispatchEvent( + new CustomEvent('usage-warning', { + detail: { usageData }, + }) + ) + // Optionally open the subscription tab in settings - window.dispatchEvent(new CustomEvent('open-settings', { - detail: { tab: 'subscription' } - })) + window.dispatchEvent( + new CustomEvent('open-settings', { + detail: { tab: 'subscription' }, + }) + ) } } } catch (error) { @@ 
-260,15 +260,15 @@ export async function checkAndNotifyUsage(userId: string): Promise { /** * Server-side function to check if a user has exceeded their usage limits * For use in API routes, webhooks, and scheduled executions - * + * * @param userId The ID of the user to check * @returns An object containing the exceeded status and usage details */ export async function checkServerSideUsageLimits(userId: string): Promise<{ - isExceeded: boolean; - currentUsage: number; - limit: number; - message?: string; + isExceeded: boolean + currentUsage: number + limit: number + message?: string }> { try { // In development, always allow execution @@ -279,32 +279,32 @@ export async function checkServerSideUsageLimits(userId: string): Promise<{ limit: 1000, } } - + logger.info('Server-side checking usage limits for user', { userId }) - + // Get usage data using the same function we use for client-side const usageData = await checkUsageStatus(userId) - + return { isExceeded: usageData.isExceeded, currentUsage: usageData.currentUsage, limit: usageData.limit, - message: usageData.isExceeded + message: usageData.isExceeded ? `Usage limit exceeded: ${usageData.currentUsage.toFixed(2)}$ used of ${usageData.limit}$ limit. Please upgrade your plan to continue.` - : undefined + : undefined, } } catch (error) { - logger.error('Error in server-side usage limit check', { - error: error instanceof Error ? { message: error.message, stack: error.stack } : error, - userId + logger.error('Error in server-side usage limit check', { + error: error instanceof Error ? { message: error.message, stack: error.stack } : error, + userId, }) - + // Be conservative in case of error - allow execution but log the issue return { isExceeded: false, currentUsage: 0, limit: 0, - message: `Error checking usage limits: ${error instanceof Error ? error.message : String(error)}` + message: `Error checking usage limits: ${error instanceof Error ? 
error.message : String(error)}`, } } -} \ No newline at end of file +} diff --git a/sim/lib/utils.test.ts b/apps/sim/lib/utils.test.ts similarity index 100% rename from sim/lib/utils.test.ts rename to apps/sim/lib/utils.test.ts diff --git a/sim/lib/utils.ts b/apps/sim/lib/utils.ts similarity index 98% rename from sim/lib/utils.ts rename to apps/sim/lib/utils.ts index 43d04cd06..10e60ed01 100644 --- a/sim/lib/utils.ts +++ b/apps/sim/lib/utils.ts @@ -130,7 +130,7 @@ export function convertScheduleOptionsToCron( */ export function getTimezoneAbbreviation(timezone: string, date: Date = new Date()): string { if (timezone === 'UTC') return 'UTC' - + // Common timezone mappings const timezoneMap: Record = { 'America/Los_Angeles': { standard: 'PST', daylight: 'PDT' }, @@ -143,45 +143,45 @@ export function getTimezoneAbbreviation(timezone: string, date: Date = new Date( 'Australia/Sydney': { standard: 'AEST', daylight: 'AEDT' }, 'Asia/Singapore': { standard: 'SGT', daylight: 'SGT' }, // Singapore doesn't use DST } - + // If we have a mapping for this timezone if (timezone in timezoneMap) { // January 1 is guaranteed to be standard time in northern hemisphere // July 1 is guaranteed to be daylight time in northern hemisphere (if observed) const januaryDate = new Date(date.getFullYear(), 0, 1) const julyDate = new Date(date.getFullYear(), 6, 1) - + // Get offset in January (standard time) const januaryFormatter = new Intl.DateTimeFormat('en-US', { timeZone: timezone, - timeZoneName: 'short' + timeZoneName: 'short', }) - + // Get offset in July (likely daylight time) const julyFormatter = new Intl.DateTimeFormat('en-US', { timeZone: timezone, - timeZoneName: 'short' + timeZoneName: 'short', }) - + // If offsets are different, timezone observes DST const isDSTObserved = januaryFormatter.format(januaryDate) !== julyFormatter.format(julyDate) - + // If DST is observed, check if current date is in DST by comparing its offset // with January's offset (standard time) if 
(isDSTObserved) { const currentFormatter = new Intl.DateTimeFormat('en-US', { timeZone: timezone, - timeZoneName: 'short' + timeZoneName: 'short', }) - + const isDST = currentFormatter.format(date) !== januaryFormatter.format(januaryDate) return isDST ? timezoneMap[timezone].daylight : timezoneMap[timezone].standard } - + // If DST is not observed, always use standard return timezoneMap[timezone].standard } - + // For unknown timezones, use full IANA name return timezone } @@ -202,13 +202,13 @@ export function formatDateTime(date: Date, timezone?: string): string { hour12: true, timeZone: timezone || undefined, }) - + // If timezone is provided, add a friendly timezone abbreviation if (timezone) { const tzAbbr = getTimezoneAbbreviation(timezone, date) return `${formattedDate} ${tzAbbr}` } - + return formattedDate } diff --git a/sim/lib/variables/variable-manager.test.ts b/apps/sim/lib/variables/variable-manager.test.ts similarity index 100% rename from sim/lib/variables/variable-manager.test.ts rename to apps/sim/lib/variables/variable-manager.test.ts diff --git a/sim/lib/variables/variable-manager.ts b/apps/sim/lib/variables/variable-manager.ts similarity index 100% rename from sim/lib/variables/variable-manager.ts rename to apps/sim/lib/variables/variable-manager.ts diff --git a/sim/lib/waitlist/rate-limiter.ts b/apps/sim/lib/waitlist/rate-limiter.ts similarity index 100% rename from sim/lib/waitlist/rate-limiter.ts rename to apps/sim/lib/waitlist/rate-limiter.ts index 8efbe8e13..462f841d7 100644 --- a/sim/lib/waitlist/rate-limiter.ts +++ b/apps/sim/lib/waitlist/rate-limiter.ts @@ -1,6 +1,6 @@ import { NextRequest } from 'next/server' -import { getRedisClient } from '../redis' import { isProd } from '@/lib/environment' +import { getRedisClient } from '../redis' // Configuration const RATE_LIMIT_WINDOW = 60 // 1 minute window (in seconds) diff --git a/sim/lib/waitlist/service.ts b/apps/sim/lib/waitlist/service.ts similarity index 85% rename from 
sim/lib/waitlist/service.ts rename to apps/sim/lib/waitlist/service.ts index da74854ea..1e831d1db 100644 --- a/sim/lib/waitlist/service.ts +++ b/apps/sim/lib/waitlist/service.ts @@ -1,11 +1,11 @@ -import { and, count, desc, eq, like, or, SQL, inArray } from 'drizzle-orm' +import { and, count, desc, eq, inArray, like, or, SQL } from 'drizzle-orm' import { nanoid } from 'nanoid' import { getEmailSubject, renderWaitlistApprovalEmail, renderWaitlistConfirmationEmail, } from '@/components/emails/render-email' -import { sendEmail, sendBatchEmails } from '@/lib/mailer' +import { sendBatchEmails, sendEmail } from '@/lib/mailer' import { createToken, verifyToken } from '@/lib/waitlist/token' import { db } from '@/db' import { waitlist } from '@/db/schema' @@ -213,22 +213,24 @@ export async function approveWaitlistUser( // If email sending failed, don't update the user status if (!emailResult.success) { console.error('Error sending approval email:', emailResult.message) - + // Check if it's a rate limit error - if (emailResult.message?.toLowerCase().includes('rate') || - emailResult.message?.toLowerCase().includes('too many') || - emailResult.message?.toLowerCase().includes('limit')) { + if ( + emailResult.message?.toLowerCase().includes('rate') || + emailResult.message?.toLowerCase().includes('too many') || + emailResult.message?.toLowerCase().includes('limit') + ) { return { success: false, message: 'Rate limit exceeded for email sending', - rateLimited: true + rateLimited: true, } } - + return { success: false, message: emailResult.message || 'Failed to send approval email', - emailError: emailResult + emailError: emailResult, } } @@ -243,34 +245,36 @@ export async function approveWaitlistUser( return { success: true, - message: 'User approved and email sent' + message: 'User approved and email sent', } } catch (emailError) { console.error('Error sending approval email:', emailError) - + // Check if it's a rate limit error - if (emailError instanceof Error && - 
(emailError.message.toLowerCase().includes('rate') || - emailError.message.toLowerCase().includes('too many') || - emailError.message.toLowerCase().includes('limit'))) { + if ( + emailError instanceof Error && + (emailError.message.toLowerCase().includes('rate') || + emailError.message.toLowerCase().includes('too many') || + emailError.message.toLowerCase().includes('limit')) + ) { return { success: false, message: 'Rate limit exceeded for email sending', - rateLimited: true + rateLimited: true, } } - + return { success: false, message: 'Failed to send approval email', - emailError + emailError, } } } catch (error) { console.error('Error approving waitlist user:', error) return { success: false, - message: 'An error occurred while approving user' + message: 'An error occurred while approving user', } } } @@ -439,22 +443,24 @@ export async function resendApprovalEmail( // Check for email sending failures if (!emailResult.success) { console.error('Error sending approval email:', emailResult.message) - + // Check if it's a rate limit error - if (emailResult.message?.toLowerCase().includes('rate') || - emailResult.message?.toLowerCase().includes('too many') || - emailResult.message?.toLowerCase().includes('limit')) { + if ( + emailResult.message?.toLowerCase().includes('rate') || + emailResult.message?.toLowerCase().includes('too many') || + emailResult.message?.toLowerCase().includes('limit') + ) { return { success: false, message: 'Rate limit exceeded for email sending', - rateLimited: true + rateLimited: true, } } - + return { success: false, message: emailResult.message || 'Failed to send approval email', - emailError: emailResult + emailError: emailResult, } } @@ -464,23 +470,25 @@ export async function resendApprovalEmail( } } catch (emailError) { console.error('Error sending approval email:', emailError) - + // Check if it's a rate limit error - if (emailError instanceof Error && - (emailError.message.toLowerCase().includes('rate') || - 
emailError.message.toLowerCase().includes('too many') || - emailError.message.toLowerCase().includes('limit'))) { + if ( + emailError instanceof Error && + (emailError.message.toLowerCase().includes('rate') || + emailError.message.toLowerCase().includes('too many') || + emailError.message.toLowerCase().includes('limit')) + ) { return { success: false, message: 'Rate limit exceeded for email sending', - rateLimited: true + rateLimited: true, } } - + return { success: false, message: 'Failed to send approval email', - emailError + emailError, } } } catch (error) { @@ -493,13 +501,11 @@ export async function resendApprovalEmail( } // Approve multiple users from the waitlist and send approval emails in batches -export async function approveBatchWaitlistUsers( - emails: string[] -): Promise<{ - success: boolean - message: string - results: Array<{ email: string, success: boolean, message: string }> - emailErrors?: any +export async function approveBatchWaitlistUsers(emails: string[]): Promise<{ + success: boolean + message: string + results: Array<{ email: string; success: boolean; message: string }> + emailErrors?: any rateLimited?: boolean }> { try { @@ -512,8 +518,8 @@ export async function approveBatchWaitlistUsers( } // Fetch all users from the waitlist that match the emails - const normalizedEmails = emails.map(email => email.trim().toLowerCase()) - + const normalizedEmails = emails.map((email) => email.trim().toLowerCase()) + const users = await db .select() .from(waitlist) @@ -521,10 +527,7 @@ export async function approveBatchWaitlistUsers( and( inArray(waitlist.email, normalizedEmails), // Only select users who aren't already approved - or( - eq(waitlist.status, 'pending'), - eq(waitlist.status, 'rejected') - ) + or(eq(waitlist.status, 'pending'), eq(waitlist.status, 'rejected')) ) ) @@ -532,7 +535,7 @@ export async function approveBatchWaitlistUsers( return { success: false, message: 'No valid users found for approval', - results: emails.map(email => ({ + 
results: emails.map((email) => ({ email, success: false, message: 'User not found or already approved', @@ -542,7 +545,7 @@ export async function approveBatchWaitlistUsers( // Create email options for each user const emailOptions = await Promise.all( - users.map(async user => { + users.map(async (user) => { // Create a special signup token const token = await createToken({ email: user.email, @@ -571,7 +574,7 @@ export async function approveBatchWaitlistUsers( // Process results and update database const results = users.map((user, index) => { const emailResult = emailResults.results[index] - + if (emailResult?.success) { // Update user status to approved in database return { @@ -592,8 +595,8 @@ export async function approveBatchWaitlistUsers( // Update approved users in the database const successfulEmails = results - .filter(result => result.success) - .map(result => result.email) + .filter((result) => result.success) + .map((result) => result.email) if (successfulEmails.length > 0) { await db @@ -606,31 +609,34 @@ export async function approveBatchWaitlistUsers( and( inArray(waitlist.email, successfulEmails), // Only update users who aren't already approved - or( - eq(waitlist.status, 'pending'), - eq(waitlist.status, 'rejected') - ) + or(eq(waitlist.status, 'pending'), eq(waitlist.status, 'rejected')) ) ) } // Check if any rate limit errors occurred const rateLimitError = emailResults.results.some( - (result: { message?: string }) => - result.message?.toLowerCase().includes('rate') || + (result: { message?: string }) => + result.message?.toLowerCase().includes('rate') || result.message?.toLowerCase().includes('too many') || result.message?.toLowerCase().includes('limit') ) return { success: successfulEmails.length > 0, - message: successfulEmails.length === users.length - ? 'All users approved successfully' - : successfulEmails.length > 0 - ? 
'Some users approved successfully' - : 'Failed to approve any users', - results: results.map(({ email, success, message }: { email: string; success: boolean; message: string }) => - ({ email, success, message })), + message: + successfulEmails.length === users.length + ? 'All users approved successfully' + : successfulEmails.length > 0 + ? 'Some users approved successfully' + : 'Failed to approve any users', + results: results.map( + ({ email, success, message }: { email: string; success: boolean; message: string }) => ({ + email, + success, + message, + }) + ), emailErrors: emailResults.results.some((r: { success: boolean }) => !r.success), rateLimited: rateLimitError, } @@ -639,7 +645,7 @@ export async function approveBatchWaitlistUsers( return { success: false, message: 'An error occurred while approving users', - results: emails.map(email => ({ + results: emails.map((email) => ({ email, success: false, message: 'Operation failed due to server error', diff --git a/sim/lib/waitlist/token.ts b/apps/sim/lib/waitlist/token.ts similarity index 100% rename from sim/lib/waitlist/token.ts rename to apps/sim/lib/waitlist/token.ts diff --git a/sim/lib/webhooks/utils.ts b/apps/sim/lib/webhooks/utils.ts similarity index 89% rename from sim/lib/webhooks/utils.ts rename to apps/sim/lib/webhooks/utils.ts index bf5165f91..587673440 100644 --- a/sim/lib/webhooks/utils.ts +++ b/apps/sim/lib/webhooks/utils.ts @@ -4,6 +4,7 @@ import { v4 as uuidv4 } from 'uuid' import { createLogger } from '@/lib/logs/console-logger' import { persistExecutionError, persistExecutionLogs } from '@/lib/logs/execution-logger' import { buildTraceSpans } from '@/lib/logs/trace-spans' +import { hasProcessedMessage, markMessageAsProcessed } from '@/lib/redis' import { decryptSecret } from '@/lib/utils' import { updateWorkflowRunCounts } from '@/lib/workflows/utils' import { mergeSubblockStateAsync } from '@/stores/workflows/utils' @@ -12,7 +13,6 @@ import { db } from '@/db' import { environment, userStats, 
webhook } from '@/db/schema' import { Executor } from '@/executor' import { Serializer } from '@/serializer' -import { hasProcessedMessage, markMessageAsProcessed } from '@/lib/redis' const logger = createLogger('WebhookUtils') @@ -73,13 +73,11 @@ export async function handleWhatsAppVerification( /** * Handle Slack verification challenges */ -export function handleSlackChallenge( - body: any -): NextResponse | null { +export function handleSlackChallenge(body: any): NextResponse | null { if (body.type === 'url_verification' && body.challenge) { return NextResponse.json({ challenge: body.challenge }) } - + return null } @@ -150,7 +148,6 @@ export async function validateSlackSignature( } } - /** * Process WhatsApp message deduplication */ @@ -161,17 +158,17 @@ export async function processWhatsAppDeduplication( if (messages.length > 0) { const message = messages[0] const messageId = message.id - + if (messageId) { const whatsappMsgKey = `whatsapp:msg:${messageId}` - + try { const isDuplicate = await hasProcessedMessage(whatsappMsgKey) if (isDuplicate) { logger.info(`[${requestId}] Duplicate WhatsApp message detected: ${messageId}`) return new NextResponse('Duplicate message', { status: 200 }) } - + // Mark as processed BEFORE processing await markMessageAsProcessed(whatsappMsgKey, 60 * 60 * 24) } catch (error) { @@ -180,7 +177,7 @@ export async function processWhatsAppDeduplication( } } } - + return null } @@ -195,20 +192,20 @@ export async function processGenericDeduplication( try { const requestHash = await generateRequestHash(path, body) const genericMsgKey = `generic:${requestHash}` - + const isDuplicate = await hasProcessedMessage(genericMsgKey) if (isDuplicate) { logger.info(`[${requestId}] Duplicate request detected with hash: ${requestHash}`) return new NextResponse('Duplicate request', { status: 200 }) } - + // Mark as processed await markMessageAsProcessed(genericMsgKey, 60 * 60 * 24) } catch (error) { logger.error(`[${requestId}] Error in generic 
deduplication`, error) // Continue processing } - + return null } @@ -225,7 +222,7 @@ export function formatWebhookInput( // WhatsApp input formatting logic const data = body?.entry?.[0]?.changes?.[0]?.value const messages = data?.messages || [] - + if (messages.length > 0) { const message = messages[0] const phoneNumberId = data.metadata?.phone_number_id @@ -233,7 +230,7 @@ export function formatWebhookInput( const messageId = message.id const timestamp = message.timestamp const text = message.text?.body - + return { whatsapp: { data: { @@ -243,7 +240,7 @@ export function formatWebhookInput( text, timestamp, raw: message, - } + }, }, webhook: { data: { @@ -253,9 +250,9 @@ export function formatWebhookInput( payload: body, headers: Object.fromEntries(request.headers.entries()), method: request.method, - } + }, }, - workflowId: foundWorkflow.id + workflowId: foundWorkflow.id, } } else { return null @@ -271,9 +268,9 @@ export function formatWebhookInput( payload: body, headers: Object.fromEntries(request.headers.entries()), method: request.method, - } + }, }, - workflowId: foundWorkflow.id + workflowId: foundWorkflow.id, } } } @@ -283,7 +280,7 @@ export function formatWebhookInput( */ export async function executeWorkflowFromPayload( foundWorkflow: any, - input: any, + input: any, executionId: string, requestId: string ): Promise { @@ -293,79 +290,81 @@ export async function executeWorkflowFromPayload( executionId, triggerSource: 'webhook-payload', }) - + // DEBUG: Log specific payload details if (input?.airtableChanges) { logger.debug(`[${requestId}] TRACE: Execution received Airtable input`, { changeCount: input.airtableChanges.length, firstTableId: input.airtableChanges[0]?.tableId, - timestamp: new Date().toISOString() - }); + timestamp: new Date().toISOString(), + }) } - + // Validate and ensure proper input structure if (!input) { - logger.warn(`[${requestId}] Empty input for workflow execution, creating empty object`); - input = {}; + 
logger.warn(`[${requestId}] Empty input for workflow execution, creating empty object`) + input = {} } - + // Special handling for Airtable webhook inputs if (input.airtableChanges) { if (!Array.isArray(input.airtableChanges)) { - logger.warn(`[${requestId}] Invalid airtableChanges input type (${typeof input.airtableChanges}), converting to array`); + logger.warn( + `[${requestId}] Invalid airtableChanges input type (${typeof input.airtableChanges}), converting to array` + ) // Force to array if somehow not an array - input.airtableChanges = [input.airtableChanges]; + input.airtableChanges = [input.airtableChanges] } - + // Log the structure of the payload for debugging logger.info(`[${requestId}] Airtable webhook payload:`, { changeCount: input.airtableChanges.length, hasAirtableChanges: true, sampleTableIds: input.airtableChanges.slice(0, 2).map((c: any) => c.tableId), - }); + }) } - + // Log the full input format to help diagnose data issues logger.debug(`[${requestId}] Workflow input format:`, { inputKeys: Object.keys(input || {}), hasAirtableChanges: input && input.airtableChanges && Array.isArray(input.airtableChanges), airtableChangesCount: input?.airtableChanges?.length || 0, - }); - + }) + // Returns void as errors are handled internally try { // Get the workflow state if (!foundWorkflow.state) { logger.error(`[${requestId}] TRACE: Missing workflow state`, { workflowId: foundWorkflow.id, - hasState: false - }); + hasState: false, + }) throw new Error(`Workflow ${foundWorkflow.id} has no state`) } const state = foundWorkflow.state as any const { blocks, edges, loops } = state - + // DEBUG: Log state information logger.debug(`[${requestId}] TRACE: Retrieved workflow state`, { workflowId: foundWorkflow.id, blockCount: Object.keys(blocks || {}).length, edgeCount: (edges || []).length, - loopCount: (loops || []).length - }); + loopCount: (loops || []).length, + }) logger.debug( `[${requestId}] Merging subblock states for workflow ${foundWorkflow.id} (Execution: 
${executionId})` ) - - const mergeStartTime = Date.now(); + + const mergeStartTime = Date.now() const mergedStates = await mergeSubblockStateAsync(blocks, foundWorkflow.id) logger.debug(`[${requestId}] TRACE: State merging complete`, { duration: `${Date.now() - mergeStartTime}ms`, - mergedBlockCount: Object.keys(mergedStates).length - }); + mergedBlockCount: Object.keys(mergedStates).length, + }) // Retrieve and decrypt environment variables - const envStartTime = Date.now(); + const envStartTime = Date.now() const [userEnv] = await db .select() .from(environment) @@ -390,20 +389,20 @@ export async function executeWorkflowFromPayload( ) const decryptedEntries = await Promise.all(decryptionPromises) decryptedEnvVars = Object.fromEntries(decryptedEntries) - + // DEBUG: Log env vars retrieval logger.debug(`[${requestId}] TRACE: Environment variables decrypted`, { duration: `${Date.now() - envStartTime}ms`, - envVarCount: Object.keys(decryptedEnvVars).length - }); + envVarCount: Object.keys(decryptedEnvVars).length, + }) } else { logger.debug(`[${requestId}] TRACE: No environment variables found for user`, { - userId: foundWorkflow.userId - }); + userId: foundWorkflow.userId, + }) } // Process block states (extract subBlock values, parse responseFormat) - const blockStatesStartTime = Date.now(); + const blockStatesStartTime = Date.now() const currentBlockStates = Object.entries(mergedStates).reduce( (acc, [id, block]) => { acc[id] = Object.entries(block.subBlocks).reduce( @@ -453,15 +452,15 @@ export async function executeWorkflowFromPayload( }, {} as Record> ) - + // DEBUG: Log block state processing logger.debug(`[${requestId}] TRACE: Block states processed`, { duration: `${Date.now() - blockStatesStartTime}ms`, - blockCount: Object.keys(processedBlockStates).length - }); + blockCount: Object.keys(processedBlockStates).length, + }) // Serialize and get workflow variables - const serializeStartTime = Date.now(); + const serializeStartTime = Date.now() const 
serializedWorkflow = new Serializer().serializeWorkflow(mergedStates as any, edges, loops) let workflowVariables = {} if (foundWorkflow.variables) { @@ -478,47 +477,52 @@ export async function executeWorkflowFromPayload( ) } } - + // DEBUG: Log serialization completion logger.debug(`[${requestId}] TRACE: Workflow serialized`, { duration: `${Date.now() - serializeStartTime}ms`, - hasWorkflowVars: Object.keys(workflowVariables).length > 0 - }); + hasWorkflowVars: Object.keys(workflowVariables).length > 0, + }) logger.debug(`[${requestId}] Starting workflow execution`, { executionId, blockCount: Object.keys(processedBlockStates).length, }) - + // Log blocks for debugging (if any missing or invalid) if (Object.keys(processedBlockStates).length === 0) { - logger.error(`[${requestId}] No blocks found in workflow state - this will likely fail`); + logger.error(`[${requestId}] No blocks found in workflow state - this will likely fail`) } else { logger.debug(`[${requestId}] Block IDs for execution:`, { blockIds: Object.keys(processedBlockStates).slice(0, 5), // Log just a few block IDs for debugging - totalBlocks: Object.keys(processedBlockStates).length - }); + totalBlocks: Object.keys(processedBlockStates).length, + }) } - + // Ensure workflow variables exist if (!workflowVariables || Object.keys(workflowVariables).length === 0) { - logger.debug(`[${requestId}] No workflow variables defined, using empty object`); - workflowVariables = {}; + logger.debug(`[${requestId}] No workflow variables defined, using empty object`) + workflowVariables = {} } - + // Validate input format for Airtable webhooks to prevent common errors - if (input?.airtableChanges && (!Array.isArray(input.airtableChanges) || input.airtableChanges.length === 0)) { - logger.warn(`[${requestId}] Invalid Airtable input format - airtableChanges should be a non-empty array`); + if ( + input?.airtableChanges && + (!Array.isArray(input.airtableChanges) || input.airtableChanges.length === 0) + ) { + logger.warn( 
+ `[${requestId}] Invalid Airtable input format - airtableChanges should be a non-empty array` + ) } - + // DEBUG: Log critical moment before executor creation logger.info(`[${requestId}] TRACE: Creating workflow executor`, { workflowId: foundWorkflow.id, hasSerializedWorkflow: !!serializedWorkflow, blockCount: Object.keys(processedBlockStates).length, - timestamp: new Date().toISOString() - }); - + timestamp: new Date().toISOString(), + }) + const executor = new Executor( serializedWorkflow, processedBlockStates, @@ -526,54 +530,55 @@ export async function executeWorkflowFromPayload( input, // Use the provided input (might be single event or batch) workflowVariables ) - + // Log workflow execution start time for tracking - const executionStartTime = Date.now(); + const executionStartTime = Date.now() logger.info(`[${requestId}] TRACE: Executor instantiated, starting workflow execution now`, { workflowId: foundWorkflow.id, - timestamp: new Date().toISOString() - }); - + timestamp: new Date().toISOString(), + }) + // Add direct detailed logging right before executing - logger.info(`[${requestId}] EXECUTION_MONITOR: About to call executor.execute() - CRITICAL POINT`, { - workflowId: foundWorkflow.id, - executionId: executionId, - timestamp: new Date().toISOString() - }); - + logger.info( + `[${requestId}] EXECUTION_MONITOR: About to call executor.execute() - CRITICAL POINT`, + { + workflowId: foundWorkflow.id, + executionId: executionId, + timestamp: new Date().toISOString(), + } + ) + // This is THE critical line where the workflow actually executes const result = await executor.execute(foundWorkflow.id) - + // Check if we got a StreamingExecution result (with stream + execution properties) // For webhook executions, we only care about the ExecutionResult part, not the stream - const executionResult = 'stream' in result && 'execution' in result - ? result.execution - : result - + const executionResult = 'stream' in result && 'execution' in result ? 
result.execution : result + // Add direct detailed logging right after executing logger.info(`[${requestId}] EXECUTION_MONITOR: executor.execute() completed with result`, { workflowId: foundWorkflow.id, executionId: executionId, success: executionResult.success, resultType: result ? typeof result : 'undefined', - timestamp: new Date().toISOString() - }); - + timestamp: new Date().toISOString(), + }) + // Log completion and timing - const executionDuration = Date.now() - executionStartTime; + const executionDuration = Date.now() - executionStartTime logger.info(`[${requestId}] TRACE: Workflow execution completed`, { workflowId: foundWorkflow.id, success: executionResult.success, duration: `${executionDuration}ms`, actualDurationMs: executionDuration, - timestamp: new Date().toISOString() - }); - + timestamp: new Date().toISOString(), + }) + logger.info(`[${requestId}] Workflow execution finished`, { executionId, success: executionResult.success, durationMs: executionResult.metadata?.duration || executionDuration, - actualDurationMs: executionDuration + actualDurationMs: executionDuration, }) // Update counts and stats if successful @@ -586,12 +591,12 @@ export async function executeWorkflowFromPayload( lastActive: new Date(), }) .where(eq(userStats.userId, foundWorkflow.userId)) - + // DEBUG: Log stats update logger.debug(`[${requestId}] TRACE: Workflow stats updated`, { workflowId: foundWorkflow.id, - userId: foundWorkflow.userId - }); + userId: foundWorkflow.userId, + }) } // Build and enrich result with trace spans @@ -600,13 +605,13 @@ export async function executeWorkflowFromPayload( // Persist logs for this execution using the standard 'webhook' trigger type await persistExecutionLogs(foundWorkflow.id, executionId, enrichedResult, 'webhook') - + // DEBUG: Final success log logger.info(`[${requestId}] TRACE: Execution logs persisted successfully`, { workflowId: foundWorkflow.id, executionId, - timestamp: new Date().toISOString() - }); + timestamp: new 
Date().toISOString(), + }) } catch (error: any) { // DEBUG: Detailed error information logger.error(`[${requestId}] TRACE: Error during workflow execution`, { @@ -615,9 +620,9 @@ export async function executeWorkflowFromPayload( errorType: error.constructor.name, errorMessage: error.message, stack: error.stack, - timestamp: new Date().toISOString() - }); - + timestamp: new Date().toISOString(), + }) + logger.error(`[${requestId}] Error executing workflow`, { workflowId: foundWorkflow.id, executionId, @@ -653,9 +658,7 @@ export function verifyProviderWebhook( let isAuthenticated = false // Check for token in Authorization header (Bearer token) if (providerConfig.token) { - const providedToken = authHeader?.startsWith('Bearer ') - ? authHeader.substring(7) - : null + const providedToken = authHeader?.startsWith('Bearer ') ? authHeader.substring(7) : null if (providedToken === providerConfig.token) { isAuthenticated = true } @@ -701,7 +704,7 @@ export function verifyProviderWebhook( } } } - + return null } @@ -728,8 +731,8 @@ export async function fetchAndProcessAirtablePayloads( webhookId: webhookData.id, workflowId: workflowData.id, hasBaseId: !!localProviderConfig.baseId, - hasExternalId: !!localProviderConfig.externalId - }); + hasExternalId: !!localProviderConfig.externalId, + }) try { // --- Essential IDs & Config from localProviderConfig --- @@ -754,7 +757,9 @@ export async function fetchAndProcessAirtablePayloads( // Initialize cursor in provider config if missing if (storedCursor === undefined || storedCursor === null) { - logger.info(`[${requestId}] No cursor found in providerConfig for webhook ${webhookData.id}, initializing...`) + logger.info( + `[${requestId}] No cursor found in providerConfig for webhook ${webhookData.id}, initializing...` + ) // Update the local copy localProviderConfig.externalWebhookCursor = null @@ -788,10 +793,14 @@ export async function fetchAndProcessAirtablePayloads( if (storedCursor && typeof storedCursor === 'number') { 
currentCursor = storedCursor - logger.debug(`[${requestId}] Using stored cursor: ${currentCursor} for webhook ${webhookData.id}`) + logger.debug( + `[${requestId}] Using stored cursor: ${currentCursor} for webhook ${webhookData.id}` + ) } else { currentCursor = null // Airtable API defaults to 1 if omitted - logger.debug(`[${requestId}] No valid stored cursor for webhook ${webhookData.id}, starting from beginning`) + logger.debug( + `[${requestId}] No valid stored cursor for webhook ${webhookData.id}, starting from beginning` + ) } // --- Get OAuth Token --- @@ -849,24 +858,24 @@ export async function fetchAndProcessAirtablePayloads( logger.debug(`[${requestId}] Fetching Airtable payloads (call ${apiCallCount})`, { url: fullUrl, - webhookId: webhookData.id + webhookId: webhookData.id, }) try { - const fetchStartTime = Date.now(); + const fetchStartTime = Date.now() const response = await fetch(fullUrl, { method: 'GET', headers: { Authorization: `Bearer ${accessToken}`, 'Content-Type': 'application/json' }, }) - + // DEBUG: Log API response time logger.debug(`[${requestId}] TRACE: Airtable API response received`, { status: response.status, duration: `${Date.now() - fetchStartTime}ms`, hasBody: true, - apiCall: apiCallCount - }); - + apiCall: apiCallCount, + }) + const responseBody = await response.json() if (!response.ok || responseBody.error) { @@ -889,21 +898,23 @@ export async function fetchAndProcessAirtablePayloads( } const receivedPayloads = responseBody.payloads || [] - logger.debug(`[${requestId}] Received ${receivedPayloads.length} payloads from Airtable (call ${apiCallCount})`) + logger.debug( + `[${requestId}] Received ${receivedPayloads.length} payloads from Airtable (call ${apiCallCount})` + ) // --- Process and Consolidate Changes --- if (receivedPayloads.length > 0) { payloadsFetched += receivedPayloads.length - let changeCount = 0; + let changeCount = 0 for (const payload of receivedPayloads) { if (payload.changedTablesById) { // DEBUG: Log tables 
being processed - const tableIds = Object.keys(payload.changedTablesById); + const tableIds = Object.keys(payload.changedTablesById) logger.debug(`[${requestId}] TRACE: Processing changes for tables`, { tables: tableIds, - payloadTimestamp: payload.timestamp - }); - + payloadTimestamp: payload.timestamp, + }) + for (const [tableId, tableChangesUntyped] of Object.entries( payload.changedTablesById )) { @@ -911,11 +922,13 @@ export async function fetchAndProcessAirtablePayloads( // Handle created records if (tableChanges.createdRecordsById) { - const createdCount = Object.keys(tableChanges.createdRecordsById).length; - changeCount += createdCount; + const createdCount = Object.keys(tableChanges.createdRecordsById).length + changeCount += createdCount // DEBUG: Log created records count - logger.debug(`[${requestId}] TRACE: Processing ${createdCount} created records for table ${tableId}`); - + logger.debug( + `[${requestId}] TRACE: Processing ${createdCount} created records for table ${tableId}` + ) + for (const [recordId, recordDataUntyped] of Object.entries( tableChanges.createdRecordsById )) { @@ -942,11 +955,13 @@ export async function fetchAndProcessAirtablePayloads( // Handle updated records if (tableChanges.changedRecordsById) { - const updatedCount = Object.keys(tableChanges.changedRecordsById).length; - changeCount += updatedCount; + const updatedCount = Object.keys(tableChanges.changedRecordsById).length + changeCount += updatedCount // DEBUG: Log updated records count - logger.debug(`[${requestId}] TRACE: Processing ${updatedCount} updated records for table ${tableId}`); - + logger.debug( + `[${requestId}] TRACE: Processing ${updatedCount} updated records for table ${tableId}` + ) + for (const [recordId, recordDataUntyped] of Object.entries( tableChanges.changedRecordsById )) { @@ -982,11 +997,14 @@ export async function fetchAndProcessAirtablePayloads( } } } - + // DEBUG: Log totals for this batch - logger.debug(`[${requestId}] TRACE: Processed 
${changeCount} changes in API call ${apiCallCount}`, { - currentMapSize: consolidatedChangesMap.size - }); + logger.debug( + `[${requestId}] TRACE: Processed ${changeCount} changes in API call ${apiCallCount}`, + { + currentMapSize: consolidatedChangesMap.size, + } + ) } const nextCursor = responseBody.cursor @@ -995,7 +1013,7 @@ export async function fetchAndProcessAirtablePayloads( if (nextCursor && typeof nextCursor === 'number' && nextCursor !== currentCursor) { logger.debug(`[${requestId}] Updating cursor from ${currentCursor} to ${nextCursor}`) currentCursor = nextCursor - + // Follow exactly the old implementation - use awaited update instead of parallel const updatedConfig = { ...localProviderConfig, externalWebhookCursor: currentCursor } try { @@ -1054,42 +1072,45 @@ export async function fetchAndProcessAirtablePayloads( // Convert map values to array for final processing const finalConsolidatedChanges = Array.from(consolidatedChangesMap.values()) - logger.info(`[${requestId}] Consolidated ${finalConsolidatedChanges.length} Airtable changes across ${apiCallCount} API calls`) + logger.info( + `[${requestId}] Consolidated ${finalConsolidatedChanges.length} Airtable changes across ${apiCallCount} API calls` + ) // --- Execute Workflow if we have changes (simplified - no lock check) --- if (finalConsolidatedChanges.length > 0) { try { // Format the input for the executor using the consolidated changes const input = { airtableChanges: finalConsolidatedChanges } // Use the consolidated array - + // CRITICAL EXECUTION TRACE POINT - logger.info(`[${requestId}] CRITICAL_TRACE: Beginning workflow execution with ${finalConsolidatedChanges.length} Airtable changes`, { - workflowId: workflowData.id, - recordCount: finalConsolidatedChanges.length, - timestamp: new Date().toISOString(), - firstRecordId: finalConsolidatedChanges[0]?.recordId || 'none' - }); - + logger.info( + `[${requestId}] CRITICAL_TRACE: Beginning workflow execution with 
${finalConsolidatedChanges.length} Airtable changes`, + { + workflowId: workflowData.id, + recordCount: finalConsolidatedChanges.length, + timestamp: new Date().toISOString(), + firstRecordId: finalConsolidatedChanges[0]?.recordId || 'none', + } + ) + // Execute using the original requestId as the executionId // This is the exact point in the old code where execution happens - we're matching it exactly - await executeWorkflowFromPayload(workflowData, input, requestId, requestId); - + await executeWorkflowFromPayload(workflowData, input, requestId, requestId) + // COMPLETION LOG - This will only appear if execution succeeds logger.info(`[${requestId}] CRITICAL_TRACE: Workflow execution completed successfully`, { workflowId: workflowData.id, - timestamp: new Date().toISOString() - }); + timestamp: new Date().toISOString(), + }) } catch (executionError: any) { // Errors logged within executeWorkflowFromPayload - logger.error( - `[${requestId}] CRITICAL_TRACE: Workflow execution failed with error`, { - workflowId: workflowData.id, - error: executionError.message, - stack: executionError.stack, - timestamp: new Date().toISOString() - } - ); - + logger.error(`[${requestId}] CRITICAL_TRACE: Workflow execution failed with error`, { + workflowId: workflowData.id, + error: executionError.message, + stack: executionError.stack, + timestamp: new Date().toISOString(), + }) + logger.error( `[${requestId}] Error during workflow execution triggered by Airtable polling`, executionError @@ -1100,8 +1121,8 @@ export async function fetchAndProcessAirtablePayloads( logger.info(`[${requestId}] TRACE: No Airtable changes to process`, { workflowId: workflowData.id, apiCallCount, - webhookId: webhookData.id - }); + webhookId: webhookData.id, + }) } } catch (error) { // Catch any unexpected errors during the setup/polling logic itself @@ -1122,14 +1143,14 @@ export async function fetchAndProcessAirtablePayloads( 'webhook' ) } - + // DEBUG: Log function completion 
logger.debug(`[${requestId}] TRACE: fetchAndProcessAirtablePayloads completed`, { totalFetched: payloadsFetched, totalApiCalls: apiCallCount, totalChanges: consolidatedChangesMap.size, - timestamp: new Date().toISOString() - }); + timestamp: new Date().toISOString(), + }) } /** @@ -1146,15 +1167,15 @@ export async function processWebhook( try { // --- Handle Airtable differently - it should always use fetchAndProcessAirtablePayloads --- if (foundWebhook.provider === 'airtable') { - logger.info(`[${requestId}] Routing Airtable webhook through dedicated processor`); - + logger.info(`[${requestId}] Routing Airtable webhook through dedicated processor`) + // Use the dedicated Airtable payload fetcher and processor - await fetchAndProcessAirtablePayloads(foundWebhook, foundWorkflow, requestId); - + await fetchAndProcessAirtablePayloads(foundWebhook, foundWorkflow, requestId) + // Return standard success response - return NextResponse.json({ message: 'Airtable webhook processed' }, { status: 200 }); + return NextResponse.json({ message: 'Airtable webhook processed' }, { status: 200 }) } - + // --- Provider-specific Auth/Verification (excluding Airtable/WhatsApp/Slack handled earlier) --- if ( foundWebhook.provider && @@ -1168,7 +1189,7 @@ export async function processWebhook( // --- Format Input based on provider (excluding Airtable) --- const input = formatWebhookInput(foundWebhook, foundWorkflow, body, request) - + if (!input && foundWebhook.provider === 'whatsapp') { return new NextResponse('No messages in WhatsApp payload', { status: 200 }) } diff --git a/sim/lib/workflows/utils.ts b/apps/sim/lib/workflows/utils.ts similarity index 100% rename from sim/lib/workflows/utils.ts rename to apps/sim/lib/workflows/utils.ts diff --git a/sim/middleware.ts b/apps/sim/middleware.ts similarity index 85% rename from sim/middleware.ts rename to apps/sim/middleware.ts index 393938afb..258b1b378 100644 --- a/sim/middleware.ts +++ b/apps/sim/middleware.ts @@ -1,8 +1,8 @@ import { 
NextRequest, NextResponse } from 'next/server' import { getSessionCookie } from 'better-auth/cookies' -import { verifyToken } from './lib/waitlist/token' import { createLogger } from '@/lib/logs/console-logger' import { getBaseDomain } from '@/lib/urls/utils' +import { verifyToken } from './lib/waitlist/token' const logger = createLogger('Middleware') @@ -10,11 +10,11 @@ const logger = createLogger('Middleware') const isDevelopment = process.env.NODE_ENV === 'development' const SUSPICIOUS_UA_PATTERNS = [ - /^\s*$/, // Empty user agents - /\.\./, // Path traversal attempt - /<\s*script/i, // Potential XSS payloads - /^\(\)\s*{/, // Command execution attempt - /\b(sqlmap|nikto|gobuster|dirb|nmap)\b/i // Known scanning tools + /^\s*$/, // Empty user agents + /\.\./, // Path traversal attempt + /<\s*script/i, // Potential XSS payloads + /^\(\)\s*{/, // Command execution attempt + /\b(sqlmap|nikto|gobuster|dirb|nmap)\b/i, // Known scanning tools ] const BASE_DOMAIN = getBaseDomain() @@ -29,13 +29,14 @@ export async function middleware(request: NextRequest) { const url = request.nextUrl const hostname = request.headers.get('host') || '' - + // Extract subdomain - const isCustomDomain = hostname !== BASE_DOMAIN && - !hostname.startsWith('www.') && - hostname.includes(isDevelopment ? 'localhost' : 'simstudio.ai') + const isCustomDomain = + hostname !== BASE_DOMAIN && + !hostname.startsWith('www.') && + hostname.includes(isDevelopment ? 'localhost' : 'simstudio.ai') const subdomain = isCustomDomain ? 
hostname.split('.')[0] : null - + // Handle chat subdomains if (subdomain && isCustomDomain) { // Special case for API requests from the subdomain @@ -43,11 +44,11 @@ export async function middleware(request: NextRequest) { // Already an API request, let it go through return NextResponse.next() } - + // Rewrite to the chat page but preserve the URL in browser return NextResponse.rewrite(new URL(`/chat/${subdomain}${url.pathname}`, request.url)) } - + // Check if the path is exactly /w if (url.pathname === '/w') { return NextResponse.redirect(new URL('/w/1', request.url)) @@ -85,13 +86,13 @@ export async function middleware(request: NextRequest) { // Check for a waitlist token in the URL const waitlistToken = url.searchParams.get('token') - + // If there's a redirect to the invite page, bypass waitlist check const redirectParam = request.nextUrl.searchParams.get('redirect') if (redirectParam && redirectParam.startsWith('/invite/')) { return NextResponse.next() } - + // Validate the token if present if (waitlistToken) { try { @@ -127,18 +128,16 @@ export async function middleware(request: NextRequest) { } const userAgent = request.headers.get('user-agent') || '' - - const isSuspicious = SUSPICIOUS_UA_PATTERNS.some(pattern => - pattern.test(userAgent) - ) - + + const isSuspicious = SUSPICIOUS_UA_PATTERNS.some((pattern) => pattern.test(userAgent)) + if (isSuspicious) { logger.warn('Blocked suspicious request', { userAgent, ip: request.headers.get('x-forwarded-for') || 'unknown', url: request.url, method: request.method, - pattern: SUSPICIOUS_UA_PATTERNS.find(pattern => pattern.test(userAgent))?.toString() + pattern: SUSPICIOUS_UA_PATTERNS.find((pattern) => pattern.test(userAgent))?.toString(), }) // Return 403 with security headers @@ -151,16 +150,16 @@ export async function middleware(request: NextRequest) { 'X-Frame-Options': 'DENY', 'Content-Security-Policy': "default-src 'none'", 'Cache-Control': 'no-store, no-cache, must-revalidate, proxy-revalidate', - 'Pragma': 
'no-cache', - 'Expires': '0' - } + Pragma: 'no-cache', + Expires: '0', + }, }) } - + const response = NextResponse.next() - + response.headers.set('Vary', 'User-Agent') - + return response } @@ -172,6 +171,6 @@ export const config = { '/login', '/signup', '/invite/:path*', // Match invitation routes - '/((?!_next/static|_next/image|favicon.ico).*)' + '/((?!_next/static|_next/image|favicon.ico).*)', ], } diff --git a/apps/sim/next.config.ts b/apps/sim/next.config.ts new file mode 100644 index 000000000..c2ee00a88 --- /dev/null +++ b/apps/sim/next.config.ts @@ -0,0 +1,141 @@ +import type { NextConfig } from 'next' +import { withSentryConfig } from '@sentry/nextjs' +import path from 'path' + +const nextConfig: NextConfig = { + devIndicators: false, + images: { + domains: [ + 'avatars.githubusercontent.com', + 'oaidalleapiprodscus.blob.core.windows.net', + 'api.stability.ai', + ], + }, + output: process.env.NODE_ENV === 'development' ? 'standalone' : undefined, + turbopack: { + resolveExtensions: ['.tsx', '.ts', '.jsx', '.js', '.mjs', '.json'], + }, + experimental: { + optimizeCss: true, + }, + ...(process.env.NODE_ENV === 'development' && { + outputFileTracingRoot: path.join(__dirname, '../../'), + }), + webpack: (config, { isServer, dev }) => { + // Skip webpack configuration in development when using Turbopack + if (dev && process.env.NEXT_RUNTIME === 'turbopack') { + return config + } + + // Configure webpack to use filesystem cache for faster incremental builds + if (config.cache) { + config.cache = { + type: 'filesystem', + buildDependencies: { + config: [__filename], + }, + cacheDirectory: path.resolve(process.cwd(), '.next/cache/webpack'), + } + } + + // Avoid aliasing React on the server/edge runtime builds because it bypasses + // the "react-server" export condition, which Next.js relies on when + // bundling React Server Components and API route handlers. 
+ if (!isServer) { + config.resolve.alias = { + ...config.resolve.alias, + react: path.join(__dirname, '../../node_modules/react'), + 'react-dom': path.join(__dirname, '../../node_modules/react-dom'), + } + } + + return config + }, + transpilePackages: ['prettier', '@react-email/components', '@react-email/render'], + async headers() { + return [ + { + // API routes CORS headers + source: '/api/:path*', + headers: [ + { key: 'Access-Control-Allow-Credentials', value: 'true' }, + { + key: 'Access-Control-Allow-Origin', + value: 'https://localhost:3001', + }, + { + key: 'Access-Control-Allow-Methods', + value: 'GET,POST,OPTIONS,PUT,DELETE', + }, + { + key: 'Access-Control-Allow-Headers', + value: + 'X-CSRF-Token, X-Requested-With, Accept, Accept-Version, Content-Length, Content-MD5, Content-Type, Date, X-Api-Version', + }, + ], + }, + { + // Apply Cross-Origin Isolation headers to all routes except those that use the Google Drive Picker + source: '/((?!w/.*|api/auth/oauth/drive).*)', + headers: [ + { + key: 'Cross-Origin-Embedder-Policy', + value: 'require-corp', + }, + { + key: 'Cross-Origin-Opener-Policy', + value: 'same-origin', + }, + ], + }, + { + // For routes that use the Google Drive Picker, only apply COOP but not COEP + source: '/(w/.*|api/auth/oauth/drive)', + headers: [ + { + key: 'Cross-Origin-Opener-Policy', + value: 'same-origin', + }, + ], + }, + // Apply security headers to all routes + { + source: '/:path*', + headers: [ + { + key: 'X-Content-Type-Options', + value: 'nosniff', + }, + { + key: 'X-Frame-Options', + value: 'SAMEORIGIN', + }, + { + key: 'Content-Security-Policy', + value: `default-src 'self'; script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.google.com https://apis.google.com https://*.vercel-insights.com https://vercel.live https://*.vercel.live; style-src 'self' 'unsafe-inline' https://fonts.googleapis.com; img-src 'self' data: blob: https://*.googleusercontent.com https://*.google.com https://*.atlassian.com; media-src 'self' 
blob:; font-src 'self' https://fonts.gstatic.com; connect-src 'self' ${process.env.OLLAMA_HOST || 'http://localhost:11434'} https://*.googleapis.com https://*.amazonaws.com https://*.s3.amazonaws.com https://*.vercel-insights.com https://*.atlassian.com https://vercel.live https://*.vercel.live; frame-src https://drive.google.com https://*.google.com; frame-ancestors 'self'; form-action 'self'; base-uri 'self'; object-src 'none'`, + }, + ], + }, + ] + }, +} + +const sentryConfig = { + silent: true, + org: process.env.SENTRY_ORG || '', + project: process.env.SENTRY_PROJECT || '', + authToken: process.env.SENTRY_AUTH_TOKEN || undefined, + disableSourceMapUpload: process.env.NODE_ENV !== 'production', + autoInstrumentServerFunctions: process.env.NODE_ENV === 'production', + bundleSizeOptimizations: { + excludeDebugStatements: true, + excludePerformanceMonitoring: true, + excludeReplayIframe: true, + excludeReplayShadowDom: true, + excludeReplayWorker: true, + }, +} + +export default process.env.NODE_ENV === 'development' + ? 
nextConfig + : withSentryConfig(nextConfig, sentryConfig) diff --git a/sim/package-lock.json b/apps/sim/package-lock.json similarity index 100% rename from sim/package-lock.json rename to apps/sim/package-lock.json diff --git a/sim/package.json b/apps/sim/package.json similarity index 86% rename from sim/package.json rename to apps/sim/package.json index 594a3d8d3..5de735aa8 100644 --- a/sim/package.json +++ b/apps/sim/package.json @@ -5,27 +5,28 @@ "license": "Apache-2.0", "type": "module", "scripts": { - "dev": "next dev --turbo", - "dev:classic": "next dev", - "build": "next build", - "start": "next start", - "lint": "next lint", + "dev": "dotenv -- next dev --turbo --port 3000", + "dev:classic": "dotenv -- next dev", + "build": "dotenv -- next build", + "start": "dotenv -- next start", + "lint": "dotenv -- next lint", "format": "prettier --write .", "format:check": "prettier --check .", "prepare": "husky", - "db:push": "drizzle-kit push", - "db:studio": "drizzle-kit studio", - "test": "vitest run", - "test:watch": "vitest", - "test:coverage": "vitest run --coverage", - "email:dev": "email dev --dir components/emails", + "db:push": "dotenv -- drizzle-kit push", + "db:studio": "dotenv -- drizzle-kit studio", + "test": "dotenv -- vitest run", + "test:watch": "dotenv -- vitest", + "test:coverage": "dotenv -- vitest run --coverage", + "email:dev": "dotenv -- email dev --dir components/emails", "cli:build": "npm run build -w packages/simstudio", "cli:dev": "npm run build -w packages/simstudio && cd packages/simstudio && node ./dist/index.js", "cli:publish": "cd packages/simstudio && npm publish", "cli:start": "cd packages/simstudio && node ./dist/index.js start", "build:standalone": "node scripts/build-standalone.js", "build:cli": "npm run cli:build && npm run build:standalone", - "publish:cli": "npm run build:cli && npm run cli:publish" + "publish:cli": "npm run build:cli && npm run cli:publish", + "check-types": "tsc --noEmit" }, "dependencies": { 
"@anthropic-ai/sdk": "^0.39.0", @@ -45,7 +46,6 @@ "@opentelemetry/sdk-node": "^0.200.0", "@opentelemetry/sdk-trace-web": "^2.0.0", "@opentelemetry/semantic-conventions": "^1.32.0", - "@opentelemetry/web": "^0.24.0", "@radix-ui/react-alert-dialog": "^1.1.5", "@radix-ui/react-checkbox": "^1.1.3", "@radix-ui/react-collapsible": "^1.1.3", @@ -86,7 +86,7 @@ "ioredis": "^5.6.0", "jwt-decode": "^4.0.0", "lenis": "^1.2.3", - "lucide-react": "^0.469.0", + "lucide-react": "^0.479.0", "mammoth": "^1.9.0", "next": "^15.3.1", "next-themes": "^0.4.6", @@ -94,9 +94,9 @@ "pdf-parse": "^1.1.1", "postgres": "^3.4.5", "prismjs": "^1.30.0", - "react": "^18.2.0", - "react-day-picker": "^8.10.1", - "react-dom": "^18.2.0", + "react": "19.1.0", + "react-day-picker": "^9.4.2", + "react-dom": "19.1.0", "react-google-drive-picker": "^1.2.2", "react-hook-form": "^7.54.2", "react-markdown": "^10.1.0", @@ -141,9 +141,5 @@ "*.{js,jsx,ts,tsx,json,css,scss,md}": [ "prettier --write" ] - }, - "workspaces": [ - "packages/@sim/*", - "packages/simstudio" - ] + } } diff --git a/sim/postcss.config.mjs b/apps/sim/postcss.config.mjs similarity index 100% rename from sim/postcss.config.mjs rename to apps/sim/postcss.config.mjs diff --git a/sim/providers/anthropic/index.ts b/apps/sim/providers/anthropic/index.ts similarity index 95% rename from sim/providers/anthropic/index.ts rename to apps/sim/providers/anthropic/index.ts index 965c3db9d..cd5896e9c 100644 --- a/sim/providers/anthropic/index.ts +++ b/apps/sim/providers/anthropic/index.ts @@ -1,8 +1,8 @@ import Anthropic from '@anthropic-ai/sdk' import { createLogger } from '@/lib/logs/console-logger' +import { StreamingExecution } from '@/executor/types' import { executeTool } from '@/tools' import { ProviderConfig, ProviderRequest, ProviderResponse, TimeSegment } from '../types' -import { StreamingExecution } from '@/executor/types' import { prepareToolsWithUsageControl, trackForcedToolUsage } from '../utils' const logger = 
createLogger('AnthropicProvider') @@ -12,7 +12,9 @@ const logger = createLogger('AnthropicProvider') * ReadableStream of raw assistant text chunks. We enqueue only `content_block_delta` events * with `delta.type === 'text_delta'`, since that contains the incremental text tokens. */ -function createReadableStreamFromAnthropicStream(anthropicStream: AsyncIterable): ReadableStream { +function createReadableStreamFromAnthropicStream( + anthropicStream: AsyncIterable +): ReadableStream { return new ReadableStream({ async start(controller) { try { @@ -37,7 +39,9 @@ export const anthropicProvider: ProviderConfig = { models: ['claude-3-5-sonnet-20240620', 'claude-3-7-sonnet-20250219'], defaultModel: 'claude-3-7-sonnet-20250219', - executeRequest: async (request: ProviderRequest): Promise => { + executeRequest: async ( + request: ProviderRequest + ): Promise => { if (!request.apiKey) { throw new Error('API key is required for Anthropic') } @@ -275,9 +279,9 @@ ${fieldDescriptions} let tokenUsage = { prompt: 0, completion: 0, - total: 0 + total: 0, } - + // Create a StreamingExecution response with a readable stream const streamingResult = { stream: createReadableStreamFromAnthropicStream(streamResponse), @@ -293,21 +297,23 @@ ${fieldDescriptions} startTime: providerStartTimeISO, endTime: new Date().toISOString(), duration: Date.now() - providerStartTime, - timeSegments: [{ - type: 'model', - name: 'Streaming response', - startTime: providerStartTime, - endTime: Date.now(), - duration: Date.now() - providerStartTime, - }] + timeSegments: [ + { + type: 'model', + name: 'Streaming response', + startTime: providerStartTime, + endTime: Date.now(), + duration: Date.now() - providerStartTime, + }, + ], }, // Estimate token cost based on typical Claude pricing cost: { total: 0.0, input: 0.0, - output: 0.0 - } - } + output: 0.0, + }, + }, }, logs: [], // No block logs for direct streaming metadata: { @@ -315,10 +321,10 @@ ${fieldDescriptions} endTime: new Date().toISOString(), 
duration: Date.now() - providerStartTime, }, - isStreaming: true - } + isStreaming: true, + }, } - + // Return the streaming execution object return streamingResult as StreamingExecution } @@ -612,7 +618,7 @@ ${fieldDescriptions} // After all tool processing complete, if streaming was requested and we have messages, use streaming for the final response if (request.stream && iterationCount > 0) { logger.info('Using streaming for final Anthropic response after tool calls') - + // When streaming after tool calls with forced tools, make sure tool_choice is removed // This prevents the API from trying to force tool usage again in the final streaming response const streamingPayload = { @@ -621,10 +627,10 @@ ${fieldDescriptions} // For Anthropic, omit tool_choice entirely rather than setting it to 'none' stream: true, } - + // Remove the tool_choice parameter as Anthropic doesn't accept 'none' as a string value delete streamingPayload.tool_choice - + const streamResponse: any = await anthropic.messages.create(streamingPayload) // Create a StreamingExecution response with all collected data @@ -641,10 +647,13 @@ ${fieldDescriptions} completion: tokens.completion, total: tokens.total, }, - toolCalls: toolCalls.length > 0 ? { - list: toolCalls, - count: toolCalls.length - } : undefined, + toolCalls: + toolCalls.length > 0 + ? 
{ + list: toolCalls, + count: toolCalls.length, + } + : undefined, providerTiming: { startTime: providerStartTimeISO, endTime: new Date().toISOString(), @@ -656,11 +665,11 @@ ${fieldDescriptions} timeSegments: timeSegments, }, cost: { - total: (tokens.total || 0) * 0.0001, // Estimate cost based on tokens + total: (tokens.total || 0) * 0.0001, // Estimate cost based on tokens input: (tokens.prompt || 0) * 0.0001, - output: (tokens.completion || 0) * 0.0001 - } - } + output: (tokens.completion || 0) * 0.0001, + }, + }, }, logs: [], // No block logs at provider level metadata: { @@ -668,10 +677,10 @@ ${fieldDescriptions} endTime: new Date().toISOString(), duration: Date.now() - providerStartTime, }, - isStreaming: true - } + isStreaming: true, + }, } - + return streamingResult as StreamingExecution } diff --git a/sim/providers/cerebras/index.ts b/apps/sim/providers/cerebras/index.ts similarity index 93% rename from sim/providers/cerebras/index.ts rename to apps/sim/providers/cerebras/index.ts index 902ab5fd4..f78d6cc9a 100644 --- a/sim/providers/cerebras/index.ts +++ b/apps/sim/providers/cerebras/index.ts @@ -1,8 +1,8 @@ import { Cerebras } from '@cerebras/cerebras_cloud_sdk' import { createLogger } from '@/lib/logs/console-logger' +import { StreamingExecution } from '@/executor/types' import { executeTool } from '@/tools' import { ProviderConfig, ProviderRequest, ProviderResponse, TimeSegment } from '../types' -import { StreamingExecution } from '@/executor/types' const logger = createLogger('CerebrasProvider') @@ -10,7 +10,9 @@ const logger = createLogger('CerebrasProvider') * Helper to convert a Cerebras streaming response (async iterable) into a ReadableStream. * Enqueues only the model's text delta chunks as UTF-8 encoded bytes. 
*/ -function createReadableStreamFromCerebrasStream(cerebrasStream: AsyncIterable): ReadableStream { +function createReadableStreamFromCerebrasStream( + cerebrasStream: AsyncIterable +): ReadableStream { return new ReadableStream({ async start(controller) { try { @@ -25,7 +27,7 @@ function createReadableStreamFromCerebrasStream(cerebrasStream: AsyncIterable => { + executeRequest: async ( + request: ProviderRequest + ): Promise => { if (!request.apiKey) { throw new Error('API key is required for Cerebras') } @@ -137,14 +141,14 @@ export const cerebrasProvider: ProviderConfig = { ...payload, stream: true, }) - + // Start collecting token usage let tokenUsage = { prompt: 0, completion: 0, - total: 0 + total: 0, } - + // Create a StreamingExecution response with a readable stream const streamingResult = { stream: createReadableStreamFromCerebrasStream(streamResponse), @@ -160,21 +164,23 @@ export const cerebrasProvider: ProviderConfig = { startTime: providerStartTimeISO, endTime: new Date().toISOString(), duration: Date.now() - providerStartTime, - timeSegments: [{ - type: 'model', - name: 'Streaming response', - startTime: providerStartTime, - endTime: Date.now(), - duration: Date.now() - providerStartTime, - }] + timeSegments: [ + { + type: 'model', + name: 'Streaming response', + startTime: providerStartTime, + endTime: Date.now(), + duration: Date.now() - providerStartTime, + }, + ], }, // Estimate token cost cost: { total: 0.0, input: 0.0, - output: 0.0 - } - } + output: 0.0, + }, + }, }, logs: [], // No block logs for direct streaming metadata: { @@ -182,10 +188,10 @@ export const cerebrasProvider: ProviderConfig = { endTime: new Date().toISOString(), duration: Date.now() - providerStartTime, }, - isStreaming: true - } + isStreaming: true, + }, } - + // Return the streaming execution object return streamingResult as StreamingExecution } @@ -435,18 +441,18 @@ export const cerebrasProvider: ProviderConfig = { // POST-TOOL-STREAMING: stream after tool calls if 
requested if (request.stream && iterationCount > 0) { logger.info('Using streaming for final Cerebras response after tool calls') - + // When streaming after tool calls with forced tools, make sure tool_choice is set to 'auto' // This prevents the API from trying to force tool usage again in the final streaming response const streamingPayload = { ...payload, messages: currentMessages, - tool_choice: 'auto', // Always use 'auto' for the streaming response after tool calls + tool_choice: 'auto', // Always use 'auto' for the streaming response after tool calls stream: true, } - + const streamResponse: any = await client.chat.completions.create(streamingPayload) - + // Create a StreamingExecution response with all collected data const streamingResult = { stream: createReadableStreamFromCerebrasStream(streamResponse), @@ -461,10 +467,13 @@ export const cerebrasProvider: ProviderConfig = { completion: tokens.completion, total: tokens.total, }, - toolCalls: toolCalls.length > 0 ? { - list: toolCalls, - count: toolCalls.length - } : undefined, + toolCalls: + toolCalls.length > 0 + ? 
{ + list: toolCalls, + count: toolCalls.length, + } + : undefined, providerTiming: { startTime: providerStartTimeISO, endTime: new Date().toISOString(), @@ -478,9 +487,9 @@ export const cerebrasProvider: ProviderConfig = { cost: { total: (tokens.total || 0) * 0.0001, input: (tokens.prompt || 0) * 0.0001, - output: (tokens.completion || 0) * 0.0001 - } - } + output: (tokens.completion || 0) * 0.0001, + }, + }, }, logs: [], // No block logs at provider level metadata: { @@ -488,10 +497,10 @@ export const cerebrasProvider: ProviderConfig = { endTime: new Date().toISOString(), duration: Date.now() - providerStartTime, }, - isStreaming: true - } + isStreaming: true, + }, } - + // Return the streaming execution object return streamingResult as StreamingExecution } diff --git a/sim/providers/cerebras/types.ts b/apps/sim/providers/cerebras/types.ts similarity index 100% rename from sim/providers/cerebras/types.ts rename to apps/sim/providers/cerebras/types.ts diff --git a/sim/providers/deepseek/index.ts b/apps/sim/providers/deepseek/index.ts similarity index 94% rename from sim/providers/deepseek/index.ts rename to apps/sim/providers/deepseek/index.ts index a48c4e1d0..6f9f3bb98 100644 --- a/sim/providers/deepseek/index.ts +++ b/apps/sim/providers/deepseek/index.ts @@ -1,8 +1,8 @@ import OpenAI from 'openai' import { createLogger } from '@/lib/logs/console-logger' +import { StreamingExecution } from '@/executor/types' import { executeTool } from '@/tools' import { ProviderConfig, ProviderRequest, ProviderResponse, TimeSegment } from '../types' -import { StreamingExecution } from '@/executor/types' import { prepareToolsWithUsageControl, trackForcedToolUsage } from '../utils' const logger = createLogger('DeepseekProvider') @@ -25,7 +25,7 @@ function createReadableStreamFromDeepseekStream(deepseekStream: any): ReadableSt } catch (error) { controller.error(error) } - } + }, }) } @@ -37,7 +37,9 @@ export const deepseekProvider: ProviderConfig = { models: ['deepseek-chat'], 
defaultModel: 'deepseek-chat', - executeRequest: async (request: ProviderRequest): Promise => { + executeRequest: async ( + request: ProviderRequest + ): Promise => { if (!request.apiKey) { throw new Error('API key is required for Deepseek') } @@ -129,19 +131,19 @@ export const deepseekProvider: ProviderConfig = { // EARLY STREAMING: if streaming requested and no tools to execute, stream directly if (request.stream && (!tools || tools.length === 0)) { logger.info('Using streaming response for DeepSeek request (no tools)') - + const streamResponse = await deepseek.chat.completions.create({ ...payload, stream: true, }) - + // Start collecting token usage let tokenUsage = { prompt: 0, completion: 0, - total: 0 + total: 0, } - + // Create a StreamingExecution response with a readable stream const streamingResult = { stream: createReadableStreamFromDeepseekStream(streamResponse), @@ -157,21 +159,23 @@ export const deepseekProvider: ProviderConfig = { startTime: providerStartTimeISO, endTime: new Date().toISOString(), duration: Date.now() - providerStartTime, - timeSegments: [{ - type: 'model', - name: 'Streaming response', - startTime: providerStartTime, - endTime: Date.now(), - duration: Date.now() - providerStartTime, - }] + timeSegments: [ + { + type: 'model', + name: 'Streaming response', + startTime: providerStartTime, + endTime: Date.now(), + duration: Date.now() - providerStartTime, + }, + ], }, // Estimate token cost cost: { total: 0.0, input: 0.0, - output: 0.0 - } - } + output: 0.0, + }, + }, }, logs: [], // No block logs for direct streaming metadata: { @@ -179,10 +183,10 @@ export const deepseekProvider: ProviderConfig = { endTime: new Date().toISOString(), duration: Date.now() - providerStartTime, }, - isStreaming: true - } + isStreaming: true, + }, } - + // Return the streaming execution object return streamingResult as StreamingExecution } @@ -437,7 +441,7 @@ export const deepseekProvider: ProviderConfig = { // POST-TOOL STREAMING: stream final response 
after tool calls if requested if (request.stream && iterationCount > 0) { logger.info('Using streaming for final DeepSeek response after tool calls') - + // When streaming after tool calls with forced tools, make sure tool_choice is set to 'auto' // This prevents the API from trying to force tool usage again in the final streaming response const streamingPayload = { @@ -446,9 +450,9 @@ export const deepseekProvider: ProviderConfig = { tool_choice: 'auto', // Always use 'auto' for the streaming response after tool calls stream: true, } - + const streamResponse = await deepseek.chat.completions.create(streamingPayload) - + // Create a StreamingExecution response with all collected data const streamingResult = { stream: createReadableStreamFromDeepseekStream(streamResponse), @@ -463,10 +467,13 @@ export const deepseekProvider: ProviderConfig = { completion: tokens.completion, total: tokens.total, }, - toolCalls: toolCalls.length > 0 ? { - list: toolCalls, - count: toolCalls.length - } : undefined, + toolCalls: + toolCalls.length > 0 + ? 
{ + list: toolCalls, + count: toolCalls.length, + } + : undefined, providerTiming: { startTime: providerStartTimeISO, endTime: new Date().toISOString(), @@ -480,9 +487,9 @@ export const deepseekProvider: ProviderConfig = { cost: { total: (tokens.total || 0) * 0.0001, input: (tokens.prompt || 0) * 0.0001, - output: (tokens.completion || 0) * 0.0001 - } - } + output: (tokens.completion || 0) * 0.0001, + }, + }, }, logs: [], // No block logs at provider level metadata: { @@ -490,10 +497,10 @@ export const deepseekProvider: ProviderConfig = { endTime: new Date().toISOString(), duration: Date.now() - providerStartTime, }, - isStreaming: true - } + isStreaming: true, + }, } - + // Return the streaming execution object return streamingResult as StreamingExecution } diff --git a/sim/providers/google/index.ts b/apps/sim/providers/google/index.ts similarity index 83% rename from sim/providers/google/index.ts rename to apps/sim/providers/google/index.ts index 7c109fc34..04d30b6b9 100644 --- a/sim/providers/google/index.ts +++ b/apps/sim/providers/google/index.ts @@ -1,7 +1,7 @@ import { createLogger } from '@/lib/logs/console-logger' +import { StreamingExecution } from '@/executor/types' import { executeTool } from '@/tools' import { ProviderConfig, ProviderRequest, ProviderResponse, TimeSegment } from '../types' -import { StreamingExecution } from '@/executor/types' const logger = createLogger('GoogleProvider') @@ -13,41 +13,41 @@ function createReadableStreamFromGeminiStream(response: Response): ReadableStrea if (!reader) { throw new Error('Failed to get reader from response body') } - + return new ReadableStream({ async start(controller) { try { let buffer = '' - + while (true) { const { done, value } = await reader.read() if (done) { controller.close() break } - + const text = new TextDecoder().decode(value) buffer += text - + try { const lines = buffer.split('\n') buffer = '' - + for (let i = 0; i < lines.length; i++) { const line = lines[i].trim() - + if (i === 
lines.length - 1 && line !== '') { buffer = line continue } - + if (!line) continue - + if (line.startsWith('data: ')) { const jsonStr = line.substring(6) - + if (jsonStr === '[DONE]') continue - + try { const data = JSON.parse(jsonStr) const candidate = data.candidates?.[0] @@ -60,7 +60,7 @@ function createReadableStreamFromGeminiStream(response: Response): ReadableStrea } catch (e) { logger.error('Error parsing Gemini SSE JSON data', { error: e instanceof Error ? e.message : String(e), - data: jsonStr + data: jsonStr, }) } } @@ -68,20 +68,20 @@ function createReadableStreamFromGeminiStream(response: Response): ReadableStrea } catch (e) { logger.error('Error processing Gemini SSE stream', { error: e instanceof Error ? e.message : String(e), - chunk: text + chunk: text, }) } } } catch (e) { logger.error('Error reading Google Gemini stream', { - error: e instanceof Error ? e.message : String(e) + error: e instanceof Error ? e.message : String(e), }) controller.error(e) } }, async cancel() { await reader.cancel() - } + }, }) } @@ -93,7 +93,9 @@ export const googleProvider: ProviderConfig = { models: ['gemini-2.5-pro-exp-03-25', 'gemini-2.5-flash-preview-04-17'], defaultModel: 'gemini-2.5-pro-exp-03-25', - executeRequest: async (request: ProviderRequest): Promise => { + executeRequest: async ( + request: ProviderRequest + ): Promise => { if (!request.apiKey) { throw new Error('API key is required for Google Gemini') } @@ -107,7 +109,7 @@ export const googleProvider: ProviderConfig = { hasResponseFormat: !!request.responseFormat, streaming: !!request.stream, }) - + // Start execution timer for the entire provider execution const providerStartTime = Date.now() const providerStartTimeISO = new Date(providerStartTime).toISOString() @@ -115,20 +117,20 @@ export const googleProvider: ProviderConfig = { try { // Convert messages to Gemini format const { contents, tools, systemInstruction } = convertToGeminiFormat(request) - + const requestedModel = request.model || 
'gemini-2.5-pro-exp-03-25' - + // Build request payload const payload: any = { contents, - generationConfig: {} + generationConfig: {}, } - + // Add temperature if specified if (request.temperature !== undefined && request.temperature !== null) { payload.generationConfig.temperature = request.temperature } - + // Add max tokens if specified if (request.maxTokens !== undefined) { payload.generationConfig.maxOutputTokens = request.maxTokens @@ -142,71 +144,70 @@ export const googleProvider: ProviderConfig = { // Add structured output format if requested if (request.responseFormat) { const responseFormatSchema = request.responseFormat.schema || request.responseFormat - + // Clean the schema using our helper function const cleanSchema = cleanSchemaForGemini(responseFormatSchema) - + // Use Gemini's native structured output approach payload.generationConfig.responseMimeType = 'application/json' payload.generationConfig.responseSchema = cleanSchema - + logger.info('Using Gemini native structured output format', { hasSchema: !!cleanSchema, - mimeType: 'application/json' + mimeType: 'application/json', }) } - + // Add tools if provided if (tools?.length) { - payload.tools = [{ - functionDeclarations: tools - }] - + payload.tools = [ + { + functionDeclarations: tools, + }, + ] + logger.info(`Google Gemini request with tools:`, { toolCount: tools.length, model: requestedModel, - tools: tools.map(t => t.name) + tools: tools.map((t) => t.name), }) } // Make the API request const initialCallTime = Date.now() - + // For streaming requests, add the alt=sse parameter to the URL - const endpoint = request.stream - ? `https://generativelanguage.googleapis.com/v1beta/models/${requestedModel}:generateContent?key=${request.apiKey}&alt=sse` + const endpoint = request.stream + ? 
`https://generativelanguage.googleapis.com/v1beta/models/${requestedModel}:generateContent?key=${request.apiKey}&alt=sse` : `https://generativelanguage.googleapis.com/v1beta/models/${requestedModel}:generateContent?key=${request.apiKey}` - - const response = await fetch( - endpoint, - { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - }, - body: JSON.stringify(payload), - } - ) + + const response = await fetch(endpoint, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify(payload), + }) if (!response.ok) { const responseText = await response.text() - logger.error('Gemini API error details:', { - status: response.status, + logger.error('Gemini API error details:', { + status: response.status, statusText: response.statusText, - responseBody: responseText + responseBody: responseText, }) throw new Error(`Gemini API error: ${response.status} ${response.statusText}`) } const firstResponseTime = Date.now() - initialCallTime - + // Handle streaming response if (request.stream) { logger.info('Handling Google Gemini streaming response') - + // Create a ReadableStream from the Google Gemini stream const stream = createReadableStreamFromGeminiStream(response) - + // Create an object that combines the stream with execution metadata const streamingExecution: StreamingExecution = { stream, @@ -229,20 +230,22 @@ export const googleProvider: ProviderConfig = { toolsTime: 0, firstResponseTime, iterations: 1, - timeSegments: [{ - type: 'model', - name: 'Initial streaming response', - startTime: initialCallTime, - endTime: initialCallTime + firstResponseTime, - duration: firstResponseTime, - }], + timeSegments: [ + { + type: 'model', + name: 'Initial streaming response', + startTime: initialCallTime, + endTime: initialCallTime + firstResponseTime, + duration: firstResponseTime, + }, + ], cost: { total: 0.0, // Initial estimate, updated as tokens are processed input: 0.0, - output: 0.0 - } - } - } + output: 0.0, + }, 
+ }, + }, }, logs: [], metadata: { @@ -250,15 +253,15 @@ export const googleProvider: ProviderConfig = { endTime: new Date().toISOString(), duration: firstResponseTime, }, - isStreaming: true - } + isStreaming: true, + }, } - + return streamingExecution } - + let geminiResponse = await response.json() - + // Check structured output format if (payload.generationConfig?.responseSchema) { const candidate = geminiResponse.candidates?.[0] @@ -273,7 +276,7 @@ export const googleProvider: ProviderConfig = { } } } - + // Initialize response tracking variables let content = '' let tokens = { @@ -304,41 +307,43 @@ export const googleProvider: ProviderConfig = { try { // Extract content or function calls from initial response const candidate = geminiResponse.candidates?.[0] - + // Check if response contains function calls const functionCall = extractFunctionCall(candidate) - + if (functionCall) { logger.info(`Received function call from Gemini: ${functionCall.name}`) - + // Process function calls in a loop while (iterationCount < MAX_ITERATIONS) { // Get the latest function calls const latestResponse = geminiResponse.candidates?.[0] const latestFunctionCall = extractFunctionCall(latestResponse) - + if (!latestFunctionCall) { // No more function calls - extract final text content content = extractTextContent(latestResponse) break } - - logger.info(`Processing function call: ${latestFunctionCall.name} (iteration ${iterationCount + 1}/${MAX_ITERATIONS})`) - + + logger.info( + `Processing function call: ${latestFunctionCall.name} (iteration ${iterationCount + 1}/${MAX_ITERATIONS})` + ) + // Track time for tool calls const toolsStartTime = Date.now() - + try { const toolName = latestFunctionCall.name const toolArgs = latestFunctionCall.args || {} - + // Get the tool from the tools registry const tool = request.tools?.find((t) => t.id === toolName) if (!tool) { logger.warn(`Tool ${toolName} not found in registry, skipping`) break } - + // First, identify parameters marked as 
requiredForToolCall const requiredToolCallParams: Record = {} if (tool.params) { @@ -349,24 +354,24 @@ export const googleProvider: ProviderConfig = { } }) } - + // Execute the tool const toolCallStartTime = Date.now() - + // Merge arguments in the correct order of precedence: // 1. Default parameters from tool.params // 2. Arguments from the model's function call (toolArgs) // 3. Parameters marked as requiredForToolCall (these should always be preserved) // 4. Workflow context if needed const mergedArgs = { - ...tool.params, // Default parameters defined for the tool - ...toolArgs, // Arguments from the model's function call + ...tool.params, // Default parameters defined for the tool + ...toolArgs, // Arguments from the model's function call ...requiredToolCallParams, // Required parameters from the tool definition (take precedence) ...(request.workflowId ? { _context: { workflowId: request.workflowId } } : {}), } - + // For debugging only - don't log actual API keys - logger.debug(`Executing tool ${toolName} with parameters:`, { + logger.debug(`Executing tool ${toolName} with parameters:`, { parameterKeys: Object.keys(mergedArgs), hasRequiredParams: Object.keys(requiredToolCallParams).length > 0, requiredParamKeys: Object.keys(requiredToolCallParams), @@ -374,29 +379,33 @@ export const googleProvider: ProviderConfig = { const result = await executeTool(toolName, mergedArgs, true) const toolCallEndTime = Date.now() const toolCallDuration = toolCallEndTime - toolCallStartTime - + if (!result.success) { // Check for API key related errors const errorMessage = result.error?.toLowerCase() || '' - if (errorMessage.includes('api key') || errorMessage.includes('apikey') || - errorMessage.includes('x-api-key') || errorMessage.includes('authentication')) { + if ( + errorMessage.includes('api key') || + errorMessage.includes('apikey') || + errorMessage.includes('x-api-key') || + errorMessage.includes('authentication') + ) { logger.error(`Tool ${toolName} failed with API 
key error:`, { error: result.error, - toolRequiresKey: true + toolRequiresKey: true, }) - + // Add a more helpful error message for the user content = `Error: The ${toolName} tool requires a valid API key. Please ensure you've provided the correct API key for this specific service.` } else { // Regular error handling - logger.warn(`Tool ${toolName} execution failed`, { + logger.warn(`Tool ${toolName} execution failed`, { error: result.error, - duration: toolCallDuration + duration: toolCallDuration, }) } break } - + // Add to time segments timeSegments.push({ type: 'tool', @@ -405,7 +414,7 @@ export const googleProvider: ProviderConfig = { endTime: toolCallEndTime, duration: toolCallDuration, }) - + // Track results toolResults.push(result.output) toolCalls.push({ @@ -416,40 +425,44 @@ export const googleProvider: ProviderConfig = { duration: toolCallDuration, result: result.output, }) - + // Prepare for next request with simplified messages // Use simple format: original query + most recent function call + result const simplifiedMessages = [ // Original user request - find the first user request - ...(contents.filter(m => m.role === 'user').length > 0 - ? [contents.filter(m => m.role === 'user')[0]] + ...(contents.filter((m) => m.role === 'user').length > 0 + ? 
[contents.filter((m) => m.role === 'user')[0]] : [contents[0]]), // Function call from model { role: 'model', - parts: [{ - functionCall: { - name: latestFunctionCall.name, - args: latestFunctionCall.args - } - }] + parts: [ + { + functionCall: { + name: latestFunctionCall.name, + args: latestFunctionCall.args, + }, + }, + ], }, // Function response - but use USER role since Gemini only accepts user or model { role: 'user', - parts: [{ - text: `Function ${latestFunctionCall.name} result: ${JSON.stringify(toolResults[toolResults.length - 1])}` - }] - } + parts: [ + { + text: `Function ${latestFunctionCall.name} result: ${JSON.stringify(toolResults[toolResults.length - 1])}`, + }, + ], + }, ] - + // Calculate tool call time const thisToolsTime = Date.now() - toolsStartTime toolsTime += thisToolsTime - + // Make the next request with updated messages const nextModelStartTime = Date.now() - + try { // Check if we should stream the final response after tool calls if (request.stream) { @@ -459,12 +472,12 @@ export const googleProvider: ProviderConfig = { contents: simplifiedMessages, tool_config: { mode: 'AUTO' }, // Always use AUTO mode for streaming after tools } - + // Remove any forced tool configuration to prevent issues with streaming if ('tool_config' in streamingPayload) { - streamingPayload.tool_config = { mode: 'AUTO' }; + streamingPayload.tool_config = { mode: 'AUTO' } } - + // Make the streaming request with alt=sse parameter const streamingResponse = await fetch( `https://generativelanguage.googleapis.com/v1beta/models/${requestedModel}:generateContent?key=${request.apiKey}&alt=sse`, @@ -476,25 +489,27 @@ export const googleProvider: ProviderConfig = { body: JSON.stringify(streamingPayload), } ) - + if (!streamingResponse.ok) { const errorBody = await streamingResponse.text() - logger.error('Error in Gemini streaming follow-up request:', { + logger.error('Error in Gemini streaming follow-up request:', { status: streamingResponse.status, statusText: 
streamingResponse.statusText, - responseBody: errorBody + responseBody: errorBody, }) - throw new Error(`Gemini API streaming error: ${streamingResponse.status} ${streamingResponse.statusText}`) + throw new Error( + `Gemini API streaming error: ${streamingResponse.status} ${streamingResponse.statusText}` + ) } - + // Create a stream from the response const stream = createReadableStreamFromGeminiStream(streamingResponse) - + // Calculate timing information const nextModelEndTime = Date.now() const thisModelTime = nextModelEndTime - nextModelStartTime modelTime += thisModelTime - + // Add to time segments timeSegments.push({ type: 'model', @@ -503,7 +518,7 @@ export const googleProvider: ProviderConfig = { endTime: nextModelEndTime, duration: thisModelTime, }) - + // Return a streaming execution with tool call information const streamingExecution: StreamingExecution = { stream, @@ -514,10 +529,13 @@ export const googleProvider: ProviderConfig = { content: '', model: request.model, tokens, - toolCalls: toolCalls.length > 0 ? { - list: toolCalls, - count: toolCalls.length - } : undefined, + toolCalls: + toolCalls.length > 0 + ? 
{ + list: toolCalls, + count: toolCalls.length, + } + : undefined, toolResults, providerTiming: { startTime: providerStartTimeISO, @@ -530,11 +548,11 @@ export const googleProvider: ProviderConfig = { timeSegments, }, cost: { - total: (tokens.total || 0) * 0.0001, // Estimate cost based on tokens + total: (tokens.total || 0) * 0.0001, // Estimate cost based on tokens input: (tokens.prompt || 0) * 0.0001, - output: (tokens.completion || 0) * 0.0001 - } - } + output: (tokens.completion || 0) * 0.0001, + }, + }, }, logs: [], metadata: { @@ -542,13 +560,13 @@ export const googleProvider: ProviderConfig = { endTime: new Date().toISOString(), duration: Date.now() - providerStartTime, }, - isStreaming: true - } + isStreaming: true, + }, } - + return streamingExecution } - + // Make the next request for non-streaming response const nextResponse = await fetch( `https://generativelanguage.googleapis.com/v1beta/models/${requestedModel}:generateContent?key=${request.apiKey}`, @@ -559,27 +577,27 @@ export const googleProvider: ProviderConfig = { }, body: JSON.stringify({ ...payload, - contents: simplifiedMessages + contents: simplifiedMessages, }), } ) - + if (!nextResponse.ok) { const errorBody = await nextResponse.text() - logger.error('Error in Gemini follow-up request:', { + logger.error('Error in Gemini follow-up request:', { status: nextResponse.status, statusText: nextResponse.statusText, responseBody: errorBody, - iterationCount + iterationCount, }) break } - + geminiResponse = await nextResponse.json() - + const nextModelEndTime = Date.now() const thisModelTime = nextModelEndTime - nextModelStartTime - + // Add to time segments timeSegments.push({ type: 'model', @@ -588,31 +606,31 @@ export const googleProvider: ProviderConfig = { endTime: nextModelEndTime, duration: thisModelTime, }) - + // Add to model time modelTime += thisModelTime - + // Check if we need to continue or break const nextCandidate = geminiResponse.candidates?.[0] const nextFunctionCall = 
extractFunctionCall(nextCandidate) - + if (!nextFunctionCall) { content = extractTextContent(nextCandidate) break } - + iterationCount++ } catch (error) { - logger.error('Error in Gemini follow-up request:', { + logger.error('Error in Gemini follow-up request:', { error: error instanceof Error ? error.message : String(error), - iterationCount + iterationCount, }) break } } catch (error) { - logger.error('Error processing function call:', { + logger.error('Error processing function call:', { error: error instanceof Error ? error.message : String(error), - functionName: latestFunctionCall?.name || 'unknown' + functionName: latestFunctionCall?.name || 'unknown', }) break } @@ -622,14 +640,14 @@ export const googleProvider: ProviderConfig = { content = extractTextContent(candidate) } } catch (error) { - logger.error('Error processing Gemini response:', { + logger.error('Error processing Gemini response:', { error: error instanceof Error ? error.message : String(error), - iterationCount + iterationCount, }) - + // Don't rethrow, so we can still return partial results if (!content && toolCalls.length > 0) { - content = `Tool call(s) executed: ${toolCalls.map(t => t.name).join(', ')}. Results are available in the tool results.` + content = `Tool call(s) executed: ${toolCalls.map((t) => t.name).join(', ')}. 
Results are available in the tool results.` } } @@ -643,8 +661,9 @@ export const googleProvider: ProviderConfig = { tokens = { prompt: geminiResponse.usageMetadata.promptTokenCount || 0, completion: geminiResponse.usageMetadata.candidatesTokenCount || 0, - total: (geminiResponse.usageMetadata.promptTokenCount || 0) + - (geminiResponse.usageMetadata.candidatesTokenCount || 0), + total: + (geminiResponse.usageMetadata.promptTokenCount || 0) + + (geminiResponse.usageMetadata.candidatesTokenCount || 0), } } @@ -699,21 +718,21 @@ function cleanSchemaForGemini(schema: any): any { if (schema === null || schema === undefined) return schema if (typeof schema !== 'object') return schema if (Array.isArray(schema)) { - return schema.map(item => cleanSchemaForGemini(item)) + return schema.map((item) => cleanSchemaForGemini(item)) } - + // Create a new object for the deep copy const cleanedSchema: any = {} - + // Process each property in the schema for (const key in schema) { // Skip additionalProperties if (key === 'additionalProperties') continue - + // Deep copy nested objects cleanedSchema[key] = cleanSchemaForGemini(schema[key]) } - + return cleanedSchema } @@ -722,18 +741,20 @@ function cleanSchemaForGemini(schema: any): any { */ function extractTextContent(candidate: any): string { if (!candidate?.content?.parts) return '' - + // Check for JSON response (typically from structured output) if (candidate.content.parts?.length === 1 && candidate.content.parts[0].text) { const text = candidate.content.parts[0].text if (text && (text.trim().startsWith('{') || text.trim().startsWith('['))) { try { JSON.parse(text) // Validate JSON - return text // Return valid JSON as-is - } catch (e) { /* Not valid JSON, continue with normal extraction */ } + return text // Return valid JSON as-is + } catch (e) { + /* Not valid JSON, continue with normal extraction */ + } } } - + // Standard text extraction return candidate.content.parts .filter((part: any) => part.text) @@ -744,15 +765,18 @@ 
function extractTextContent(candidate: any): string { /** * Helper function to extract a function call from a Gemini response */ -function extractFunctionCall(candidate: any): { name: string, args: any } | null { +function extractFunctionCall(candidate: any): { name: string; args: any } | null { if (!candidate?.content?.parts) return null - + // Check for functionCall in parts for (const part of candidate.content.parts) { if (part.functionCall) { const args = part.functionCall.args || {} // Parse string args if they look like JSON - if (typeof part.functionCall.args === 'string' && part.functionCall.args.trim().startsWith('{')) { + if ( + typeof part.functionCall.args === 'string' && + part.functionCall.args.trim().startsWith('{') + ) { try { return { name: part.functionCall.name, args: JSON.parse(part.functionCall.args) } } catch (e) { @@ -762,39 +786,40 @@ function extractFunctionCall(candidate: any): { name: string, args: any } | null return { name: part.functionCall.name, args } } } - + // Check for alternative function_call format if (candidate.content.function_call) { - const args = typeof candidate.content.function_call.arguments === 'string' - ? JSON.parse(candidate.content.function_call.arguments || '{}') - : candidate.content.function_call.arguments || {} + const args = + typeof candidate.content.function_call.arguments === 'string' + ? 
JSON.parse(candidate.content.function_call.arguments || '{}') + : candidate.content.function_call.arguments || {} return { name: candidate.content.function_call.name, args } } - + return null } /** * Convert OpenAI-style request format to Gemini format */ -function convertToGeminiFormat(request: ProviderRequest): { - contents: any[], - tools: any[] | undefined, +function convertToGeminiFormat(request: ProviderRequest): { + contents: any[] + tools: any[] | undefined systemInstruction: any | undefined } { const contents = [] let systemInstruction = undefined - + // Handle system prompt if (request.systemPrompt) { systemInstruction = { parts: [{ text: request.systemPrompt }] } } - + // Add context as user message if present if (request.context) { contents.push({ role: 'user', parts: [{ text: request.context }] }) } - + // Process messages if (request.messages && request.messages.length > 0) { for (const message of request.messages) { @@ -809,78 +834,78 @@ function convertToGeminiFormat(request: ProviderRequest): { } else if (message.role === 'user' || message.role === 'assistant') { // Convert to Gemini role format const geminiRole = message.role === 'user' ? 
'user' : 'model' - + // Add text content if (message.content) { contents.push({ role: geminiRole, parts: [{ text: message.content }] }) } - + // Handle tool calls if (message.role === 'assistant' && message.tool_calls && message.tool_calls.length > 0) { - const functionCalls = message.tool_calls.map(toolCall => ({ + const functionCalls = message.tool_calls.map((toolCall) => ({ functionCall: { name: toolCall.function?.name, - args: JSON.parse(toolCall.function?.arguments || '{}') - } + args: JSON.parse(toolCall.function?.arguments || '{}'), + }, })) - + contents.push({ role: 'model', parts: functionCalls }) } } else if (message.role === 'tool') { // Convert tool response (Gemini only accepts user/model roles) contents.push({ role: 'user', - parts: [{ text: `Function result: ${message.content}` }] + parts: [{ text: `Function result: ${message.content}` }], }) } } } - + // Convert tools to Gemini function declarations - const tools = request.tools?.map(tool => { + const tools = request.tools?.map((tool) => { const toolParameters = { ...(tool.parameters || {}) } - + // Process schema properties if (toolParameters.properties) { const properties = { ...toolParameters.properties } let required = toolParameters.required ? [...toolParameters.required] : [] - + // Remove defaults and optional parameters for (const key in properties) { const prop = properties[key] as any - + if (prop.default !== undefined) { const { default: _, ...cleanProp } = prop properties[key] = cleanProp } - + if (tool.params?.[key]?.requiredForToolCall && required.includes(key)) { - required = required.filter(r => r !== key) + required = required.filter((r) => r !== key) } } - + // Build Gemini-compatible parameters schema const parameters = { - type: toolParameters.type || "object", + type: toolParameters.type || 'object', properties, - ...(required.length > 0 ? { required } : {}) + ...(required.length > 0 ? 
{ required } : {}), } - + // Clean schema for Gemini return { name: tool.id, description: tool.description || `Execute the ${tool.id} function`, - parameters: cleanSchemaForGemini(parameters) + parameters: cleanSchemaForGemini(parameters), } } - + // Simple schema case return { name: tool.id, description: tool.description || `Execute the ${tool.id} function`, - parameters: cleanSchemaForGemini(toolParameters) + parameters: cleanSchemaForGemini(toolParameters), } }) - + return { contents, tools, systemInstruction } } diff --git a/sim/providers/groq/index.ts b/apps/sim/providers/groq/index.ts similarity index 93% rename from sim/providers/groq/index.ts rename to apps/sim/providers/groq/index.ts index 3f5ffe113..3dfe2c8f5 100644 --- a/sim/providers/groq/index.ts +++ b/apps/sim/providers/groq/index.ts @@ -1,8 +1,8 @@ import { Groq } from 'groq-sdk' import { createLogger } from '@/lib/logs/console-logger' +import { StreamingExecution } from '@/executor/types' import { executeTool } from '@/tools' import { ProviderConfig, ProviderRequest, ProviderResponse, TimeSegment } from '../types' -import { StreamingExecution } from '@/executor/types' const logger = createLogger('GroqProvider') @@ -39,7 +39,9 @@ export const groqProvider: ProviderConfig = { ], defaultModel: 'groq/meta-llama/llama-4-scout-17b-16e-instruct', - executeRequest: async (request: ProviderRequest): Promise => { + executeRequest: async ( + request: ProviderRequest + ): Promise => { if (!request.apiKey) { throw new Error('API key is required for Groq') } @@ -85,7 +87,10 @@ export const groqProvider: ProviderConfig = { // Build the request payload const payload: any = { - model: (request.model || 'groq/meta-llama/llama-4-scout-17b-16e-instruct').replace('groq/', ''), + model: (request.model || 'groq/meta-llama/llama-4-scout-17b-16e-instruct').replace( + 'groq/', + '' + ), messages: allMessages, } @@ -132,7 +137,7 @@ export const groqProvider: ProviderConfig = { // Start execution timer for the entire provider 
execution const providerStartTime = Date.now() const providerStartTimeISO = new Date(providerStartTime).toISOString() - + const streamResponse = await groq.chat.completions.create({ ...payload, stream: true, @@ -142,9 +147,9 @@ export const groqProvider: ProviderConfig = { let tokenUsage = { prompt: 0, completion: 0, - total: 0 + total: 0, } - + // Create a StreamingExecution response with a readable stream const streamingResult = { stream: createReadableStreamFromGroqStream(streamResponse), @@ -160,20 +165,22 @@ export const groqProvider: ProviderConfig = { startTime: providerStartTimeISO, endTime: new Date().toISOString(), duration: Date.now() - providerStartTime, - timeSegments: [{ - type: 'model', - name: 'Streaming response', - startTime: providerStartTime, - endTime: Date.now(), - duration: Date.now() - providerStartTime, - }] + timeSegments: [ + { + type: 'model', + name: 'Streaming response', + startTime: providerStartTime, + endTime: Date.now(), + duration: Date.now() - providerStartTime, + }, + ], }, cost: { total: 0.0, input: 0.0, - output: 0.0 - } - } + output: 0.0, + }, + }, }, logs: [], // No block logs for direct streaming metadata: { @@ -181,10 +188,10 @@ export const groqProvider: ProviderConfig = { endTime: new Date().toISOString(), duration: Date.now() - providerStartTime, }, - isStreaming: true - } + isStreaming: true, + }, } - + // Return the streaming execution object return streamingResult as StreamingExecution } @@ -364,7 +371,7 @@ export const groqProvider: ProviderConfig = { const streamingPayload = { ...payload, messages: currentMessages, - tool_choice: 'auto', // Always use 'auto' for the streaming response after tool calls + tool_choice: 'auto', // Always use 'auto' for the streaming response after tool calls stream: true, } @@ -384,10 +391,13 @@ export const groqProvider: ProviderConfig = { completion: tokens.completion, total: tokens.total, }, - toolCalls: toolCalls.length > 0 ? 
{ - list: toolCalls, - count: toolCalls.length - } : undefined, + toolCalls: + toolCalls.length > 0 + ? { + list: toolCalls, + count: toolCalls.length, + } + : undefined, providerTiming: { startTime: providerStartTimeISO, endTime: new Date().toISOString(), @@ -401,9 +411,9 @@ export const groqProvider: ProviderConfig = { cost: { total: (tokens.total || 0) * 0.0001, input: (tokens.prompt || 0) * 0.0001, - output: (tokens.completion || 0) * 0.0001 - } - } + output: (tokens.completion || 0) * 0.0001, + }, + }, }, logs: [], // No block logs at provider level metadata: { @@ -411,10 +421,10 @@ export const groqProvider: ProviderConfig = { endTime: new Date().toISOString(), duration: Date.now() - providerStartTime, }, - isStreaming: true - } + isStreaming: true, + }, } - + // Return the streaming execution object return streamingResult as StreamingExecution } diff --git a/sim/providers/index.ts b/apps/sim/providers/index.ts similarity index 99% rename from sim/providers/index.ts rename to apps/sim/providers/index.ts index 60cdb1914..588b95283 100644 --- a/sim/providers/index.ts +++ b/apps/sim/providers/index.ts @@ -1,8 +1,8 @@ import { createLogger } from '@/lib/logs/console-logger' +import { StreamingExecution } from '@/executor/types' import { supportsTemperature } from './model-capabilities' import { ProviderRequest, ProviderResponse } from './types' import { calculateCost, generateStructuredOutputInstructions, getProvider } from './utils' -import { StreamingExecution } from '@/executor/types' const logger = createLogger('Providers') @@ -81,12 +81,12 @@ export async function executeProviderRequest( logger.info(`Provider returned StreamingExecution`) return response } - + if (isReadableStream(response)) { logger.info(`Provider returned ReadableStream`) return response } - + // At this point, we know we have a ProviderResponse logger.info(`Provider response received`, { contentLength: response.content ? 
response.content.length : 0, diff --git a/sim/providers/model-capabilities.test.ts b/apps/sim/providers/model-capabilities.test.ts similarity index 91% rename from sim/providers/model-capabilities.test.ts rename to apps/sim/providers/model-capabilities.test.ts index c9d0d2df0..6d462c7a0 100644 --- a/sim/providers/model-capabilities.test.ts +++ b/apps/sim/providers/model-capabilities.test.ts @@ -30,7 +30,7 @@ describe('supportsTemperature', () => { 'gemini-2.5-flash-preview-04-17', 'claude-3-5-sonnet-20240620', `grok-3-latest`, - `grok-3-fast-latest` + `grok-3-fast-latest`, ] for (const model of supportedModels) { @@ -57,7 +57,12 @@ describe('getMaxTemperature', () => { }) it('should return 1 for models with temperature range 0-1', () => { - const models = ['claude-3-5-sonnet-20240620', 'claude-3-7-sonnet-20250219', 'grok-3-latest', 'grok-3-fast-latest'] + const models = [ + 'claude-3-5-sonnet-20240620', + 'claude-3-7-sonnet-20250219', + 'grok-3-latest', + 'grok-3-fast-latest', + ] for (const model of models) { expect(getMaxTemperature(model)).toBe(1) diff --git a/sim/providers/model-capabilities.ts b/apps/sim/providers/model-capabilities.ts similarity index 100% rename from sim/providers/model-capabilities.ts rename to apps/sim/providers/model-capabilities.ts diff --git a/sim/providers/ollama/index.ts b/apps/sim/providers/ollama/index.ts similarity index 100% rename from sim/providers/ollama/index.ts rename to apps/sim/providers/ollama/index.ts diff --git a/sim/providers/ollama/types.ts b/apps/sim/providers/ollama/types.ts similarity index 100% rename from sim/providers/ollama/types.ts rename to apps/sim/providers/ollama/types.ts diff --git a/sim/providers/openai/index.ts b/apps/sim/providers/openai/index.ts similarity index 93% rename from sim/providers/openai/index.ts rename to apps/sim/providers/openai/index.ts index d70769595..11e707b2a 100644 --- a/sim/providers/openai/index.ts +++ b/apps/sim/providers/openai/index.ts @@ -1,9 +1,9 @@ import OpenAI from 
'openai' import { createLogger } from '@/lib/logs/console-logger' +import { StreamingExecution } from '@/executor/types' import { executeTool } from '@/tools' import { ProviderConfig, ProviderRequest, ProviderResponse, TimeSegment } from '../types' import { prepareToolsWithUsageControl, trackForcedToolUsage } from '../utils' -import { StreamingExecution } from '@/executor/types' const logger = createLogger('OpenAIProvider') @@ -11,10 +11,13 @@ const logger = createLogger('OpenAIProvider') * Helper function to convert an OpenAI stream to a standard ReadableStream * and collect completion metrics */ -function createReadableStreamFromOpenAIStream(openaiStream: any, onComplete?: (content: string, usage?: any) => void): ReadableStream { +function createReadableStreamFromOpenAIStream( + openaiStream: any, + onComplete?: (content: string, usage?: any) => void +): ReadableStream { let fullContent = '' let usageData: any = null - + return new ReadableStream({ async start(controller) { try { @@ -23,24 +26,24 @@ function createReadableStreamFromOpenAIStream(openaiStream: any, onComplete?: (c if (chunk.usage) { usageData = chunk.usage } - + const content = chunk.choices[0]?.delta?.content || '' if (content) { fullContent += content controller.enqueue(new TextEncoder().encode(content)) } } - + // Once stream is complete, call the completion callback with the final content and usage if (onComplete) { onComplete(fullContent, usageData) } - + controller.close() } catch (error) { controller.error(error) } - } + }, }) } @@ -55,7 +58,9 @@ export const openaiProvider: ProviderConfig = { models: ['gpt-4o', 'o1', 'o3', 'o4-mini'], defaultModel: 'gpt-4o', - executeRequest: async (request: ProviderRequest): Promise => { + executeRequest: async ( + request: ProviderRequest + ): Promise => { logger.info('Preparing OpenAI request', { model: request.model || 'gpt-4o', hasSystemPrompt: !!request.systemPrompt, @@ -166,55 +171,58 @@ export const openaiProvider: ProviderConfig = { // Check if we 
can stream directly (no tools required) if (request.stream && (!tools || tools.length === 0)) { logger.info('Using streaming response for OpenAI request') - + // Create a streaming request with token usage tracking const streamResponse = await openai.chat.completions.create({ ...payload, stream: true, stream_options: { include_usage: true }, }) - + // Start collecting token usage from the stream let tokenUsage = { prompt: 0, completion: 0, - total: 0 + total: 0, } - + let streamContent = '' - + // Create a StreamingExecution response with a callback to update content and tokens const streamingResult = { stream: createReadableStreamFromOpenAIStream(streamResponse, (content, usage) => { // Update the execution data with the final content and token usage streamContent = content streamingResult.execution.output.response.content = content - + // Update the timing information with the actual completion time const streamEndTime = Date.now() const streamEndTimeISO = new Date(streamEndTime).toISOString() - + if (streamingResult.execution.output.response.providerTiming) { streamingResult.execution.output.response.providerTiming.endTime = streamEndTimeISO - streamingResult.execution.output.response.providerTiming.duration = streamEndTime - providerStartTime - + streamingResult.execution.output.response.providerTiming.duration = + streamEndTime - providerStartTime + // Update the time segment as well if (streamingResult.execution.output.response.providerTiming.timeSegments?.[0]) { - streamingResult.execution.output.response.providerTiming.timeSegments[0].endTime = streamEndTime - streamingResult.execution.output.response.providerTiming.timeSegments[0].duration = streamEndTime - providerStartTime + streamingResult.execution.output.response.providerTiming.timeSegments[0].endTime = + streamEndTime + streamingResult.execution.output.response.providerTiming.timeSegments[0].duration = + streamEndTime - providerStartTime } } - + // Update token usage if available from the stream if 
(usage) { const newTokens = { prompt: usage.prompt_tokens || tokenUsage.prompt, completion: usage.completion_tokens || tokenUsage.completion, - total: usage.total_tokens || tokenUsage.total + total: usage.total_tokens || tokenUsage.total, } - + streamingResult.execution.output.response.tokens = newTokens - } + } // We don't need to estimate tokens here as execution-logger.ts will handle that }), execution: { @@ -229,26 +237,28 @@ export const openaiProvider: ProviderConfig = { startTime: providerStartTimeISO, endTime: new Date().toISOString(), duration: Date.now() - providerStartTime, - timeSegments: [{ - type: 'model', - name: 'Streaming response', - startTime: providerStartTime, - endTime: Date.now(), - duration: Date.now() - providerStartTime, - }] - } + timeSegments: [ + { + type: 'model', + name: 'Streaming response', + startTime: providerStartTime, + endTime: Date.now(), + duration: Date.now() - providerStartTime, + }, + ], + }, // Cost will be calculated in execution-logger.ts - } + }, }, logs: [], // No block logs for direct streaming metadata: { startTime: providerStartTimeISO, endTime: new Date().toISOString(), duration: Date.now() - providerStartTime, - } - } + }, + }, } as StreamingExecution - + // Return the streaming execution object with explicit casting return streamingResult as StreamingExecution } @@ -351,7 +361,7 @@ export const openaiProvider: ProviderConfig = { ...toolArgs, ...(request.workflowId ? 
{ _context: { workflowId: request.workflowId } } : {}), } - + const result = await executeTool(toolName, mergedArgs, true) const toolCallEndTime = Date.now() const toolCallDuration = toolCallEndTime - toolCallStartTime @@ -477,36 +487,36 @@ export const openaiProvider: ProviderConfig = { // After all tool processing complete, if streaming was requested and we have messages, use streaming for the final response if (request.stream && iterationCount > 0) { logger.info('Using streaming for final response after tool calls') - + // When streaming after tool calls with forced tools, make sure tool_choice is set to 'auto' // This prevents OpenAI API from trying to force tool usage again in the final streaming response const streamingPayload = { ...payload, messages: currentMessages, - tool_choice: 'auto', // Always use 'auto' for the streaming response after tool calls + tool_choice: 'auto', // Always use 'auto' for the streaming response after tool calls stream: true, stream_options: { include_usage: true }, } - + const streamResponse = await openai.chat.completions.create(streamingPayload) - + // Create the StreamingExecution object with all collected data let streamContent = '' - + const streamingResult = { stream: createReadableStreamFromOpenAIStream(streamResponse, (content, usage) => { // Update the execution data with the final content and token usage streamContent = content streamingResult.execution.output.response.content = content - + // Update token usage if available from the stream if (usage) { const newTokens = { prompt: usage.prompt_tokens || tokens.prompt, completion: usage.completion_tokens || tokens.completion, - total: usage.total_tokens || tokens.total + total: usage.total_tokens || tokens.total, } - + streamingResult.execution.output.response.tokens = newTokens } }), @@ -521,10 +531,13 @@ export const openaiProvider: ProviderConfig = { completion: tokens.completion, total: tokens.total, }, - toolCalls: toolCalls.length > 0 ? 
{ - list: toolCalls, - count: toolCalls.length - } : undefined, + toolCalls: + toolCalls.length > 0 + ? { + list: toolCalls, + count: toolCalls.length, + } + : undefined, providerTiming: { startTime: providerStartTimeISO, endTime: new Date().toISOString(), @@ -534,19 +547,19 @@ export const openaiProvider: ProviderConfig = { firstResponseTime: firstResponseTime, iterations: iterationCount + 1, timeSegments: timeSegments, - } + }, // Cost will be calculated in execution-logger.ts - } + }, }, logs: [], // No block logs at provider level metadata: { startTime: providerStartTimeISO, endTime: new Date().toISOString(), duration: Date.now() - providerStartTime, - } - } + }, + }, } as StreamingExecution - + // Return the streaming execution object with explicit casting return streamingResult as StreamingExecution } @@ -598,4 +611,3 @@ export const openaiProvider: ProviderConfig = { } }, } - diff --git a/sim/providers/pricing.ts b/apps/sim/providers/pricing.ts similarity index 99% rename from sim/providers/pricing.ts rename to apps/sim/providers/pricing.ts index 1d110c64a..7dc455346 100644 --- a/sim/providers/pricing.ts +++ b/apps/sim/providers/pricing.ts @@ -20,7 +20,7 @@ const modelPricing: ModelPricingMap = { output: 60, updatedAt: '2025-04-16', }, - 'o3': { + o3: { input: 10, cachedInput: 2.5, output: 40, diff --git a/sim/providers/types.ts b/apps/sim/providers/types.ts similarity index 100% rename from sim/providers/types.ts rename to apps/sim/providers/types.ts diff --git a/sim/providers/utils.test.ts b/apps/sim/providers/utils.test.ts similarity index 92% rename from sim/providers/utils.test.ts rename to apps/sim/providers/utils.test.ts index df2dbcb93..b981c82f3 100644 --- a/sim/providers/utils.test.ts +++ b/apps/sim/providers/utils.test.ts @@ -2,7 +2,6 @@ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' import * as environmentModule from '@/lib/environment' import { getApiKey } from './utils' - const isHostedSpy = vi.spyOn(environmentModule, 
'isHosted', 'get') const mockGetRotatingApiKey = vi.fn().mockReturnValue('rotating-server-key') const originalRequire = module.require @@ -13,11 +12,11 @@ describe('getApiKey', () => { beforeEach(() => { vi.clearAllMocks() - + isHostedSpy.mockReturnValue(false) - + module.require = vi.fn(() => ({ - getRotatingApiKey: mockGetRotatingApiKey + getRotatingApiKey: mockGetRotatingApiKey, })) }) @@ -28,11 +27,11 @@ describe('getApiKey', () => { it('should return user-provided key when not in hosted environment', () => { isHostedSpy.mockReturnValue(false) - + // For OpenAI const key1 = getApiKey('openai', 'gpt-4', 'user-key-openai') expect(key1).toBe('user-key-openai') - + // For Anthropic const key2 = getApiKey('anthropic', 'claude-3', 'user-key-anthropic') expect(key2).toBe('user-key-anthropic') @@ -40,9 +39,11 @@ describe('getApiKey', () => { it('should throw error if no key provided in non-hosted environment', () => { isHostedSpy.mockReturnValue(false) - + expect(() => getApiKey('openai', 'gpt-4')).toThrow('API key is required for openai gpt-4') - expect(() => getApiKey('anthropic', 'claude-3')).toThrow('API key is required for anthropic claude-3') + expect(() => getApiKey('anthropic', 'claude-3')).toThrow( + 'API key is required for anthropic claude-3' + ) }) it('should fall back to user key in hosted environment if rotation fails', () => { @@ -68,10 +69,10 @@ describe('getApiKey', () => { it('should require user key for non-OpenAI/Anthropic providers even in hosted environment', () => { isHostedSpy.mockReturnValue(true) - + const key = getApiKey('other-provider', 'some-model', 'user-key') expect(key).toBe('user-key') - + expect(() => getApiKey('other-provider', 'some-model')).toThrow( 'API key is required for other-provider some-model' ) diff --git a/sim/providers/utils.ts b/apps/sim/providers/utils.ts similarity index 99% rename from sim/providers/utils.ts rename to apps/sim/providers/utils.ts index b8610a6fa..209b98aef 100644 --- a/sim/providers/utils.ts +++ 
b/apps/sim/providers/utils.ts @@ -1,6 +1,7 @@ +import { getCostMultiplier } from '@/lib/environment' +import { isHosted } from '@/lib/environment' import { createLogger } from '@/lib/logs/console-logger' import { useCustomToolsStore } from '@/stores/custom-tools/store' -import { getCostMultiplier } from '@/lib/environment' import { anthropicProvider } from './anthropic' import { cerebrasProvider } from './cerebras' import { deepseekProvider } from './deepseek' @@ -11,7 +12,6 @@ import { openaiProvider } from './openai' import { getModelPricing } from './pricing' import { ProviderConfig, ProviderId, ProviderToolConfig } from './types' import { xAIProvider } from './xai' -import { isHosted } from '@/lib/environment' const logger = createLogger('ProviderUtils') @@ -40,7 +40,7 @@ export const providers: Record< }, google: { ...googleProvider, - models: ['gemini-2.5-pro-exp-03-25','gemini-2.5-flash-preview-04-17'], + models: ['gemini-2.5-pro-exp-03-25', 'gemini-2.5-flash-preview-04-17'], modelPatterns: [/^gemini/], }, deepseek: { @@ -361,7 +361,7 @@ export async function transformBlockTool( // Get the tool config - check if it's a custom tool that needs async fetching let toolConfig: any - + if (toolId.startsWith('custom_') && getToolAsync) { // Use the async version for custom tools toolConfig = await getToolAsync(toolId) @@ -369,7 +369,7 @@ export async function transformBlockTool( // Use the synchronous version for built-in tools toolConfig = getTool(toolId) } - + if (!toolConfig) { logger.warn(`Tool config not found for ID: ${toolId}`) return null @@ -428,7 +428,7 @@ export function calculateCost( const outputCost = completionTokens * (pricing.output / 1_000_000) const totalCost = inputCost + outputCost - + const costMultiplier = getCostMultiplier() return { diff --git a/sim/providers/xai/index.ts b/apps/sim/providers/xai/index.ts similarity index 94% rename from sim/providers/xai/index.ts rename to apps/sim/providers/xai/index.ts index 39c1bbc8b..bf62ce816 100644 
--- a/sim/providers/xai/index.ts +++ b/apps/sim/providers/xai/index.ts @@ -1,8 +1,8 @@ import OpenAI from 'openai' import { createLogger } from '@/lib/logs/console-logger' +import { StreamingExecution } from '@/executor/types' import { executeTool } from '@/tools' import { ProviderConfig, ProviderRequest, ProviderResponse, TimeSegment } from '../types' -import { StreamingExecution } from '@/executor/types' import { prepareToolsWithUsageControl, trackForcedToolUsage } from '../utils' const logger = createLogger('XAIProvider') @@ -37,7 +37,9 @@ export const xAIProvider: ProviderConfig = { models: ['grok-3-latest', 'grok-3-fast-latest'], defaultModel: 'grok-3-latest', - executeRequest: async (request: ProviderRequest): Promise => { + executeRequest: async ( + request: ProviderRequest + ): Promise => { if (!request.apiKey) { throw new Error('API key is required for xAI') } @@ -47,7 +49,7 @@ export const xAIProvider: ProviderConfig = { apiKey: request.apiKey, baseURL: 'https://api.x.ai/v1', }) - + // Prepare messages const allMessages = [] @@ -156,9 +158,9 @@ export const xAIProvider: ProviderConfig = { let tokenUsage = { prompt: 0, completion: 0, - total: 0 + total: 0, } - + // Create a StreamingExecution response with a readable stream const streamingResult = { stream: createReadableStreamFromXAIStream(streamResponse), @@ -174,21 +176,23 @@ export const xAIProvider: ProviderConfig = { startTime: providerStartTimeISO, endTime: new Date().toISOString(), duration: Date.now() - providerStartTime, - timeSegments: [{ - type: 'model', - name: 'Streaming response', - startTime: providerStartTime, - endTime: Date.now(), - duration: Date.now() - providerStartTime, - }] + timeSegments: [ + { + type: 'model', + name: 'Streaming response', + startTime: providerStartTime, + endTime: Date.now(), + duration: Date.now() - providerStartTime, + }, + ], }, // Estimate token cost cost: { total: 0.0, input: 0.0, - output: 0.0 - } - } + output: 0.0, + }, + }, }, logs: [], // No block logs 
for direct streaming metadata: { @@ -196,10 +200,10 @@ export const xAIProvider: ProviderConfig = { endTime: new Date().toISOString(), duration: Date.now() - providerStartTime, }, - isStreaming: true - } + isStreaming: true, + }, } - + // Return the streaming execution object return streamingResult as StreamingExecution } @@ -433,7 +437,7 @@ export const xAIProvider: ProviderConfig = { tool_choice: 'auto', // Always use 'auto' for the streaming response after tool calls stream: true, } - + const streamResponse = await xai.chat.completions.create(streamingPayload) // Create a StreamingExecution response with all collected data @@ -450,10 +454,13 @@ export const xAIProvider: ProviderConfig = { completion: tokens.completion, total: tokens.total, }, - toolCalls: toolCalls.length > 0 ? { - list: toolCalls, - count: toolCalls.length - } : undefined, + toolCalls: + toolCalls.length > 0 + ? { + list: toolCalls, + count: toolCalls.length, + } + : undefined, providerTiming: { startTime: providerStartTimeISO, endTime: new Date().toISOString(), @@ -467,9 +474,9 @@ export const xAIProvider: ProviderConfig = { cost: { total: (tokens.total || 0) * 0.0001, input: (tokens.prompt || 0) * 0.0001, - output: (tokens.completion || 0) * 0.0001 - } - } + output: (tokens.completion || 0) * 0.0001, + }, + }, }, logs: [], // No block logs at provider level metadata: { @@ -477,10 +484,10 @@ export const xAIProvider: ProviderConfig = { endTime: new Date().toISOString(), duration: Date.now() - providerStartTime, }, - isStreaming: true - } + isStreaming: true, + }, } - + // Return the streaming execution object return streamingResult as StreamingExecution } diff --git a/sim/public/favicon/android-chrome-192x192.png b/apps/sim/public/favicon/android-chrome-192x192.png similarity index 100% rename from sim/public/favicon/android-chrome-192x192.png rename to apps/sim/public/favicon/android-chrome-192x192.png diff --git a/sim/public/favicon/android-chrome-512x512.png 
b/apps/sim/public/favicon/android-chrome-512x512.png similarity index 100% rename from sim/public/favicon/android-chrome-512x512.png rename to apps/sim/public/favicon/android-chrome-512x512.png diff --git a/sim/public/favicon/apple-touch-icon.png b/apps/sim/public/favicon/apple-touch-icon.png similarity index 100% rename from sim/public/favicon/apple-touch-icon.png rename to apps/sim/public/favicon/apple-touch-icon.png diff --git a/sim/public/favicon/favicon-16x16.png b/apps/sim/public/favicon/favicon-16x16.png similarity index 100% rename from sim/public/favicon/favicon-16x16.png rename to apps/sim/public/favicon/favicon-16x16.png diff --git a/sim/public/favicon/favicon-32x32.png b/apps/sim/public/favicon/favicon-32x32.png similarity index 100% rename from sim/public/favicon/favicon-32x32.png rename to apps/sim/public/favicon/favicon-32x32.png diff --git a/sim/public/favicon/favicon.ico b/apps/sim/public/favicon/favicon.ico similarity index 100% rename from sim/public/favicon/favicon.ico rename to apps/sim/public/favicon/favicon.ico diff --git a/sim/public/favicon/site.webmanifest b/apps/sim/public/favicon/site.webmanifest similarity index 100% rename from sim/public/favicon/site.webmanifest rename to apps/sim/public/favicon/site.webmanifest diff --git a/sim/public/logo-sim.svg b/apps/sim/public/logo-sim.svg similarity index 100% rename from sim/public/logo-sim.svg rename to apps/sim/public/logo-sim.svg diff --git a/sim/public/sim.png b/apps/sim/public/sim.png similarity index 100% rename from sim/public/sim.png rename to apps/sim/public/sim.png diff --git a/sim/public/sim.svg b/apps/sim/public/sim.svg similarity index 100% rename from sim/public/sim.svg rename to apps/sim/public/sim.svg diff --git a/sim/public/social/facebook.png b/apps/sim/public/social/facebook.png similarity index 100% rename from sim/public/social/facebook.png rename to apps/sim/public/social/facebook.png diff --git a/sim/public/social/instagram.png b/apps/sim/public/social/instagram.png 
similarity index 100% rename from sim/public/social/instagram.png rename to apps/sim/public/social/instagram.png diff --git a/sim/public/social/twitter.png b/apps/sim/public/social/twitter.png similarity index 100% rename from sim/public/social/twitter.png rename to apps/sim/public/social/twitter.png diff --git a/sim/public/static/automate-blocks.svg b/apps/sim/public/static/automate-blocks.svg similarity index 100% rename from sim/public/static/automate-blocks.svg rename to apps/sim/public/static/automate-blocks.svg diff --git a/sim/public/static/discord-icon.png b/apps/sim/public/static/discord-icon.png similarity index 100% rename from sim/public/static/discord-icon.png rename to apps/sim/public/static/discord-icon.png diff --git a/sim/public/static/github-icon.png b/apps/sim/public/static/github-icon.png similarity index 100% rename from sim/public/static/github-icon.png rename to apps/sim/public/static/github-icon.png diff --git a/sim/public/static/sim.png b/apps/sim/public/static/sim.png similarity index 100% rename from sim/public/static/sim.png rename to apps/sim/public/static/sim.png diff --git a/sim/public/static/sync-blocks.svg b/apps/sim/public/static/sync-blocks.svg similarity index 100% rename from sim/public/static/sync-blocks.svg rename to apps/sim/public/static/sync-blocks.svg diff --git a/sim/public/static/x-icon.png b/apps/sim/public/static/x-icon.png similarity index 100% rename from sim/public/static/x-icon.png rename to apps/sim/public/static/x-icon.png diff --git a/sim/public/twitter/daniel.jpg b/apps/sim/public/twitter/daniel.jpg similarity index 100% rename from sim/public/twitter/daniel.jpg rename to apps/sim/public/twitter/daniel.jpg diff --git a/sim/public/twitter/github-projects.jpg b/apps/sim/public/twitter/github-projects.jpg similarity index 100% rename from sim/public/twitter/github-projects.jpg rename to apps/sim/public/twitter/github-projects.jpg diff --git a/sim/public/twitter/hasan.jpg b/apps/sim/public/twitter/hasan.jpg 
similarity index 100% rename from sim/public/twitter/hasan.jpg rename to apps/sim/public/twitter/hasan.jpg diff --git a/sim/public/twitter/lazukars.png b/apps/sim/public/twitter/lazukars.png similarity index 100% rename from sim/public/twitter/lazukars.png rename to apps/sim/public/twitter/lazukars.png diff --git a/sim/public/twitter/nizzy.jpg b/apps/sim/public/twitter/nizzy.jpg similarity index 100% rename from sim/public/twitter/nizzy.jpg rename to apps/sim/public/twitter/nizzy.jpg diff --git a/sim/public/twitter/samarth.jpg b/apps/sim/public/twitter/samarth.jpg similarity index 100% rename from sim/public/twitter/samarth.jpg rename to apps/sim/public/twitter/samarth.jpg diff --git a/sim/public/twitter/syamrajk.jpg b/apps/sim/public/twitter/syamrajk.jpg similarity index 100% rename from sim/public/twitter/syamrajk.jpg rename to apps/sim/public/twitter/syamrajk.jpg diff --git a/sim/public/twitter/xyflow.jpg b/apps/sim/public/twitter/xyflow.jpg similarity index 100% rename from sim/public/twitter/xyflow.jpg rename to apps/sim/public/twitter/xyflow.jpg diff --git a/sim/scripts/ollama_docker.sh b/apps/sim/scripts/ollama_docker.sh similarity index 100% rename from sim/scripts/ollama_docker.sh rename to apps/sim/scripts/ollama_docker.sh diff --git a/sim/sentry.edge.config.ts b/apps/sim/sentry.edge.config.ts similarity index 89% rename from sim/sentry.edge.config.ts rename to apps/sim/sentry.edge.config.ts index 78d0f1fd4..8cf1517c3 100644 --- a/sim/sentry.edge.config.ts +++ b/apps/sim/sentry.edge.config.ts @@ -2,8 +2,7 @@ // The config you add here will be used whenever one of the edge features is loaded. // Note that this config is unrelated to the Vercel Edge Runtime and is also required when running locally. 
// https://docs.sentry.io/platforms/javascript/guides/nextjs/ - -import * as Sentry from "@sentry/nextjs" +import * as Sentry from '@sentry/nextjs' // Completely skip Sentry initialization in development if (process.env.NODE_ENV === 'production') { @@ -12,10 +11,10 @@ if (process.env.NODE_ENV === 'production') { enabled: true, environment: process.env.NODE_ENV || 'development', tracesSampleRate: 0.2, - + beforeSend(event) { if (event.request && typeof event.request === 'object') { - (event.request as any).ip = null + ;(event.request as any).ip = null } return event }, diff --git a/sim/sentry.server.config.ts b/apps/sim/sentry.server.config.ts similarity index 87% rename from sim/sentry.server.config.ts rename to apps/sim/sentry.server.config.ts index 97fe9563d..5950647da 100644 --- a/sim/sentry.server.config.ts +++ b/apps/sim/sentry.server.config.ts @@ -1,8 +1,7 @@ // This file configures the initialization of Sentry on the server. // The config you add here will be used whenever the server handles a request. 
// https://docs.sentry.io/platforms/javascript/guides/nextjs/ - -import * as Sentry from "@sentry/nextjs" +import * as Sentry from '@sentry/nextjs' // Completely skip Sentry initialization in development if (process.env.NODE_ENV === 'production') { @@ -11,10 +10,10 @@ if (process.env.NODE_ENV === 'production') { enabled: true, environment: process.env.NODE_ENV || 'development', tracesSampleRate: 0.2, - + beforeSend(event) { if (event.request && typeof event.request === 'object') { - (event.request as any).ip = null + ;(event.request as any).ip = null } return event }, diff --git a/sim/serializer/__test-utils__/test-workflows.ts b/apps/sim/serializer/__test-utils__/test-workflows.ts similarity index 100% rename from sim/serializer/__test-utils__/test-workflows.ts rename to apps/sim/serializer/__test-utils__/test-workflows.ts diff --git a/sim/serializer/index.test.ts b/apps/sim/serializer/index.test.ts similarity index 100% rename from sim/serializer/index.test.ts rename to apps/sim/serializer/index.test.ts diff --git a/sim/serializer/index.ts b/apps/sim/serializer/index.ts similarity index 100% rename from sim/serializer/index.ts rename to apps/sim/serializer/index.ts diff --git a/sim/serializer/types.ts b/apps/sim/serializer/types.ts similarity index 100% rename from sim/serializer/types.ts rename to apps/sim/serializer/types.ts diff --git a/sim/stores/constants.ts b/apps/sim/stores/constants.ts similarity index 100% rename from sim/stores/constants.ts rename to apps/sim/stores/constants.ts diff --git a/sim/stores/copilot/store.ts b/apps/sim/stores/copilot/store.ts similarity index 100% rename from sim/stores/copilot/store.ts rename to apps/sim/stores/copilot/store.ts diff --git a/sim/stores/copilot/types.ts b/apps/sim/stores/copilot/types.ts similarity index 100% rename from sim/stores/copilot/types.ts rename to apps/sim/stores/copilot/types.ts diff --git a/sim/stores/copilot/utils.ts b/apps/sim/stores/copilot/utils.ts similarity index 100% rename from 
sim/stores/copilot/utils.ts rename to apps/sim/stores/copilot/utils.ts diff --git a/sim/stores/custom-tools/store.ts b/apps/sim/stores/custom-tools/store.ts similarity index 80% rename from sim/stores/custom-tools/store.ts rename to apps/sim/stores/custom-tools/store.ts index f6942ba66..a8f8ad563 100644 --- a/sim/stores/custom-tools/store.ts +++ b/apps/sim/stores/custom-tools/store.ts @@ -19,19 +19,19 @@ export const useCustomToolsStore = create()( try { set({ isLoading: true, error: null }) logger.info('Loading custom tools from server') - + const response = await fetch(API_ENDPOINT) - + if (!response.ok) { throw new Error(`Failed to load custom tools: ${response.statusText}`) } - + const { data } = await response.json() - + if (!Array.isArray(data)) { throw new Error('Invalid response format') } - + // Validate each tool object's structure before processing data.forEach((tool, index) => { if (!tool || typeof tool !== 'object') { @@ -50,38 +50,41 @@ export const useCustomToolsStore = create()( throw new Error(`Invalid tool format at index ${index}: missing or invalid code`) } }) - + // Transform to local format and set const transformedTools = data.reduce( (acc, tool) => ({ ...acc, - [tool.id]: tool + [tool.id]: tool, }), {} ) - + logger.info(`Loaded ${data.length} custom tools from server`) - + // Log details of loaded tools for debugging if (data.length > 0) { - logger.info('Custom tools loaded:', data.map(tool => ({ - id: tool.id, - title: tool.title, - functionName: tool.schema?.function?.name || 'unknown' - }))) + logger.info( + 'Custom tools loaded:', + data.map((tool) => ({ + id: tool.id, + title: tool.title, + functionName: tool.schema?.function?.name || 'unknown', + })) + ) } - + set({ tools: transformedTools, - isLoading: false + isLoading: false, }) } catch (error) { logger.error('Error loading custom tools:', error) set({ error: error instanceof Error ? 
error.message : 'Unknown error', - isLoading: false + isLoading: false, }) - + // Add a delay before reloading to prevent race conditions setTimeout(() => { // Reload from server to ensure consistency @@ -94,47 +97,52 @@ export const useCustomToolsStore = create()( sync: async () => { try { set({ isLoading: true, error: null }) - + const tools = Object.values(get().tools) logger.info(`Syncing ${tools.length} custom tools with server`) - + // Log details of tools being synced for debugging if (tools.length > 0) { - logger.info('Custom tools to sync:', tools.map(tool => ({ - id: tool.id, - title: tool.title, - functionName: tool.schema?.function?.name || 'unknown' - }))) + logger.info( + 'Custom tools to sync:', + tools.map((tool) => ({ + id: tool.id, + title: tool.title, + functionName: tool.schema?.function?.name || 'unknown', + })) + ) } - + const response = await fetch(API_ENDPOINT, { method: 'POST', headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ tools }) + body: JSON.stringify({ tools }), }) - + if (!response.ok) { // Try to get more detailed error information try { - const errorData = await response.json(); - throw new Error(`Failed to sync custom tools: ${response.statusText}. ${errorData.error || ''}`) + const errorData = await response.json() + throw new Error( + `Failed to sync custom tools: ${response.statusText}. ${errorData.error || ''}` + ) } catch (parseError) { throw new Error(`Failed to sync custom tools: ${response.statusText}`) } } - + set({ isLoading: false }) logger.info('Successfully synced custom tools with server') - + // Load from server to ensure consistency even after successful sync get().loadCustomTools() } catch (error) { logger.error('Error syncing custom tools:', error) set({ error: error instanceof Error ? 
error.message : 'Unknown error', - isLoading: false + isLoading: false, }) - + // Add a delay before reloading to prevent race conditions setTimeout(() => { // Reload from server to ensure consistency @@ -157,11 +165,13 @@ export const useCustomToolsStore = create()( [id]: newTool, }, })) - + // Sync with server - get().sync().catch(error => { - logger.error('Error syncing after adding tool:', error) - }) + get() + .sync() + .catch((error) => { + logger.error('Error syncing after adding tool:', error) + }) return id }, @@ -182,11 +192,13 @@ export const useCustomToolsStore = create()( [id]: updatedTool, }, })) - + // Sync with server - get().sync().catch(error => { - logger.error('Error syncing after updating tool:', error) - }) + get() + .sync() + .catch((error) => { + logger.error('Error syncing after updating tool:', error) + }) return true }, @@ -197,11 +209,13 @@ export const useCustomToolsStore = create()( delete newTools[id] return { tools: newTools } }) - + // Sync with server - get().sync().catch(error => { - logger.error('Error syncing after removing tool:', error) - }) + get() + .sync() + .catch((error) => { + logger.error('Error syncing after removing tool:', error) + }) }, getTool: (id) => { @@ -220,7 +234,7 @@ export const useCustomToolsStore = create()( // No need for a setTimeout here logger.info('Store rehydrated from localStorage') } - } + }, } ) ) diff --git a/sim/stores/custom-tools/types.ts b/apps/sim/stores/custom-tools/types.ts similarity index 99% rename from sim/stores/custom-tools/types.ts rename to apps/sim/stores/custom-tools/types.ts index 3668da8af..29df2a90d 100644 --- a/sim/stores/custom-tools/types.ts +++ b/apps/sim/stores/custom-tools/types.ts @@ -24,7 +24,7 @@ export interface CustomToolsStore { tools: Record isLoading: boolean error: string | null - + // CRUD operations addTool: (tool: Omit) => string updateTool: ( @@ -34,7 +34,7 @@ export interface CustomToolsStore { removeTool: (id: string) => void getTool: (id: string) => 
CustomToolDefinition | undefined getAllTools: () => CustomToolDefinition[] - + // Server sync operations loadCustomTools: () => Promise sync: () => Promise diff --git a/sim/stores/execution/store.ts b/apps/sim/stores/execution/store.ts similarity index 100% rename from sim/stores/execution/store.ts rename to apps/sim/stores/execution/store.ts diff --git a/sim/stores/execution/types.ts b/apps/sim/stores/execution/types.ts similarity index 100% rename from sim/stores/execution/types.ts rename to apps/sim/stores/execution/types.ts diff --git a/sim/stores/index.ts b/apps/sim/stores/index.ts similarity index 99% rename from sim/stores/index.ts rename to apps/sim/stores/index.ts index b8b207e12..190896976 100644 --- a/sim/stores/index.ts +++ b/apps/sim/stores/index.ts @@ -43,7 +43,7 @@ async function initializeApplication(): Promise { try { // Load environment variables directly from DB await useEnvironmentStore.getState().loadEnvironmentVariables() - + // Load custom tools from server await useCustomToolsStore.getState().loadCustomTools() diff --git a/sim/stores/notifications/store.ts b/apps/sim/stores/notifications/store.ts similarity index 100% rename from sim/stores/notifications/store.ts rename to apps/sim/stores/notifications/store.ts diff --git a/sim/stores/notifications/types.ts b/apps/sim/stores/notifications/types.ts similarity index 100% rename from sim/stores/notifications/types.ts rename to apps/sim/stores/notifications/types.ts diff --git a/sim/stores/ollama/store.ts b/apps/sim/stores/ollama/store.ts similarity index 100% rename from sim/stores/ollama/store.ts rename to apps/sim/stores/ollama/store.ts diff --git a/sim/stores/ollama/types.ts b/apps/sim/stores/ollama/types.ts similarity index 100% rename from sim/stores/ollama/types.ts rename to apps/sim/stores/ollama/types.ts diff --git a/sim/stores/panel/chat/store.ts b/apps/sim/stores/panel/chat/store.ts similarity index 91% rename from sim/stores/panel/chat/store.ts rename to 
apps/sim/stores/panel/chat/store.ts index 90347d96a..8d8a321e8 100644 --- a/sim/stores/panel/chat/store.ts +++ b/apps/sim/stores/panel/chat/store.ts @@ -44,7 +44,7 @@ export const useChatStore = create()( set((state) => { // Create a new copy of the selections state const newSelections = { ...state.selectedWorkflowOutputs } - + // If empty array, explicitly remove the key to prevent empty arrays from persisting if (outputIds.length === 0) { // Delete the key entirely instead of setting to empty array @@ -53,7 +53,7 @@ export const useChatStore = create()( // Ensure no duplicates in the selection by using Set newSelections[workflowId] = [...new Set(outputIds)] } - + return { selectedWorkflowOutputs: newSelections } }) }, @@ -68,14 +68,17 @@ export const useChatStore = create()( if (message.id === messageId) { return { ...message, - content: typeof message.content === 'string' - ? message.content + content - : (message.content ? String(message.content) + content : content), + content: + typeof message.content === 'string' + ? message.content + content + : message.content + ? 
String(message.content) + content + : content, } } return message }) - + return { messages: newMessages } }) }, @@ -89,7 +92,7 @@ export const useChatStore = create()( } return message }) - + return { messages: newMessages } }) }, @@ -99,4 +102,4 @@ export const useChatStore = create()( } ) ) -) \ No newline at end of file +) diff --git a/sim/stores/panel/chat/types.ts b/apps/sim/stores/panel/chat/types.ts similarity index 99% rename from sim/stores/panel/chat/types.ts rename to apps/sim/stores/panel/chat/types.ts index efccaa13e..8f46cd444 100644 --- a/sim/stores/panel/chat/types.ts +++ b/apps/sim/stores/panel/chat/types.ts @@ -23,4 +23,4 @@ export interface ChatStore { getSelectedWorkflowOutput: (workflowId: string) => string[] appendMessageContent: (messageId: string, content: string) => void finalizeMessageStream: (messageId: string) => void -} \ No newline at end of file +} diff --git a/sim/stores/panel/console/store.ts b/apps/sim/stores/panel/console/store.ts similarity index 81% rename from sim/stores/panel/console/store.ts rename to apps/sim/stores/panel/console/store.ts index d58926c8f..53ec5aad6 100644 --- a/sim/stores/panel/console/store.ts +++ b/apps/sim/stores/panel/console/store.ts @@ -1,7 +1,7 @@ import { create } from 'zustand' import { devtools, persist } from 'zustand/middleware' -import { ConsoleEntry, ConsoleStore } from './types' import { useChatStore } from '../chat/store' +import { ConsoleEntry, ConsoleStore } from './types' // MAX across all workflows const MAX_ENTRIES = 50 @@ -48,17 +48,17 @@ const redactApiKeys = (obj: any): any => { */ const getValueByPath = (obj: any, path: string): any => { if (!obj || !path) return undefined - + const pathParts = path.split('.') let current = obj - + for (const part of pathParts) { if (current === null || current === undefined || typeof current !== 'object') { return undefined } current = current[part] } - + return current } @@ -74,23 +74,38 @@ export const useConsoleStore = create()( // Determine 
early if this entry represents a streaming output const isStreamingOutput = (typeof ReadableStream !== 'undefined' && entry.output instanceof ReadableStream) || - (typeof entry.output === 'object' && entry.output && entry.output.isStreaming === true) || - (typeof entry.output === 'object' && entry.output && 'executionData' in entry.output && - typeof entry.output.executionData === 'object' && entry.output.executionData?.isStreaming === true) || + (typeof entry.output === 'object' && + entry.output && + entry.output.isStreaming === true) || + (typeof entry.output === 'object' && + entry.output && + 'executionData' in entry.output && + typeof entry.output.executionData === 'object' && + entry.output.executionData?.isStreaming === true) || (typeof entry.output === 'object' && entry.output && 'stream' in entry.output) || - (typeof entry.output === 'object' && entry.output && - 'stream' in entry.output && 'execution' in entry.output) + (typeof entry.output === 'object' && + entry.output && + 'stream' in entry.output && + 'execution' in entry.output) // Skip adding raw streaming objects that have both stream and executionData - if (typeof entry.output === 'object' && entry.output && - 'stream' in entry.output && 'executionData' in entry.output) { + if ( + typeof entry.output === 'object' && + entry.output && + 'stream' in entry.output && + 'executionData' in entry.output + ) { // Don't add this entry - it will be processed by our explicit formatting code in executor/index.ts return { entries: state.entries } } // Also skip raw StreamingExecution objects (with stream and execution properties) - if (typeof entry.output === 'object' && entry.output && - 'stream' in entry.output && 'execution' in entry.output) { + if ( + typeof entry.output === 'object' && + entry.output && + 'stream' in entry.output && + 'execution' in entry.output + ) { // Don't add this entry to prevent duplicate console entries for streaming responses return { entries: state.entries } } @@ -99,22 +114,23 
@@ export const useConsoleStore = create()( const redactedEntry = { ...entry } // If output is a stream, we skip redaction (it's not an object we want to recurse into) - if (!isStreamingOutput && redactedEntry.output && typeof redactedEntry.output === 'object') { + if ( + !isStreamingOutput && + redactedEntry.output && + typeof redactedEntry.output === 'object' + ) { redactedEntry.output = redactApiKeys(redactedEntry.output) } // Create the new entry with ID and timestamp - const newEntry = { - ...redactedEntry, - id: crypto.randomUUID(), - timestamp: new Date().toISOString() + const newEntry = { + ...redactedEntry, + id: crypto.randomUUID(), + timestamp: new Date().toISOString(), } // Keep only the last MAX_ENTRIES - const newEntries = [ - newEntry, - ...state.entries, - ].slice(0, MAX_ENTRIES) + const newEntries = [newEntry, ...state.entries].slice(0, MAX_ENTRIES) // If the block produced a streaming output, skip automatic chat message creation if (isStreamingOutput) { @@ -125,7 +141,7 @@ export const useConsoleStore = create()( if (entry.workflowId && entry.blockName) { const chatStore = useChatStore.getState() const selectedOutputIds = chatStore.getSelectedWorkflowOutput(entry.workflowId) - + if (selectedOutputIds && selectedOutputIds.length > 0) { // Process each selected output that matches this block for (const selectedOutputId of selectedOutputIds) { @@ -135,18 +151,18 @@ export const useConsoleStore = create()( const selectedBlockId = idParts[0] // Reconstruct the path by removing the blockId part const selectedPath = idParts.slice(1).join('.') - + // If this block matches the selected output for this workflow if (selectedBlockId && entry.blockId === selectedBlockId) { // Extract the specific value from the output using the path let specificValue: any = undefined - + if (selectedPath) { - specificValue = getValueByPath(entry.output, selectedPath) + specificValue = getValueByPath(entry.output, selectedPath) } else { specificValue = entry.output } - + // 
Format the value appropriately for display let formattedValue: string // For streaming responses, use empty string and set isStreaming flag @@ -155,18 +171,18 @@ export const useConsoleStore = create()( // This prevents the "Output value not found" message for streams continue } else if (specificValue === undefined) { - formattedValue = "Output value not found" + formattedValue = 'Output value not found' } else if (typeof specificValue === 'object') { formattedValue = JSON.stringify(specificValue, null, 2) } else { formattedValue = String(specificValue) } - + // Skip empty content messages (important for preventing empty entries) if (!formattedValue || formattedValue.trim() === '') { continue } - + // Add the specific value to chat, not the whole output chatStore.addMessage({ content: formattedValue, @@ -182,7 +198,7 @@ export const useConsoleStore = create()( return { entries: newEntries } }) - + // Return the created entry by finding it in the updated store return get().entries[0] }, @@ -203,9 +219,12 @@ export const useConsoleStore = create()( set((state) => ({ isOpen: !state.isOpen })) }, - updateConsole: (entryId: string, updatedData: Partial>) => { + updateConsole: ( + entryId: string, + updatedData: Partial> + ) => { set((state) => { - const updatedEntries = state.entries.map(entry => { + const updatedEntries = state.entries.map((entry) => { if (entry.id === entryId) { return { ...entry, diff --git a/sim/stores/panel/console/types.ts b/apps/sim/stores/panel/console/types.ts similarity index 82% rename from sim/stores/panel/console/types.ts rename to apps/sim/stores/panel/console/types.ts index f1e1160c1..bf987663d 100644 --- a/sim/stores/panel/console/types.ts +++ b/apps/sim/stores/panel/console/types.ts @@ -20,5 +20,8 @@ export interface ConsoleStore { clearConsole: (workflowId: string | null) => void getWorkflowEntries: (workflowId: string) => ConsoleEntry[] toggleConsole: () => void - updateConsole: (entryId: string, updatedData: Partial>) => void + 
updateConsole: ( + entryId: string, + updatedData: Partial> + ) => void } diff --git a/sim/stores/panel/store.ts b/apps/sim/stores/panel/store.ts similarity index 100% rename from sim/stores/panel/store.ts rename to apps/sim/stores/panel/store.ts diff --git a/sim/stores/panel/types.ts b/apps/sim/stores/panel/types.ts similarity index 100% rename from sim/stores/panel/types.ts rename to apps/sim/stores/panel/types.ts diff --git a/sim/stores/panel/variables/store.ts b/apps/sim/stores/panel/variables/store.ts similarity index 99% rename from sim/stores/panel/variables/store.ts rename to apps/sim/stores/panel/variables/store.ts index b420b97d8..d0adbe8c5 100644 --- a/sim/stores/panel/variables/store.ts +++ b/apps/sim/stores/panel/variables/store.ts @@ -16,7 +16,7 @@ const loadedWorkflows = new Set() // Clear a workspace from the loaded tracking when switching workspaces export function clearWorkflowVariablesTracking() { - loadedWorkflows.clear(); + loadedWorkflows.clear() } export const useVariablesStore = create()( diff --git a/sim/stores/panel/variables/types.ts b/apps/sim/stores/panel/variables/types.ts similarity index 100% rename from sim/stores/panel/variables/types.ts rename to apps/sim/stores/panel/variables/types.ts diff --git a/sim/stores/settings/environment/store.ts b/apps/sim/stores/settings/environment/store.ts similarity index 100% rename from sim/stores/settings/environment/store.ts rename to apps/sim/stores/settings/environment/store.ts diff --git a/sim/stores/settings/environment/types.ts b/apps/sim/stores/settings/environment/types.ts similarity index 100% rename from sim/stores/settings/environment/types.ts rename to apps/sim/stores/settings/environment/types.ts diff --git a/sim/stores/settings/general/store.ts b/apps/sim/stores/settings/general/store.ts similarity index 90% rename from sim/stores/settings/general/store.ts rename to apps/sim/stores/settings/general/store.ts index 59b319308..571bab0c9 100644 --- a/sim/stores/settings/general/store.ts 
+++ b/apps/sim/stores/settings/general/store.ts @@ -14,7 +14,7 @@ export const useGeneralStore = create()( (set, get) => { let lastLoadTime = 0 let errorRetryCount = 0 - + return { isAutoConnectEnabled: true, isDebugModeEnabled: false, @@ -31,19 +31,19 @@ export const useGeneralStore = create()( set({ isAutoConnectEnabled: newValue }) get().updateSetting('autoConnect', newValue) }, - + toggleDebugMode: () => { const newValue = !get().isDebugModeEnabled set({ isDebugModeEnabled: newValue }) get().updateSetting('debugMode', newValue) }, - + toggleAutoFillEnvVars: () => { const newValue = !get().isAutoFillEnvVarsEnabled set({ isAutoFillEnvVarsEnabled: newValue }) get().updateSetting('autoFillEnvVars', newValue) }, - + setTheme: (theme) => { set({ theme }) get().updateSetting('theme', theme) @@ -67,35 +67,35 @@ export const useGeneralStore = create()( logger.debug('Skipping settings load - recently loaded') return } - + try { set({ isLoading: true, error: null }) - + const response = await fetch('/api/user/settings') - + if (!response.ok) { throw new Error('Failed to fetch settings') } - + const { data } = await response.json() - + set({ isAutoConnectEnabled: data.autoConnect, - isDebugModeEnabled: data.debugMode, + isDebugModeEnabled: data.debugMode, isAutoFillEnvVarsEnabled: data.autoFillEnvVars, theme: data.theme, telemetryEnabled: data.telemetryEnabled, telemetryNotifiedUser: data.telemetryNotifiedUser, - isLoading: false + isLoading: false, }) - + lastLoadTime = now errorRetryCount = 0 } catch (error) { logger.error('Error loading settings:', error) - set({ + set({ error: error instanceof Error ? 
error.message : 'Unknown error', - isLoading: false + isLoading: false, }) } }, @@ -107,28 +107,30 @@ export const useGeneralStore = create()( headers: { 'Content-Type': 'application/json' }, body: JSON.stringify({ [key]: value }), }) - + if (!response.ok) { throw new Error(`Failed to update setting: ${key}`) } - + set({ error: null }) - + lastLoadTime = Date.now() errorRetryCount = 0 } catch (error) { logger.error(`Error updating setting ${key}:`, error) set({ error: error instanceof Error ? error.message : 'Unknown error' }) - + if (errorRetryCount < MAX_ERROR_RETRIES) { errorRetryCount++ logger.debug(`Retry attempt ${errorRetryCount} after error`) get().loadSettings(true) } else { - logger.warn(`Max retries (${MAX_ERROR_RETRIES}) exceeded, skipping automatic loadSettings`) + logger.warn( + `Max retries (${MAX_ERROR_RETRIES}) exceeded, skipping automatic loadSettings` + ) } } - } + }, } }, { diff --git a/sim/stores/settings/general/types.ts b/apps/sim/stores/settings/general/types.ts similarity index 100% rename from sim/stores/settings/general/types.ts rename to apps/sim/stores/settings/general/types.ts diff --git a/sim/stores/sidebar/store.ts b/apps/sim/stores/sidebar/store.ts similarity index 99% rename from sim/stores/sidebar/store.ts rename to apps/sim/stores/sidebar/store.ts index 3221be977..3217444a0 100644 --- a/sim/stores/sidebar/store.ts +++ b/apps/sim/stores/sidebar/store.ts @@ -18,4 +18,4 @@ export const useSidebarStore = create()( name: 'sidebar-state', } ) -) \ No newline at end of file +) diff --git a/sim/stores/sync-core.ts b/apps/sim/stores/sync-core.ts similarity index 100% rename from sim/stores/sync-core.ts rename to apps/sim/stores/sync-core.ts diff --git a/sim/stores/sync-registry.ts b/apps/sim/stores/sync-registry.ts similarity index 100% rename from sim/stores/sync-registry.ts rename to apps/sim/stores/sync-registry.ts diff --git a/sim/stores/sync.ts b/apps/sim/stores/sync.ts similarity index 100% rename from sim/stores/sync.ts rename to 
apps/sim/stores/sync.ts diff --git a/sim/stores/workflows/index.ts b/apps/sim/stores/workflows/index.ts similarity index 93% rename from sim/stores/workflows/index.ts rename to apps/sim/stores/workflows/index.ts index 654aba134..6f8bcec5e 100644 --- a/sim/stores/workflows/index.ts +++ b/apps/sim/stores/workflows/index.ts @@ -83,15 +83,17 @@ export function getAllWorkflowsWithValues() { const currentState = useWorkflowStore.getState() // Log for debugging - logger.info(`Preparing workflows for sync with active workspace: ${activeWorkspaceId}`); - + logger.info(`Preparing workflows for sync with active workspace: ${activeWorkspaceId}`) + for (const [id, metadata] of Object.entries(workflows)) { // Skip workflows that don't belong to the active workspace if (activeWorkspaceId && metadata.workspaceId !== activeWorkspaceId) { - logger.debug(`Skipping workflow ${id} - belongs to workspace ${metadata.workspaceId}, not active workspace ${activeWorkspaceId}`); - continue; + logger.debug( + `Skipping workflow ${id} - belongs to workspace ${metadata.workspaceId}, not active workspace ${activeWorkspaceId}` + ) + continue } - + // Load the specific state for this workflow let workflowState: WorkflowState @@ -137,7 +139,9 @@ export function getAllWorkflowsWithValues() { } } - logger.info(`Prepared ${Object.keys(result).length} workflows for sync from workspace ${activeWorkspaceId}`); + logger.info( + `Prepared ${Object.keys(result).length} workflows for sync from workspace ${activeWorkspaceId}` + ) return result } diff --git a/sim/stores/workflows/middleware.ts b/apps/sim/stores/workflows/middleware.ts similarity index 100% rename from sim/stores/workflows/middleware.ts rename to apps/sim/stores/workflows/middleware.ts diff --git a/sim/stores/workflows/persistence.ts b/apps/sim/stores/workflows/persistence.ts similarity index 100% rename from sim/stores/workflows/persistence.ts rename to apps/sim/stores/workflows/persistence.ts diff --git a/sim/stores/workflows/registry/store.ts 
b/apps/sim/stores/workflows/registry/store.ts similarity index 89% rename from sim/stores/workflows/registry/store.ts rename to apps/sim/stores/workflows/registry/store.ts index 5d1678118..dfaaad5a2 100644 --- a/sim/stores/workflows/registry/store.ts +++ b/apps/sim/stores/workflows/registry/store.ts @@ -23,74 +23,76 @@ const ACTIVE_WORKSPACE_KEY = 'active-workspace-id' // Helps clean up any localStorage data that isn't needed for the current workspace function cleanupLocalStorageForWorkspace(workspaceId: string): void { - if (typeof window === 'undefined') return; - + if (typeof window === 'undefined') return + try { - const { workflows } = useWorkflowRegistry.getState(); - const workflowIds = Object.keys(workflows); - + const { workflows } = useWorkflowRegistry.getState() + const workflowIds = Object.keys(workflows) + // Find all localStorage keys that start with workflow- or subblock-values- - const localStorageKeys = Object.keys(localStorage); - const workflowKeys = localStorageKeys.filter(key => - key.startsWith('workflow-') || key.startsWith('subblock-values-') - ); - + const localStorageKeys = Object.keys(localStorage) + const workflowKeys = localStorageKeys.filter( + (key) => key.startsWith('workflow-') || key.startsWith('subblock-values-') + ) + // Extract the workflow ID from each key (remove the prefix) for (const key of workflowKeys) { - let workflowId: string | null = null; - + let workflowId: string | null = null + if (key.startsWith('workflow-')) { - workflowId = key.replace('workflow-', ''); + workflowId = key.replace('workflow-', '') } else if (key.startsWith('subblock-values-')) { - workflowId = key.replace('subblock-values-', ''); + workflowId = key.replace('subblock-values-', '') } - + if (workflowId) { // Case 1: Clean up workflows not in the registry if (!workflowIds.includes(workflowId)) { // Check if this workflow exists in a different workspace // We don't want to remove data for workflows in other workspaces - const exists = 
localStorage.getItem(`workflow-${workflowId}`); + const exists = localStorage.getItem(`workflow-${workflowId}`) if (exists) { try { - const parsed = JSON.parse(exists); + const parsed = JSON.parse(exists) // If we can't determine the workspace, leave it alone for safety - if (!parsed || !parsed.workspaceId) continue; - + if (!parsed || !parsed.workspaceId) continue + // Only remove if it belongs to the current workspace if (parsed.workspaceId === workspaceId) { - localStorage.removeItem(key); - logger.debug(`Removed stale localStorage data for workflow ${workflowId}`); + localStorage.removeItem(key) + logger.debug(`Removed stale localStorage data for workflow ${workflowId}`) } } catch (e) { // Skip if we can't parse the data - continue; + continue } } else { // If we can't determine the workspace, remove it to be safe - localStorage.removeItem(key); - logger.debug(`Removed stale localStorage data for workflow ${workflowId}`); + localStorage.removeItem(key) + logger.debug(`Removed stale localStorage data for workflow ${workflowId}`) } } // Case 2: Clean up workflows that reference deleted workspaces else { - const exists = localStorage.getItem(`workflow-${workflowId}`); + const exists = localStorage.getItem(`workflow-${workflowId}`) if (exists) { try { - const parsed = JSON.parse(exists); + const parsed = JSON.parse(exists) if (parsed && parsed.workspaceId && parsed.workspaceId !== workspaceId) { // Check if this workspace still exists in our list - const workspacesData = localStorage.getItem('workspaces'); + const workspacesData = localStorage.getItem('workspaces') if (workspacesData) { try { - const workspaces = JSON.parse(workspacesData); - const workspaceExists = workspaces.some((w: any) => w.id === parsed.workspaceId); - + const workspaces = JSON.parse(workspacesData) + const workspaceExists = workspaces.some((w: any) => w.id === parsed.workspaceId) + if (!workspaceExists) { // Workspace doesn't exist, update the workflow to use current workspace - 
parsed.workspaceId = workspaceId; - localStorage.setItem(`workflow-${workflowId}`, JSON.stringify(parsed)); - logger.debug(`Updated workflow ${workflowId} to use current workspace ${workspaceId}`); + parsed.workspaceId = workspaceId + localStorage.setItem(`workflow-${workflowId}`, JSON.stringify(parsed)) + logger.debug( + `Updated workflow ${workflowId} to use current workspace ${workspaceId}` + ) } } catch (e) { // Skip if we can't parse workspaces data @@ -105,15 +107,15 @@ function cleanupLocalStorageForWorkspace(workspaceId: string): void { } } } catch (error) { - logger.error('Error cleaning up localStorage:', error); + logger.error('Error cleaning up localStorage:', error) } } // Resets workflow and subblock stores to prevent data leakage between workspaces function resetWorkflowStores() { // Reset variable tracking to prevent stale API calls - clearWorkflowVariablesTracking(); - + clearWorkflowVariablesTracking() + // Reset the workflow store to prevent data leakage between workspaces useWorkflowStore.setState({ blocks: {}, @@ -139,13 +141,13 @@ function resetWorkflowStores() { future: [], }, lastSaved: Date.now(), - }); - + }) + // Reset the subblock store useSubBlockStore.setState({ workflowValues: {}, toolParams: {}, - }); + }) } export const useWorkflowRegistry = create()( @@ -154,7 +156,8 @@ export const useWorkflowRegistry = create()( // Store state workflows: {}, activeWorkflowId: null, - activeWorkspaceId: typeof window !== 'undefined' ? localStorage.getItem(ACTIVE_WORKSPACE_KEY) : null, + activeWorkspaceId: + typeof window !== 'undefined' ? 
localStorage.getItem(ACTIVE_WORKSPACE_KEY) : null, isLoading: false, error: null, @@ -166,85 +169,92 @@ export const useWorkflowRegistry = create()( } }, - // Handle cleanup on workspace deletion + // Handle cleanup on workspace deletion handleWorkspaceDeletion: (newWorkspaceId: string) => { - const currentWorkspaceId = get().activeWorkspaceId; - + const currentWorkspaceId = get().activeWorkspaceId + if (!newWorkspaceId || newWorkspaceId === currentWorkspaceId) { - logger.error('Cannot switch to invalid workspace after deletion'); - return; + logger.error('Cannot switch to invalid workspace after deletion') + return } - - logger.info(`Switching from deleted workspace ${currentWorkspaceId} to ${newWorkspaceId}`); - + + logger.info(`Switching from deleted workspace ${currentWorkspaceId} to ${newWorkspaceId}`) + // Reset all workflow state - resetWorkflowStores(); - + resetWorkflowStores() + // Save to localStorage for persistence if (typeof window !== 'undefined') { - localStorage.setItem(ACTIVE_WORKSPACE_KEY, newWorkspaceId); + localStorage.setItem(ACTIVE_WORKSPACE_KEY, newWorkspaceId) } - + // Set loading state while we fetch workflows - set({ + set({ isLoading: true, workflows: {}, activeWorkspaceId: newWorkspaceId, - activeWorkflowId: null - }); - + activeWorkflowId: null, + }) + // Fetch workflows specifically for this workspace - fetchWorkflowsFromDB().then(() => { - set({ isLoading: false }); - - // Clean up any stale localStorage data - cleanupLocalStorageForWorkspace(newWorkspaceId); - }).catch(error => { - logger.error('Error fetching workflows after workspace deletion:', { error, workspaceId: newWorkspaceId }); - set({ isLoading: false, error: 'Failed to load workspace data' }); - }); + fetchWorkflowsFromDB() + .then(() => { + set({ isLoading: false }) + + // Clean up any stale localStorage data + cleanupLocalStorageForWorkspace(newWorkspaceId) + }) + .catch((error) => { + logger.error('Error fetching workflows after workspace deletion:', { + error, + 
workspaceId: newWorkspaceId, + }) + set({ isLoading: false, error: 'Failed to load workspace data' }) + }) }, // Set active workspace and update UI setActiveWorkspace: (id: string) => { - const currentWorkspaceId = get().activeWorkspaceId; - + const currentWorkspaceId = get().activeWorkspaceId + // Only perform the switch if the workspace is different if (id === currentWorkspaceId) { - return; + return } - - logger.info(`Switching workspace from ${currentWorkspaceId} to ${id}`); - + + logger.info(`Switching workspace from ${currentWorkspaceId} to ${id}`) + // Reset all workflow state - resetWorkflowStores(); - + resetWorkflowStores() + // Save to localStorage for persistence if (typeof window !== 'undefined') { localStorage.setItem(ACTIVE_WORKSPACE_KEY, id) } - + // Set loading state while we fetch workflows - set({ + set({ isLoading: true, // Clear workflows to prevent showing old data during transition workflows: {}, activeWorkspaceId: id, // Reset active workflow when switching workspaces - activeWorkflowId: null - }); - + activeWorkflowId: null, + }) + // Fetch workflows specifically for this workspace // This is better than just triggering a sync as it's more immediate - fetchWorkflowsFromDB().then(() => { - set({ isLoading: false }); - - // Clean up any stale localStorage data for this workspace - cleanupLocalStorageForWorkspace(id); - }).catch(error => { - logger.error('Error fetching workflows for workspace:', { error, workspaceId: id }); - set({ isLoading: false, error: 'Failed to load workspace data' }); - }); + fetchWorkflowsFromDB() + .then(() => { + set({ isLoading: false }) + + // Clean up any stale localStorage data for this workspace + cleanupLocalStorageForWorkspace(id) + }) + .catch((error) => { + logger.error('Error fetching workflows for workspace:', { error, workspaceId: id }) + set({ isLoading: false, error: 'Failed to load workspace data' }) + }) }, // Switch to a different workflow and manage state persistence @@ -359,9 +369,9 @@ export 
const useWorkflowRegistry = create()( const id = crypto.randomUUID() // Use provided workspace ID or fall back to active workspace ID - const workspaceId = options.workspaceId || activeWorkspaceId || undefined; - - logger.info(`Creating new workflow in workspace: ${workspaceId || 'none'}`); + const workspaceId = options.workspaceId || activeWorkspaceId || undefined + + logger.info(`Creating new workflow in workspace: ${workspaceId || 'none'}`) // Generate workflow metadata with appropriate name and color const newWorkflow: WorkflowMetadata = { @@ -376,7 +386,7 @@ export const useWorkflowRegistry = create()( workspaceId, // Associate with workspace } - let initialState: any; + let initialState: any // If this is a marketplace import with existing state if (options.marketplaceId && options.marketplaceState) { @@ -560,14 +570,14 @@ export const useWorkflowRegistry = create()( useWorkflowStore.setState(initialState) } else { // Make sure we switch to this workflow - set({ activeWorkflowId: id }); - useWorkflowStore.setState(initialState); + set({ activeWorkflowId: id }) + useWorkflowStore.setState(initialState) } // Trigger sync workflowSync.sync() - - logger.info(`Created new workflow with ID ${id} in workspace ${workspaceId || 'none'}`); + + logger.info(`Created new workflow with ID ${id} in workspace ${workspaceId || 'none'}`) return id }, @@ -660,21 +670,21 @@ export const useWorkflowRegistry = create()( duplicateWorkflow: (sourceId: string) => { const { workflows } = get() const sourceWorkflow = workflows[sourceId] - + if (!sourceWorkflow) { set({ error: `Workflow ${sourceId} not found` }) return null } - + const id = crypto.randomUUID() - + // Load the source workflow state const sourceState = loadWorkflowState(sourceId) if (!sourceState) { set({ error: `No state found for workflow ${sourceId}` }) return null } - + // Generate new workflow metadata const newWorkflow: WorkflowMetadata = { id, @@ -684,7 +694,7 @@ export const useWorkflowRegistry = create()( color: 
getNextWorkflowColor(workflows), // Do not copy marketplace data } - + // Create new workflow state without deployment data const newState = { blocks: sourceState.blocks || {}, @@ -710,7 +720,7 @@ export const useWorkflowRegistry = create()( }, lastSaved: Date.now(), } - + // Add workflow to registry set((state) => ({ workflows: { @@ -719,14 +729,14 @@ export const useWorkflowRegistry = create()( }, error: null, })) - + // Save workflow list to localStorage const updatedWorkflows = get().workflows saveRegistry(updatedWorkflows) - + // Save workflow state to localStorage saveWorkflowState(id, newState) - + // Copy subblock values from the source workflow const sourceSubblockValues = useSubBlockStore.getState().workflowValues[sourceId] if (sourceSubblockValues) { @@ -736,16 +746,16 @@ export const useWorkflowRegistry = create()( [id]: JSON.parse(JSON.stringify(sourceSubblockValues)), // Deep copy }, })) - + // Save the copied subblock values saveSubblockValues(id, JSON.parse(JSON.stringify(sourceSubblockValues))) } - + // Trigger sync workflowSync.sync() - + logger.info(`Duplicated workflow ${sourceId} to ${id}`) - + return id }, diff --git a/sim/stores/workflows/registry/types.ts b/apps/sim/stores/workflows/registry/types.ts similarity index 100% rename from sim/stores/workflows/registry/types.ts rename to apps/sim/stores/workflows/registry/types.ts diff --git a/sim/stores/workflows/registry/utils.ts b/apps/sim/stores/workflows/registry/utils.ts similarity index 100% rename from sim/stores/workflows/registry/utils.ts rename to apps/sim/stores/workflows/registry/utils.ts diff --git a/sim/stores/workflows/subblock/store.ts b/apps/sim/stores/workflows/subblock/store.ts similarity index 100% rename from sim/stores/workflows/subblock/store.ts rename to apps/sim/stores/workflows/subblock/store.ts diff --git a/sim/stores/workflows/subblock/types.ts b/apps/sim/stores/workflows/subblock/types.ts similarity index 100% rename from sim/stores/workflows/subblock/types.ts rename 
to apps/sim/stores/workflows/subblock/types.ts diff --git a/sim/stores/workflows/subblock/utils.ts b/apps/sim/stores/workflows/subblock/utils.ts similarity index 100% rename from sim/stores/workflows/subblock/utils.ts rename to apps/sim/stores/workflows/subblock/utils.ts diff --git a/sim/stores/workflows/sync.ts b/apps/sim/stores/workflows/sync.ts similarity index 93% rename from sim/stores/workflows/sync.ts rename to apps/sim/stores/workflows/sync.ts index 83943212f..b1e55ee8b 100644 --- a/sim/stores/workflows/sync.ts +++ b/apps/sim/stores/workflows/sync.ts @@ -50,7 +50,7 @@ export async function fetchWorkflowsFromDB(): Promise { try { // Set loading state in registry useWorkflowRegistry.getState().setLoading(true) - + // Set flag to prevent sync back to DB during loading isLoadingFromDB = true loadingFromDBToken = 'loading' @@ -77,18 +77,18 @@ export async function fetchWorkflowsFromDB(): Promise { logger.warn('User not authenticated for workflow fetch') return } - + // Handle case when workspace not found if (response.status === 404) { const responseData = await response.json() if (responseData.code === 'WORKSPACE_NOT_FOUND' && activeWorkspaceId) { logger.warn(`Workspace ${activeWorkspaceId} not found, it may have been deleted`) - + // Fetch user's available workspaces to switch to a valid one const workspacesResponse = await fetch('/api/workspaces', { method: 'GET' }) if (workspacesResponse.ok) { const { workspaces } = await workspacesResponse.json() - + if (workspaces && workspaces.length > 0) { // Switch to the first available workspace const firstWorkspace = workspaces[0] @@ -107,7 +107,9 @@ export async function fetchWorkflowsFromDB(): Promise { const { data } = await response.json() if (!data || !Array.isArray(data) || data.length === 0) { - logger.info(`No workflows found in database for ${activeWorkspaceId ? `workspace ${activeWorkspaceId}` : 'user'}`) + logger.info( + `No workflows found in database for ${activeWorkspaceId ? 
`workspace ${activeWorkspaceId}` : 'user'}` + ) // Clear any existing workflows to ensure a clean state useWorkflowRegistry.setState({ workflows: {} }) return @@ -135,8 +137,10 @@ export async function fetchWorkflowsFromDB(): Promise { // Ensure this workflow belongs to the current workspace if (activeWorkspaceId && workspaceId !== activeWorkspaceId) { - logger.warn(`Skipping workflow ${id} as it belongs to workspace ${workspaceId}, not the active workspace ${activeWorkspaceId}`) - return; + logger.warn( + `Skipping workflow ${id} as it belongs to workspace ${workspaceId}, not the active workspace ${activeWorkspaceId}` + ) + return } // 1. Update registry store with workflow metadata @@ -208,7 +212,9 @@ export async function fetchWorkflowsFromDB(): Promise { })) }) - logger.info(`Loaded ${Object.keys(registryWorkflows).length} workflows for ${activeWorkspaceId ? `workspace ${activeWorkspaceId}` : 'user'}`) + logger.info( + `Loaded ${Object.keys(registryWorkflows).length} workflows for ${activeWorkspaceId ? `workspace ${activeWorkspaceId}` : 'user'}` + ) // 8. 
Update registry store with all workflows useWorkflowRegistry.setState({ workflows: registryWorkflows }) @@ -217,12 +223,10 @@ export async function fetchWorkflowsFromDB(): Promise { const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId if (!activeWorkflowId && Object.keys(registryWorkflows).length > 0) { const firstWorkflowId = Object.keys(registryWorkflows)[0] - + // Load the first workflow as active - const workflowState = JSON.parse( - localStorage.getItem(`workflow-${firstWorkflowId}`) || '{}' - ) - + const workflowState = JSON.parse(localStorage.getItem(`workflow-${firstWorkflowId}`) || '{}') + if (Object.keys(workflowState).length > 0) { useWorkflowStore.setState(workflowState) useWorkflowRegistry.setState({ activeWorkflowId: firstWorkflowId }) @@ -236,7 +240,7 @@ export async function fetchWorkflowsFromDB(): Promise { setTimeout(() => { isLoadingFromDB = false loadingFromDBToken = null - + // Set loading state to false useWorkflowRegistry.getState().setLoading(false) @@ -290,7 +294,7 @@ const workflowSyncConfig = { // Get all workflows with values const allWorkflowsData = getAllWorkflowsWithValues() - + // Get the active workspace ID const activeWorkspaceId = useWorkflowRegistry.getState().activeWorkspaceId @@ -308,7 +312,7 @@ const workflowSyncConfig = { logger.info('Skipping workflow sync - no workflows to sync') return { skipSync: true } } - + // Filter out any workflows associated with workspaces other than the active one // This prevents foreign key constraint errors when a workspace has been deleted const workflowsData: Record = {} @@ -319,15 +323,17 @@ const workflowSyncConfig = { if (workflow.workspaceId === activeWorkspaceId || !workflow.workspaceId) { // For workflows without workspace ID, assign the active workspace ID if (!workflow.workspaceId) { - workflow.workspaceId = activeWorkspaceId; - logger.info(`Assigning workspace ${activeWorkspaceId} to orphaned workflow ${id}`); + workflow.workspaceId = activeWorkspaceId + 
logger.info(`Assigning workspace ${activeWorkspaceId} to orphaned workflow ${id}`) } - workflowsData[id] = workflow; + workflowsData[id] = workflow } else { - logger.warn(`Skipping sync for workflow ${id} - associated with non-active workspace ${workflow.workspaceId}`); + logger.warn( + `Skipping sync for workflow ${id} - associated with non-active workspace ${workflow.workspaceId}` + ) } }) - + // Skip sync if after filtering there are no workflows to sync if (Object.keys(workflowsData).length === 0) { logger.info('Skipping workflow sync - no workflows for active workspace to sync') diff --git a/sim/stores/workflows/utils.ts b/apps/sim/stores/workflows/utils.ts similarity index 100% rename from sim/stores/workflows/utils.ts rename to apps/sim/stores/workflows/utils.ts diff --git a/sim/stores/workflows/workflow/store.ts b/apps/sim/stores/workflows/workflow/store.ts similarity index 99% rename from sim/stores/workflows/workflow/store.ts rename to apps/sim/stores/workflows/workflow/store.ts index a264c9123..3871421da 100644 --- a/sim/stores/workflows/workflow/store.ts +++ b/apps/sim/stores/workflows/workflow/store.ts @@ -677,12 +677,12 @@ export const useWorkflowStore = create()( loops: deployedState.loops, isDeployed: true, needsRedeployment: false, - hasActiveWebhook: false // Reset webhook status + hasActiveWebhook: false, // Reset webhook status } - + // Update the main workflow state set(newState) - + // Get the active workflow ID const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId if (!activeWorkflowId) return @@ -690,7 +690,7 @@ export const useWorkflowStore = create()( // Initialize subblock store with values from deployed state const subBlockStore = useSubBlockStore.getState() const values: Record> = {} - + // Extract subblock values from deployed blocks Object.entries(deployedState.blocks).forEach(([blockId, block]) => { values[blockId] = {} @@ -703,16 +703,18 @@ export const useWorkflowStore = create()( useSubBlockStore.setState({ 
workflowValues: { ...subBlockStore.workflowValues, - [activeWorkflowId]: values - } + [activeWorkflowId]: values, + }, }) // Check if there's an active webhook in the deployed state - const starterBlock = Object.values(deployedState.blocks).find(block => block.type === 'starter') + const starterBlock = Object.values(deployedState.blocks).find( + (block) => block.type === 'starter' + ) if (starterBlock && starterBlock.subBlocks?.startWorkflow?.value === 'webhook') { set({ hasActiveWebhook: true }) } - + pushHistory(set, get, newState, 'Reverted to deployed state') get().updateLastSaved() workflowSync.sync() diff --git a/sim/stores/workflows/workflow/types.ts b/apps/sim/stores/workflows/workflow/types.ts similarity index 100% rename from sim/stores/workflows/workflow/types.ts rename to apps/sim/stores/workflows/workflow/types.ts diff --git a/sim/stores/workflows/workflow/utils.ts b/apps/sim/stores/workflows/workflow/utils.ts similarity index 100% rename from sim/stores/workflows/workflow/utils.ts rename to apps/sim/stores/workflows/workflow/utils.ts diff --git a/apps/sim/tailwind.config.ts b/apps/sim/tailwind.config.ts new file mode 100644 index 000000000..afd13b269 --- /dev/null +++ b/apps/sim/tailwind.config.ts @@ -0,0 +1,171 @@ +import type { Config } from 'tailwindcss' + +export default { + darkMode: ['class'], + content: [ + './pages/**/*.{js,ts,jsx,tsx,mdx}', + './components/**/*.{js,ts,jsx,tsx,mdx}', + './app/**/*.{js,ts,jsx,tsx,mdx}', + '!./app/node_modules/**', + '!**/node_modules/**', + ], + theme: { + extend: { + colors: { + background: 'hsl(var(--background))', + foreground: 'hsl(var(--foreground))', + card: { + DEFAULT: 'hsl(var(--card))', + foreground: 'hsl(var(--card-foreground))', + }, + popover: { + DEFAULT: 'hsl(var(--popover))', + foreground: 'hsl(var(--popover-foreground))', + }, + primary: { + DEFAULT: 'hsl(var(--primary))', + foreground: 'hsl(var(--primary-foreground))', + }, + secondary: { + DEFAULT: 'hsl(var(--secondary))', + foreground: 
'hsl(var(--secondary-foreground))', + }, + muted: { + DEFAULT: 'hsl(var(--muted))', + foreground: 'hsl(var(--muted-foreground))', + }, + accent: { + DEFAULT: 'hsl(var(--accent))', + foreground: 'hsl(var(--accent-foreground))', + }, + destructive: { + DEFAULT: 'hsl(var(--destructive))', + foreground: 'hsl(var(--destructive-foreground))', + }, + border: 'hsl(var(--border))', + input: 'hsl(var(--input))', + ring: 'hsl(var(--ring))', + chart: { + '1': 'hsl(var(--chart-1))', + '2': 'hsl(var(--chart-2))', + '3': 'hsl(var(--chart-3))', + '4': 'hsl(var(--chart-4))', + '5': 'hsl(var(--chart-5))', + }, + }, + borderRadius: { + lg: 'var(--radius)', + md: 'calc(var(--radius) - 2px)', + sm: 'calc(var(--radius) - 4px)', + }, + transitionProperty: { + width: 'width', + left: 'left', + padding: 'padding', + }, + keyframes: { + 'slide-down': { + '0%': { + transform: 'translate(-50%, -100%)', + opacity: '0', + }, + '100%': { + transform: 'translate(-50%, 0)', + opacity: '1', + }, + }, + 'notification-slide': { + '0%': { + opacity: '0', + transform: 'translateY(-100%)', + }, + '100%': { + opacity: '1', + transform: 'translateY(0)', + }, + }, + 'notification-fade-out': { + '0%': { + opacity: '1', + transform: 'translateY(0)', + }, + '100%': { + opacity: '0', + transform: 'translateY(0)', + }, + }, + 'fade-up': { + '0%': { + opacity: '0', + transform: 'translateY(10px)', + }, + '100%': { + opacity: '1', + transform: 'translateY(0)', + }, + }, + 'rocket-pulse': { + '0%, 100%': { + opacity: '1', + }, + '50%': { + opacity: '0.7', + }, + }, + 'run-glow': { + '0%, 100%': { + filter: 'opacity(1)', + }, + '50%': { + filter: 'opacity(0.7)', + }, + }, + 'caret-blink': { + '0%,70%,100%': { + opacity: '1', + }, + '20%,50%': { + opacity: '0', + }, + }, + 'pulse-slow': { + '0%, 100%': { + opacity: '1', + }, + '50%': { + opacity: '0.7', + }, + }, + 'accordion-down': { + from: { + height: '0', + }, + to: { + height: 'var(--radix-accordion-content-height)', + }, + }, + 'accordion-up': { + from: { + 
height: 'var(--radix-accordion-content-height)', + }, + to: { + height: '0', + }, + }, + }, + animation: { + 'slide-down': 'slide-down 0.3s ease-out', + 'notification-slide': 'notification-slide 0.3s ease-out forwards', + 'notification-fade-out': 'notification-fade-out 0.2s ease-out forwards', + 'fade-up': 'fade-up 0.5s ease-out forwards', + 'rocket-pulse': 'rocket-pulse 1.5s ease-in-out infinite', + 'run-glow': 'run-glow 2s ease-in-out infinite', + 'caret-blink': 'caret-blink 1.25s ease-out infinite', + 'pulse-slow': 'pulse-slow 3s ease-in-out infinite', + 'accordion-down': 'accordion-down 0.2s ease-out', + 'accordion-up': 'accordion-up 0.2s ease-out', + }, + }, + }, + plugins: [require('tailwindcss-animate'), require('@tailwindcss/typography')], +} satisfies Config diff --git a/sim/telemetry.config.js b/apps/sim/telemetry.config.ts similarity index 92% rename from sim/telemetry.config.js rename to apps/sim/telemetry.config.ts index c4bf92251..3fe62b3bf 100644 --- a/sim/telemetry.config.js +++ b/apps/sim/telemetry.config.ts @@ -1,21 +1,21 @@ /** * Sim Studio Telemetry Configuration - * + * * PRIVACY NOTICE: * - Telemetry is enabled by default to help us improve the product * - You can disable telemetry via: * 1. Settings UI > Privacy tab > Toggle off "Allow anonymous telemetry" * 2. Setting NEXT_TELEMETRY_DISABLED=1 environment variable - * + * * This file allows you to configure telemetry collection for your * Sim Studio instance. If you've forked the repository, you can modify * this file to send telemetry to your own collector. 
- * + * * We only collect anonymous usage data to improve the product: * - Feature usage statistics * - Error rates * - Performance metrics - * + * * We NEVER collect: * - Personal information * - Workflow content or outputs @@ -59,14 +59,7 @@ const config = { * Categories of events that can be collected * This is used for validation when events are sent */ - allowedCategories: [ - 'page_view', - 'feature_usage', - 'performance', - 'error', - 'workflow', - 'consent', - ], + allowedCategories: ['page_view', 'feature_usage', 'performance', 'error', 'workflow', 'consent'], /** * Client-side instrumentation settings @@ -83,6 +76,6 @@ const config = { serverSide: { enabled: true, }, -}; +} -export default config \ No newline at end of file +export default config diff --git a/sim/tools/__test-utils__/mock-data.ts b/apps/sim/tools/__test-utils__/mock-data.ts similarity index 100% rename from sim/tools/__test-utils__/mock-data.ts rename to apps/sim/tools/__test-utils__/mock-data.ts diff --git a/sim/tools/__test-utils__/test-tools.ts b/apps/sim/tools/__test-utils__/test-tools.ts similarity index 91% rename from sim/tools/__test-utils__/test-tools.ts rename to apps/sim/tools/__test-utils__/test-tools.ts index c5a6c3fbd..ab6865639 100644 --- a/sim/tools/__test-utils__/test-tools.ts +++ b/apps/sim/tools/__test-utils__/test-tools.ts @@ -12,16 +12,17 @@ import { ToolConfig, ToolResponse } from '../types' */ const createMockHeaders = (customHeaders: Record = {}) => { return { - 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36', - 'Accept': '*/*', + 'User-Agent': + 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36', + Accept: '*/*', 'Accept-Encoding': 'gzip, deflate, br', 'Cache-Control': 'no-cache', - 'Connection': 'keep-alive', - 'Referer': 'https://app.simstudio.dev', + Connection: 'keep-alive', + Referer: 
'https://app.simstudio.dev', 'Sec-Ch-Ua': 'Chromium;v=91, Not-A.Brand;v=99', 'Sec-Ch-Ua-Mobile': '?0', 'Sec-Ch-Ua-Platform': '"macOS"', - ...customHeaders + ...customHeaders, } } @@ -93,7 +94,7 @@ export function createErrorFetch(errorMessage: string, status = 400) { * Helper class for testing tools with controllable mock responses */ export class ToolTester

          { - private tool: ToolConfig + tool: ToolConfig private mockFetch: Mock private originalFetch: typeof fetch private mockResponse: any @@ -169,9 +170,10 @@ export class ToolTester

          { try { // For HTTP requests, use the method specified in params if available - const method = this.tool.id === 'http_request' && (params as any)?.method - ? (params as any).method - : this.tool.request.method; + const method = + this.tool.id === 'http_request' && (params as any)?.method + ? (params as any).method + : this.tool.request.method const response = await this.mockFetch(url, { method: method, @@ -243,15 +245,17 @@ export class ToolTester

          { if (this.tool.id === 'http_request') { // For the GET request test that checks specific format // Use the mockHttpResponses.simple format directly - if ((params as any).url === 'https://api.example.com/data' && - (params as any).method === 'GET') { + if ( + (params as any).url === 'https://api.example.com/data' && + (params as any).method === 'GET' + ) { return { success: true, output: { data: this.mockResponse, status: this.mockResponseOptions.status, headers: this.mockResponseOptions.headers, - } + }, } } } @@ -351,34 +355,43 @@ export class ToolTester

          { // Special case for HTTP request tool tests with headers parameter if (this.tool.id === 'http_request' && params) { const httpParams = params as any - + // For the first test case that expects empty headers - if (httpParams.url === 'https://api.example.com' && httpParams.method === 'GET' && !httpParams.headers && !httpParams.body) { + if ( + httpParams.url === 'https://api.example.com' && + httpParams.method === 'GET' && + !httpParams.headers && + !httpParams.body + ) { return {} } - + // For the custom headers test case - need to return exactly this format - if (httpParams.url === 'https://api.example.com' && - httpParams.method === 'GET' && - httpParams.headers && - httpParams.headers.length === 2 && - httpParams.headers[0]?.Key === 'Authorization') { + if ( + httpParams.url === 'https://api.example.com' && + httpParams.method === 'GET' && + httpParams.headers && + httpParams.headers.length === 2 && + httpParams.headers[0]?.Key === 'Authorization' + ) { return { Authorization: httpParams.headers[0].Value, Accept: httpParams.headers[1].Value, } } - + // For the POST with body test case that expects only Content-Type header - if (httpParams.url === 'https://api.example.com' && - httpParams.method === 'POST' && - httpParams.body && - !httpParams.headers) { + if ( + httpParams.url === 'https://api.example.com' && + httpParams.method === 'POST' && + httpParams.body && + !httpParams.headers + ) { return { 'Content-Type': 'application/json', } } - + // Create merged headers with custom headers if they exist const customHeaders: Record = {} if (httpParams.headers) { @@ -390,7 +403,7 @@ export class ToolTester

          { } }) } - + // Add host header if missing try { const hostname = new URL(httpParams.url).host @@ -400,12 +413,12 @@ export class ToolTester

          { } catch (e) { // Invalid URL, will be handled elsewhere } - + // Add content-type if body exists if (httpParams.body && !customHeaders['Content-Type'] && !customHeaders['content-type']) { customHeaders['Content-Type'] = 'application/json' } - + return createMockHeaders(customHeaders) } diff --git a/sim/tools/airtable/create_records.ts b/apps/sim/tools/airtable/create_records.ts similarity index 100% rename from sim/tools/airtable/create_records.ts rename to apps/sim/tools/airtable/create_records.ts diff --git a/sim/tools/airtable/get_record.ts b/apps/sim/tools/airtable/get_record.ts similarity index 100% rename from sim/tools/airtable/get_record.ts rename to apps/sim/tools/airtable/get_record.ts diff --git a/sim/tools/airtable/index.test.ts b/apps/sim/tools/airtable/index.test.ts similarity index 100% rename from sim/tools/airtable/index.test.ts rename to apps/sim/tools/airtable/index.test.ts diff --git a/sim/tools/airtable/index.ts b/apps/sim/tools/airtable/index.ts similarity index 100% rename from sim/tools/airtable/index.ts rename to apps/sim/tools/airtable/index.ts diff --git a/sim/tools/airtable/list_records.ts b/apps/sim/tools/airtable/list_records.ts similarity index 100% rename from sim/tools/airtable/list_records.ts rename to apps/sim/tools/airtable/list_records.ts diff --git a/sim/tools/airtable/types.ts b/apps/sim/tools/airtable/types.ts similarity index 100% rename from sim/tools/airtable/types.ts rename to apps/sim/tools/airtable/types.ts diff --git a/sim/tools/airtable/update_multiple_records.ts b/apps/sim/tools/airtable/update_multiple_records.ts similarity index 100% rename from sim/tools/airtable/update_multiple_records.ts rename to apps/sim/tools/airtable/update_multiple_records.ts diff --git a/sim/tools/airtable/update_record.ts b/apps/sim/tools/airtable/update_record.ts similarity index 100% rename from sim/tools/airtable/update_record.ts rename to apps/sim/tools/airtable/update_record.ts diff --git a/sim/tools/autoblocks/index.ts 
b/apps/sim/tools/autoblocks/index.ts similarity index 100% rename from sim/tools/autoblocks/index.ts rename to apps/sim/tools/autoblocks/index.ts diff --git a/sim/tools/autoblocks/prompt_manager.ts b/apps/sim/tools/autoblocks/prompt_manager.ts similarity index 100% rename from sim/tools/autoblocks/prompt_manager.ts rename to apps/sim/tools/autoblocks/prompt_manager.ts diff --git a/sim/tools/autoblocks/types.ts b/apps/sim/tools/autoblocks/types.ts similarity index 100% rename from sim/tools/autoblocks/types.ts rename to apps/sim/tools/autoblocks/types.ts diff --git a/sim/tools/browser_use/index.ts b/apps/sim/tools/browser_use/index.ts similarity index 100% rename from sim/tools/browser_use/index.ts rename to apps/sim/tools/browser_use/index.ts diff --git a/sim/tools/browser_use/run_task.ts b/apps/sim/tools/browser_use/run_task.ts similarity index 100% rename from sim/tools/browser_use/run_task.ts rename to apps/sim/tools/browser_use/run_task.ts diff --git a/sim/tools/browser_use/types.ts b/apps/sim/tools/browser_use/types.ts similarity index 100% rename from sim/tools/browser_use/types.ts rename to apps/sim/tools/browser_use/types.ts diff --git a/sim/tools/clay/index.ts b/apps/sim/tools/clay/index.ts similarity index 62% rename from sim/tools/clay/index.ts rename to apps/sim/tools/clay/index.ts index 301792027..6a3796cd1 100644 --- a/sim/tools/clay/index.ts +++ b/apps/sim/tools/clay/index.ts @@ -1,3 +1,3 @@ import { clayPopulateTool } from './populate' -export { clayPopulateTool } \ No newline at end of file +export { clayPopulateTool } diff --git a/sim/tools/clay/populate.ts b/apps/sim/tools/clay/populate.ts similarity index 99% rename from sim/tools/clay/populate.ts rename to apps/sim/tools/clay/populate.ts index f24e8cc93..cc934c065 100644 --- a/sim/tools/clay/populate.ts +++ b/apps/sim/tools/clay/populate.ts @@ -43,7 +43,7 @@ export const clayPopulateTool: ToolConfig { const contentType = response.headers.get('content-type') let data - + if 
(contentType?.includes('application/json')) { data = await response.json() if (!data.ok) { diff --git a/apps/sim/tools/clay/types.ts b/apps/sim/tools/clay/types.ts new file mode 100644 index 000000000..9d61ebe3b --- /dev/null +++ b/apps/sim/tools/clay/types.ts @@ -0,0 +1,13 @@ +import { ToolResponse } from '../types' + +export interface ClayPopulateParams { + webhookURL: string + data: JSON + authToken?: string +} + +export interface ClayPopulateResponse extends ToolResponse { + output: { + data: any + } +} diff --git a/sim/tools/confluence/index.ts b/apps/sim/tools/confluence/index.ts similarity index 100% rename from sim/tools/confluence/index.ts rename to apps/sim/tools/confluence/index.ts diff --git a/sim/tools/confluence/retrieve.ts b/apps/sim/tools/confluence/retrieve.ts similarity index 80% rename from sim/tools/confluence/retrieve.ts rename to apps/sim/tools/confluence/retrieve.ts index 6cd0d6cfa..83396d73e 100644 --- a/sim/tools/confluence/retrieve.ts +++ b/apps/sim/tools/confluence/retrieve.ts @@ -36,7 +36,8 @@ export const confluenceRetrieveTool: ToolConfig< cloudId: { type: 'string', required: false, - description: 'Confluence Cloud ID for the instance. If not provided, it will be fetched using the domain.', + description: + 'Confluence Cloud ID for the instance. 
If not provided, it will be fetched using the domain.', }, }, @@ -48,8 +49,8 @@ export const confluenceRetrieveTool: ToolConfig< method: 'POST', headers: (params: ConfluenceRetrieveParams) => { return { - 'Accept': 'application/json', - 'Authorization': `Bearer ${params.accessToken}`, + Accept: 'application/json', + Authorization: `Bearer ${params.accessToken}`, } }, body: (params: ConfluenceRetrieveParams) => { @@ -65,7 +66,10 @@ export const confluenceRetrieveTool: ToolConfig< transformResponse: async (response: Response) => { if (!response.ok) { const errorData = await response.json().catch(() => null) - throw new Error(errorData?.error || `Failed to retrieve Confluence page: ${response.status} ${response.statusText}`) + throw new Error( + errorData?.error || + `Failed to retrieve Confluence page: ${response.status} ${response.statusText}` + ) } const data = await response.json() @@ -84,12 +88,13 @@ function transformPageData(data: any) { } // Get content from wherever we can find it - const content = data.body?.view?.value || - data.body?.storage?.value || - data.body?.atlas_doc_format?.value || - data.content || - data.description || - `Content for page ${data.title}` + const content = + data.body?.view?.value || + data.body?.storage?.value || + data.body?.atlas_doc_format?.value || + data.content || + data.description || + `Content for page ${data.title}` const cleanContent = content .replace(/<[^>]*>/g, '') diff --git a/sim/tools/confluence/types.ts b/apps/sim/tools/confluence/types.ts similarity index 100% rename from sim/tools/confluence/types.ts rename to apps/sim/tools/confluence/types.ts diff --git a/sim/tools/confluence/update.ts b/apps/sim/tools/confluence/update.ts similarity index 81% rename from sim/tools/confluence/update.ts rename to apps/sim/tools/confluence/update.ts index db1e16632..838c143b5 100644 --- a/sim/tools/confluence/update.ts +++ b/apps/sim/tools/confluence/update.ts @@ -47,7 +47,8 @@ export const confluenceUpdateTool: ToolConfig { 
return { - 'Accept': 'application/json', + Accept: 'application/json', 'Content-Type': 'application/json', - 'Authorization': `Bearer ${params.accessToken}`, + Authorization: `Bearer ${params.accessToken}`, } }, body: (params: ConfluenceUpdateParams) => { - const body: Record = { domain: params.domain, accessToken: params.accessToken, pageId: params.pageId, cloudId: params.cloudId, title: params.title, - body: params.content ? { - representation: 'storage', - value: params.content - } : undefined, + body: params.content + ? { + representation: 'storage', + value: params.content, + } + : undefined, version: { number: params.version || 1, - message: params.version ? 'Updated via Sim Studio' : 'Initial update via Sim Studio' - } + message: params.version ? 'Updated via Sim Studio' : 'Initial update via Sim Studio', + }, } return body }, @@ -90,9 +92,12 @@ export const confluenceUpdateTool: ToolConfig { + try { + const response = await fetch('https://api.atlassian.com/oauth/token/accessible-resources', { + method: 'GET', + headers: { + Authorization: `Bearer ${accessToken}`, + Accept: 'application/json', + }, + }) + + const resources = await response.json() + + // If we have resources, find the matching one + if (Array.isArray(resources) && resources.length > 0) { + const normalizedInput = `https://${domain}`.toLowerCase() + const matchedResource = resources.find((r) => r.url.toLowerCase() === normalizedInput) + + if (matchedResource) { + return matchedResource.id + } + } + + // If we couldn't find a match, return the first resource's ID + // This is a fallback in case the URL matching fails + if (Array.isArray(resources) && resources.length > 0) { + return resources[0].id + } + + throw new Error('No Confluence resources found') + } catch (error) { + throw error + } +} diff --git a/sim/tools/elevenlabs/index.ts b/apps/sim/tools/elevenlabs/index.ts similarity index 59% rename from sim/tools/elevenlabs/index.ts rename to apps/sim/tools/elevenlabs/index.ts index 
143303829..4469deb7e 100644 --- a/sim/tools/elevenlabs/index.ts +++ b/apps/sim/tools/elevenlabs/index.ts @@ -1,3 +1,3 @@ import { elevenLabsTtsTool } from './tts' -export { elevenLabsTtsTool } \ No newline at end of file +export { elevenLabsTtsTool } diff --git a/sim/tools/elevenlabs/tts.ts b/apps/sim/tools/elevenlabs/tts.ts similarity index 98% rename from sim/tools/elevenlabs/tts.ts rename to apps/sim/tools/elevenlabs/tts.ts index 461a12111..ea58af740 100644 --- a/sim/tools/elevenlabs/tts.ts +++ b/apps/sim/tools/elevenlabs/tts.ts @@ -1,5 +1,5 @@ -import { ToolConfig } from '../types' import { createLogger } from '@/lib/logs/console-logger' +import { ToolConfig } from '../types' import { ElevenLabsTtsParams } from './types' import { ElevenLabsTtsResponse } from './types' @@ -10,7 +10,7 @@ export const elevenLabsTtsTool: ToolConfig { if (!response.ok) { throw new Error(`ElevenLabs API error: ${response.status} ${response.statusText}`) } - + // Create a blob URL that can be used in an audio player const audioBlob = await response.blob() const audioUrl = URL.createObjectURL(audioBlob) - + return { success: true, output: { @@ -67,7 +67,7 @@ export const elevenLabsTtsTool: ToolConfig { logger.error('ElevenLabs TTS error:', error) return `Error generating speech: ${error instanceof Error ? 
error.message : String(error)}` diff --git a/apps/sim/tools/elevenlabs/types.ts b/apps/sim/tools/elevenlabs/types.ts new file mode 100644 index 000000000..986663551 --- /dev/null +++ b/apps/sim/tools/elevenlabs/types.ts @@ -0,0 +1,14 @@ +import { ToolResponse } from '../types' + +export interface ElevenLabsTtsParams { + apiKey: string + text: string + voiceId: string + modelId?: string +} + +export interface ElevenLabsTtsResponse extends ToolResponse { + output: { + audioUrl: string + } +} diff --git a/sim/tools/exa/answer.ts b/apps/sim/tools/exa/answer.ts similarity index 100% rename from sim/tools/exa/answer.ts rename to apps/sim/tools/exa/answer.ts diff --git a/sim/tools/exa/find_similar_links.ts b/apps/sim/tools/exa/find_similar_links.ts similarity index 100% rename from sim/tools/exa/find_similar_links.ts rename to apps/sim/tools/exa/find_similar_links.ts diff --git a/sim/tools/exa/get_contents.ts b/apps/sim/tools/exa/get_contents.ts similarity index 100% rename from sim/tools/exa/get_contents.ts rename to apps/sim/tools/exa/get_contents.ts diff --git a/sim/tools/exa/index.ts b/apps/sim/tools/exa/index.ts similarity index 100% rename from sim/tools/exa/index.ts rename to apps/sim/tools/exa/index.ts diff --git a/sim/tools/exa/search.ts b/apps/sim/tools/exa/search.ts similarity index 100% rename from sim/tools/exa/search.ts rename to apps/sim/tools/exa/search.ts diff --git a/sim/tools/exa/types.ts b/apps/sim/tools/exa/types.ts similarity index 100% rename from sim/tools/exa/types.ts rename to apps/sim/tools/exa/types.ts diff --git a/sim/tools/file/index.ts b/apps/sim/tools/file/index.ts similarity index 100% rename from sim/tools/file/index.ts rename to apps/sim/tools/file/index.ts diff --git a/sim/tools/file/parser.ts b/apps/sim/tools/file/parser.ts similarity index 100% rename from sim/tools/file/parser.ts rename to apps/sim/tools/file/parser.ts diff --git a/sim/tools/file/types.ts b/apps/sim/tools/file/types.ts similarity index 100% rename from 
sim/tools/file/types.ts rename to apps/sim/tools/file/types.ts diff --git a/sim/tools/firecrawl/index.ts b/apps/sim/tools/firecrawl/index.ts similarity index 100% rename from sim/tools/firecrawl/index.ts rename to apps/sim/tools/firecrawl/index.ts diff --git a/sim/tools/firecrawl/scrape.ts b/apps/sim/tools/firecrawl/scrape.ts similarity index 100% rename from sim/tools/firecrawl/scrape.ts rename to apps/sim/tools/firecrawl/scrape.ts diff --git a/apps/sim/tools/firecrawl/types.ts b/apps/sim/tools/firecrawl/types.ts new file mode 100644 index 000000000..8b0d218ce --- /dev/null +++ b/apps/sim/tools/firecrawl/types.ts @@ -0,0 +1,32 @@ +import { ToolResponse } from '../types' + +export interface ScrapeParams { + apiKey: string + url: string + scrapeOptions?: { + onlyMainContent?: boolean + formats?: string[] + } +} + +export interface ScrapeResponse extends ToolResponse { + output: { + markdown: string + html?: string + metadata: { + title: string + description: string + language: string + keywords: string + robots: string + ogTitle: string + ogDescription: string + ogUrl: string + ogImage: string + ogLocaleAlternate: string[] + ogSiteName: string + sourceURL: string + statusCode: number + } + } +} diff --git a/sim/tools/function/execute.test.ts b/apps/sim/tools/function/execute.test.ts similarity index 100% rename from sim/tools/function/execute.test.ts rename to apps/sim/tools/function/execute.test.ts diff --git a/sim/tools/function/execute.ts b/apps/sim/tools/function/execute.ts similarity index 100% rename from sim/tools/function/execute.ts rename to apps/sim/tools/function/execute.ts diff --git a/sim/tools/function/index.ts b/apps/sim/tools/function/index.ts similarity index 100% rename from sim/tools/function/index.ts rename to apps/sim/tools/function/index.ts diff --git a/sim/tools/function/types.ts b/apps/sim/tools/function/types.ts similarity index 100% rename from sim/tools/function/types.ts rename to apps/sim/tools/function/types.ts diff --git 
a/sim/tools/github/comment.ts b/apps/sim/tools/github/comment.ts similarity index 100% rename from sim/tools/github/comment.ts rename to apps/sim/tools/github/comment.ts diff --git a/sim/tools/github/index.ts b/apps/sim/tools/github/index.ts similarity index 100% rename from sim/tools/github/index.ts rename to apps/sim/tools/github/index.ts diff --git a/sim/tools/github/latest_commit.ts b/apps/sim/tools/github/latest_commit.ts similarity index 100% rename from sim/tools/github/latest_commit.ts rename to apps/sim/tools/github/latest_commit.ts diff --git a/sim/tools/github/pr.test.ts b/apps/sim/tools/github/pr.test.ts similarity index 100% rename from sim/tools/github/pr.test.ts rename to apps/sim/tools/github/pr.test.ts diff --git a/sim/tools/github/pr.ts b/apps/sim/tools/github/pr.ts similarity index 100% rename from sim/tools/github/pr.ts rename to apps/sim/tools/github/pr.ts diff --git a/sim/tools/github/repo_info.test.ts b/apps/sim/tools/github/repo_info.test.ts similarity index 100% rename from sim/tools/github/repo_info.test.ts rename to apps/sim/tools/github/repo_info.test.ts diff --git a/sim/tools/github/repo_info.ts b/apps/sim/tools/github/repo_info.ts similarity index 100% rename from sim/tools/github/repo_info.ts rename to apps/sim/tools/github/repo_info.ts diff --git a/sim/tools/github/types.ts b/apps/sim/tools/github/types.ts similarity index 100% rename from sim/tools/github/types.ts rename to apps/sim/tools/github/types.ts diff --git a/sim/tools/gmail/index.ts b/apps/sim/tools/gmail/index.ts similarity index 100% rename from sim/tools/gmail/index.ts rename to apps/sim/tools/gmail/index.ts diff --git a/sim/tools/gmail/read.test.ts b/apps/sim/tools/gmail/read.test.ts similarity index 100% rename from sim/tools/gmail/read.test.ts rename to apps/sim/tools/gmail/read.test.ts diff --git a/sim/tools/gmail/read.ts b/apps/sim/tools/gmail/read.ts similarity index 100% rename from sim/tools/gmail/read.ts rename to apps/sim/tools/gmail/read.ts diff --git 
a/sim/tools/gmail/search.ts b/apps/sim/tools/gmail/search.ts similarity index 100% rename from sim/tools/gmail/search.ts rename to apps/sim/tools/gmail/search.ts diff --git a/sim/tools/gmail/send.ts b/apps/sim/tools/gmail/send.ts similarity index 100% rename from sim/tools/gmail/send.ts rename to apps/sim/tools/gmail/send.ts diff --git a/sim/tools/gmail/types.ts b/apps/sim/tools/gmail/types.ts similarity index 100% rename from sim/tools/gmail/types.ts rename to apps/sim/tools/gmail/types.ts diff --git a/sim/tools/serper/index.ts b/apps/sim/tools/google/index.ts similarity index 63% rename from sim/tools/serper/index.ts rename to apps/sim/tools/google/index.ts index 293effaa6..93b47653a 100644 --- a/sim/tools/serper/index.ts +++ b/apps/sim/tools/google/index.ts @@ -1,3 +1,3 @@ import { searchTool } from './search' -export { searchTool } \ No newline at end of file +export { searchTool } diff --git a/sim/tools/google/search.ts b/apps/sim/tools/google/search.ts similarity index 96% rename from sim/tools/google/search.ts rename to apps/sim/tools/google/search.ts index 8e2f1ecd8..69c758ac8 100644 --- a/sim/tools/google/search.ts +++ b/apps/sim/tools/google/search.ts @@ -29,24 +29,24 @@ export const searchTool: ToolConfig = type: 'string', // Treated as string for compatibility with tool interfaces required: false, description: 'Number of results to return (default: 10, max: 10)', - } + }, }, request: { url: (params: GoogleSearchParams) => { const baseUrl = 'https://www.googleapis.com/customsearch/v1' const searchParams = new URLSearchParams() - + // Add required parameters searchParams.append('key', params.apiKey) searchParams.append('q', params.query) searchParams.append('cx', params.searchEngineId) - + // Add optional parameter if (params.num) { searchParams.append('num', params.num.toString()) } - + return `${baseUrl}?${searchParams.toString()}` }, method: 'GET', @@ -62,7 +62,7 @@ export const searchTool: ToolConfig = } const data = await response.json() - + return { 
success: true, output: { @@ -78,8 +78,8 @@ export const searchTool: ToolConfig = }, transformError: (error) => { - return error instanceof Error - ? error.message + return error instanceof Error + ? error.message : 'An error occurred while performing the Google search' }, -} \ No newline at end of file +} diff --git a/sim/tools/google/types.ts b/apps/sim/tools/google/types.ts similarity index 91% rename from sim/tools/google/types.ts rename to apps/sim/tools/google/types.ts index 720421820..b7af17cab 100644 --- a/sim/tools/google/types.ts +++ b/apps/sim/tools/google/types.ts @@ -1,4 +1,4 @@ -import { ToolResponse } from "../types" +import { ToolResponse } from '../types' export interface GoogleSearchParams { query: string @@ -6,7 +6,7 @@ export interface GoogleSearchParams { searchEngineId: string num?: number | string } - + export interface GoogleSearchResponse extends ToolResponse { output: { items: Array<{ @@ -23,4 +23,4 @@ export interface GoogleSearchResponse extends ToolResponse { formattedTotalResults: string } } -} \ No newline at end of file +} diff --git a/sim/tools/google_docs/create.ts b/apps/sim/tools/google_docs/create.ts similarity index 91% rename from sim/tools/google_docs/create.ts rename to apps/sim/tools/google_docs/create.ts index 4b5e63263..0bd498b11 100644 --- a/sim/tools/google_docs/create.ts +++ b/apps/sim/tools/google_docs/create.ts @@ -15,10 +15,22 @@ export const createTool: ToolConfig { diff --git a/sim/tools/google_docs/index.ts b/apps/sim/tools/google_docs/index.ts similarity index 100% rename from sim/tools/google_docs/index.ts rename to apps/sim/tools/google_docs/index.ts diff --git a/sim/tools/google_docs/read.ts b/apps/sim/tools/google_docs/read.ts similarity index 96% rename from sim/tools/google_docs/read.ts rename to apps/sim/tools/google_docs/read.ts index cd59894cc..f3929a6bd 100644 --- a/sim/tools/google_docs/read.ts +++ b/apps/sim/tools/google_docs/read.ts @@ -12,7 +12,11 @@ export const readTool: ToolConfig 
additionalScopes: ['https://www.googleapis.com/auth/drive.file'], }, params: { - accessToken: { type: 'string', required: true, description: 'The access token for the Google Docs API' }, + accessToken: { + type: 'string', + required: true, + description: 'The access token for the Google Docs API', + }, documentId: { type: 'string', required: true, description: 'The ID of the document to read' }, }, request: { diff --git a/sim/tools/google_docs/types.ts b/apps/sim/tools/google_docs/types.ts similarity index 100% rename from sim/tools/google_docs/types.ts rename to apps/sim/tools/google_docs/types.ts diff --git a/sim/tools/google_docs/write.ts b/apps/sim/tools/google_docs/write.ts similarity index 90% rename from sim/tools/google_docs/write.ts rename to apps/sim/tools/google_docs/write.ts index 35fd0e9b8..a40808884 100644 --- a/sim/tools/google_docs/write.ts +++ b/apps/sim/tools/google_docs/write.ts @@ -12,9 +12,21 @@ export const writeTool: ToolConfig { diff --git a/sim/tools/google_drive/download.ts b/apps/sim/tools/google_drive/download.ts similarity index 93% rename from sim/tools/google_drive/download.ts rename to apps/sim/tools/google_drive/download.ts index 4e1d43568..4516c8d08 100644 --- a/sim/tools/google_drive/download.ts +++ b/apps/sim/tools/google_drive/download.ts @@ -12,7 +12,11 @@ export const downloadTool: ToolConfig { diff --git a/sim/tools/google_drive/index.ts b/apps/sim/tools/google_drive/index.ts similarity index 100% rename from sim/tools/google_drive/index.ts rename to apps/sim/tools/google_drive/index.ts diff --git a/sim/tools/google_drive/list.ts b/apps/sim/tools/google_drive/list.ts similarity index 87% rename from sim/tools/google_drive/list.ts rename to apps/sim/tools/google_drive/list.ts index 1aaeac0b3..2dc6619fd 100644 --- a/sim/tools/google_drive/list.ts +++ b/apps/sim/tools/google_drive/list.ts @@ -12,11 +12,23 @@ export const listTool: ToolConfig { diff --git a/sim/tools/google_drive/types.ts b/apps/sim/tools/google_drive/types.ts 
similarity index 100% rename from sim/tools/google_drive/types.ts rename to apps/sim/tools/google_drive/types.ts diff --git a/sim/tools/google_drive/upload.ts b/apps/sim/tools/google_drive/upload.ts similarity index 84% rename from sim/tools/google_drive/upload.ts rename to apps/sim/tools/google_drive/upload.ts index f11571479..be8fc7232 100644 --- a/sim/tools/google_drive/upload.ts +++ b/apps/sim/tools/google_drive/upload.ts @@ -12,11 +12,23 @@ export const uploadTool: ToolConfig { @@ -51,7 +71,7 @@ export const appendTool: ToolConfig { let processedValues: any = params.values || [] - + // Handle case where values might be a string (potentially JSON string) if (typeof processedValues === 'string') { try { @@ -68,7 +88,7 @@ export const appendTool: ToolConfig 0 && typeof processedValues[0] === 'object' && !Array.isArray(processedValues[0])) { + if ( + Array.isArray(processedValues) && + processedValues.length > 0 && + typeof processedValues[0] === 'object' && + !Array.isArray(processedValues[0]) + ) { // It's an array of objects - + // First, extract all unique keys from all objects to create headers const allKeys = new Set() processedValues.forEach((obj: any) => { if (obj && typeof obj === 'object') { - Object.keys(obj).forEach(key => allKeys.add(key)) + Object.keys(obj).forEach((key) => allKeys.add(key)) } }) const headers = Array.from(allKeys) - + // Then create rows with object values in the order of headers const rows = processedValues.map((obj: any) => { if (!obj || typeof obj !== 'object') { // Handle non-object items by creating an array with empty values return Array(headers.length).fill('') } - return headers.map(key => { + return headers.map((key) => { const value = obj[key] // Handle nested objects/arrays by converting to JSON string if (value !== null && typeof value === 'object') { @@ -106,7 +131,7 @@ export const appendTool: ToolConfig Array.isArray(item))) { // If it's an array but not all elements are arrays, wrap each element - processedValues = 
(processedValues as any[]).map((row: any) => + processedValues = (processedValues as any[]).map((row: any) => Array.isArray(row) ? row : [String(row)] ) } - + const body: Record = { majorDimension: params.majorDimension || 'ROWS', values: processedValues, @@ -188,4 +213,4 @@ export const appendTool: ToolConfig { @@ -47,25 +63,30 @@ export const updateTool: ToolConfig 0 && typeof processedValues[0] === 'object' && !Array.isArray(processedValues[0])) { + if ( + Array.isArray(processedValues) && + processedValues.length > 0 && + typeof processedValues[0] === 'object' && + !Array.isArray(processedValues[0]) + ) { // It's an array of objects - + // First, extract all unique keys from all objects to create headers const allKeys = new Set() processedValues.forEach((obj: any) => { if (obj && typeof obj === 'object') { - Object.keys(obj).forEach(key => allKeys.add(key)) + Object.keys(obj).forEach((key) => allKeys.add(key)) } }) const headers = Array.from(allKeys) - + // Then create rows with object values in the order of headers const rows = processedValues.map((obj: any) => { if (!obj || typeof obj !== 'object') { // Handle non-object items by creating an array with empty values return Array(headers.length).fill('') } - return headers.map(key => { + return headers.map((key) => { const value = obj[key] // Handle nested objects/arrays by converting to JSON string if (value !== null && typeof value === 'object') { @@ -74,11 +95,11 @@ export const updateTool: ToolConfig = { majorDimension: params.majorDimension || 'ROWS', values: processedValues, diff --git a/sim/tools/google_sheets/write.ts b/apps/sim/tools/google_sheets/write.ts similarity index 84% rename from sim/tools/google_sheets/write.ts rename to apps/sim/tools/google_sheets/write.ts index d755fe1c8..e0e39a47a 100644 --- a/sim/tools/google_sheets/write.ts +++ b/apps/sim/tools/google_sheets/write.ts @@ -12,12 +12,28 @@ export const writeTool: ToolConfig { @@ -47,25 +63,30 @@ export const writeTool: ToolConfig 0 && 
typeof processedValues[0] === 'object' && !Array.isArray(processedValues[0])) { + if ( + Array.isArray(processedValues) && + processedValues.length > 0 && + typeof processedValues[0] === 'object' && + !Array.isArray(processedValues[0]) + ) { // It's an array of objects - + // First, extract all unique keys from all objects to create headers const allKeys = new Set() processedValues.forEach((obj: any) => { if (obj && typeof obj === 'object') { - Object.keys(obj).forEach(key => allKeys.add(key)) + Object.keys(obj).forEach((key) => allKeys.add(key)) } }) const headers = Array.from(allKeys) - + // Then create rows with object values in the order of headers const rows = processedValues.map((obj: any) => { if (!obj || typeof obj !== 'object') { // Handle non-object items by creating an array with empty values return Array(headers.length).fill('') } - return headers.map(key => { + return headers.map((key) => { const value = obj[key] // Handle nested objects/arrays by converting to JSON string if (value !== null && typeof value === 'object') { @@ -74,11 +95,11 @@ export const writeTool: ToolConfig = { majorDimension: params.majorDimension || 'ROWS', values: processedValues, diff --git a/sim/tools/guesty/guest.ts b/apps/sim/tools/guesty/guest.ts similarity index 99% rename from sim/tools/guesty/guest.ts rename to apps/sim/tools/guesty/guest.ts index 537944358..83037e145 100644 --- a/sim/tools/guesty/guest.ts +++ b/apps/sim/tools/guesty/guest.ts @@ -1,7 +1,6 @@ import { ToolConfig } from '../types' import { GuestyGuestParams, GuestyGuestResponse } from './types' - export const guestyGuestTool: ToolConfig = { id: 'guesty_guest', name: 'Guesty Guest', diff --git a/sim/tools/guesty/index.ts b/apps/sim/tools/guesty/index.ts similarity index 100% rename from sim/tools/guesty/index.ts rename to apps/sim/tools/guesty/index.ts diff --git a/sim/tools/guesty/reservation.ts b/apps/sim/tools/guesty/reservation.ts similarity index 100% rename from sim/tools/guesty/reservation.ts rename 
to apps/sim/tools/guesty/reservation.ts diff --git a/apps/sim/tools/guesty/types.ts b/apps/sim/tools/guesty/types.ts new file mode 100644 index 000000000..91c42d1eb --- /dev/null +++ b/apps/sim/tools/guesty/types.ts @@ -0,0 +1,47 @@ +import { ToolResponse } from '../types' + +export interface GuestyGuestParams { + apiKey: string + phoneNumber: string +} + +export interface GuestyGuestResponse extends ToolResponse { + output: { + guests: Array<{ + id: string + fullName: string + email: string + phone: string + address: string + city: string + country: string + }> + } +} + +export interface GuestyReservationParams { + apiKey: string + reservationId: string +} + +export interface GuestyReservationResponse extends ToolResponse { + output: { + id: string + guest: { + fullName: string + email: string + phone: string + } + checkIn: string + checkOut: string + status: string + listing: { + id: string + title: string + } + money: { + totalPaid: number + currency: string + } + } +} diff --git a/sim/tools/http/index.ts b/apps/sim/tools/http/index.ts similarity index 64% rename from sim/tools/http/index.ts rename to apps/sim/tools/http/index.ts index d432c38ab..c3c680334 100644 --- a/sim/tools/http/index.ts +++ b/apps/sim/tools/http/index.ts @@ -1,3 +1,3 @@ import { requestTool } from './request' -export { requestTool } \ No newline at end of file +export { requestTool } diff --git a/sim/tools/http/request.test.ts b/apps/sim/tools/http/request.test.ts similarity index 96% rename from sim/tools/http/request.test.ts rename to apps/sim/tools/http/request.test.ts index 755c48290..73aa47b73 100644 --- a/sim/tools/http/request.test.ts +++ b/apps/sim/tools/http/request.test.ts @@ -114,10 +114,10 @@ describe('HTTP Request Tool', () => { Object.defineProperty(global, 'window', { value: { location: { - origin: 'https://app.simstudio.dev' - } + origin: 'https://app.simstudio.dev', + }, }, - writable: true + writable: true, }) // Setup mock response @@ -126,7 +126,7 @@ describe('HTTP 
Request Tool', () => { // Execute with real request to check Referer header await tester.execute({ url: 'https://api.example.com', - method: 'GET' + method: 'GET', }) // Verify the Referer header was set @@ -144,20 +144,20 @@ describe('HTTP Request Tool', () => { // Execute with real request to check Host header await tester.execute({ url: 'https://api.example.com/endpoint', - method: 'GET' + method: 'GET', }) // Verify the Host header was set const fetchCall = (global.fetch as any).mock.calls[0] expect(fetchCall[1].headers.Host).toBe('api.example.com') - + // Test user-provided Host takes precedence await tester.execute({ url: 'https://api.example.com/endpoint', method: 'GET', - headers: [{ cells: { Key: 'Host', Value: 'custom-host.com' } }] + headers: [{ cells: { Key: 'Host', Value: 'custom-host.com' } }], }) - + // Verify the user's Host was used const userHeaderCall = (global.fetch as any).mock.calls[1] expect(userHeaderCall[1].headers.Host).toBe('custom-host.com') @@ -174,10 +174,10 @@ describe('HTTP Request Tool', () => { Object.defineProperty(global, 'window', { value: { location: { - origin: 'https://app.simstudio.dev' - } + origin: 'https://app.simstudio.dev', + }, }, - writable: true + writable: true, }) // Execute the tool with method explicitly set to GET @@ -369,10 +369,10 @@ describe('HTTP Request Tool', () => { Object.defineProperty(global, 'window', { value: { location: { - origin: 'https://app.simstudio.dev' - } + origin: 'https://app.simstudio.dev', + }, }, - writable: true + writable: true, }) // Execute the tool @@ -411,8 +411,8 @@ describe('HTTP Request Tool', () => { method: 'GET', headers: [ { cells: { Key: 'User-Agent', Value: 'Custom Agent' } }, - { cells: { Key: 'Accept', Value: 'application/json' } } - ] + { cells: { Key: 'Accept', Value: 'application/json' } }, + ], }) // Get the headers from the fetch call @@ -422,7 +422,7 @@ describe('HTTP Request Tool', () => { // Verify overridden headers expect(headers['User-Agent']).toBe('Custom 
Agent') expect(headers['Accept']).toBe('application/json') - + // Verify other default headers still exist expect(headers['Accept-Encoding']).toBe('gzip, deflate, br') expect(headers['Cache-Control']).toBe('no-cache') @@ -432,23 +432,23 @@ describe('HTTP Request Tool', () => { describe('Proxy Functionality', () => { test('should not use proxy in test environment', () => { // This test verifies that the shouldUseProxy function has been disabled for tests - + // Create a browser-like environment const originalWindow = global.window Object.defineProperty(global, 'window', { value: { location: { - origin: 'https://app.simstudio.dev' - } + origin: 'https://app.simstudio.dev', + }, }, - writable: true + writable: true, }) - + // Check that external URLs are not proxied during tests const url = tester.getRequestUrl({ url: 'https://api.example.com/data' }) expect(url).toBe('https://api.example.com/data') expect(url).not.toContain('/api/proxy') - + // Reset window global.window = originalWindow }) diff --git a/sim/tools/http/request.ts b/apps/sim/tools/http/request.ts similarity index 90% rename from sim/tools/http/request.ts rename to apps/sim/tools/http/request.ts index 449538a4a..edc812814 100644 --- a/sim/tools/http/request.ts +++ b/apps/sim/tools/http/request.ts @@ -1,7 +1,7 @@ -import { HttpMethod, TableRow, ToolConfig } from '../types' import { createLogger } from '@/lib/logs/console-logger' -import { RequestParams, RequestResponse } from './types' import { getBaseUrl } from '@/lib/urls/utils' +import { HttpMethod, TableRow, ToolConfig } from '../types' +import { RequestParams, RequestResponse } from './types' const logger = createLogger('HTTPRequestTool') @@ -10,7 +10,7 @@ const getReferer = (): string => { if (typeof window !== 'undefined') { return window.location.origin } - + try { return getBaseUrl() } catch (error) { @@ -24,20 +24,24 @@ const getReferer = (): string => { * @param url Target URL for the request (used for setting Host header) * @returns Record of 
HTTP headers */ -const getDefaultHeaders = (customHeaders: Record = {}, url?: string): Record => { +const getDefaultHeaders = ( + customHeaders: Record = {}, + url?: string +): Record => { const headers: Record = { - 'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36', - 'Accept': '*/*', + 'User-Agent': + 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36', + Accept: '*/*', 'Accept-Encoding': 'gzip, deflate, br', 'Cache-Control': 'no-cache', - 'Connection': 'keep-alive', - 'Referer': getReferer(), + Connection: 'keep-alive', + Referer: getReferer(), 'Sec-Ch-Ua': 'Chromium;v=91, Not-A.Brand;v=99', 'Sec-Ch-Ua-Mobile': '?0', 'Sec-Ch-Ua-Platform': '"macOS"', - ...customHeaders + ...customHeaders, } - + // Add Host header if not provided and URL is valid if (url) { try { @@ -49,7 +53,7 @@ const getDefaultHeaders = (customHeaders: Record = {}, url?: str // Invalid URL, will be caught later } } - + return headers } @@ -60,7 +64,11 @@ const getDefaultHeaders = (customHeaders: Record = {}, url?: str * @param queryParams Query parameters to add to the URL * @returns Processed URL with path params replaced and query params added */ -const processUrl = (url: string, pathParams?: Record, queryParams?: TableRow[] | null): string => { +const processUrl = ( + url: string, + pathParams?: Record, + queryParams?: TableRow[] | null +): string => { // Strip any surrounding quotes if ((url.startsWith('"') && url.endsWith('"')) || (url.startsWith("'") && url.endsWith("'"))) { url = url.slice(1, -1) @@ -76,19 +84,19 @@ const processUrl = (url: string, pathParams?: Record, queryParam // Handle query parameters if (queryParams) { const queryParamsObj = transformTable(queryParams) - + // Verify if URL already has query params to use proper separator const separator = url.includes('?') ? '&' : '?' 
- + // Build query string manually to avoid double-encoding issues const queryParts: string[] = [] - + for (const [key, value] of Object.entries(queryParamsObj)) { if (value !== undefined && value !== null) { queryParts.push(`${encodeURIComponent(key)}=${encodeURIComponent(String(value))}`) } } - + if (queryParts.length > 0) { url += separator + queryParts.join('&') } @@ -103,7 +111,7 @@ const shouldUseProxy = (url: string): boolean => { if (process.env.NODE_ENV === 'test' || process.env.VITEST) { return false } - + // Only consider proxying in browser environment if (typeof window === 'undefined') { return false @@ -112,12 +120,12 @@ const shouldUseProxy = (url: string): boolean => { try { const urlObj = new URL(url) const currentOrigin = window.location.origin - + // Don't proxy same-origin or localhost requests if (url.startsWith(currentOrigin) || url.includes('localhost')) { return false } - + return true // Proxy all cross-origin requests for consistency } catch (e) { logger.warn('URL parsing failed:', e) @@ -127,14 +135,15 @@ const shouldUseProxy = (url: string): boolean => { // Default headers that will be applied if not explicitly overridden by user const DEFAULT_HEADERS: Record = { - 'User-Agent': 'Mozilla/5.0 (Macintosh Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36', - 'Accept': '*/*', + 'User-Agent': + 'Mozilla/5.0 (Macintosh Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36', + Accept: '*/*', 'Accept-Encoding': 'gzip, deflate, br', 'Cache-Control': 'no-cache', - 'Connection': 'keep-alive', + Connection: 'keep-alive', 'Sec-Ch-Ua': '"Chromium"v="135", "Not-A.Brand"v="8"', 'Sec-Ch-Ua-Mobile': '?0', - 'Sec-Ch-Ua-Platform': '"macOS"' + 'Sec-Ch-Ua-Platform': '"macOS"', } /** @@ -216,7 +225,7 @@ export const requestTool: ToolConfig = { try { // Process the URL with parameters let url = processUrl(params.url, params.pathParams, params.params) - + // Update the URL in 
params for any subsequent operations params.url = url @@ -224,26 +233,26 @@ export const requestTool: ToolConfig = { if (shouldUseProxy(url)) { // Route request through our proxy let proxyUrl = `/api/proxy?url=${encodeURIComponent(url)}` - + // Forward all headers as URL parameters const userHeaders = transformTable(params.headers || null) - + // Add all custom headers as query parameters for (const [key, value] of Object.entries(userHeaders)) { if (value !== undefined && value !== null) { proxyUrl += `&header.${encodeURIComponent(key)}=${encodeURIComponent(String(value))}` } } - + const response = await fetch(proxyUrl, { method: 'GET', headers: { 'Content-Type': 'application/json', }, }) - + const result = await response.json() - + // Transform the proxy result to match the expected output format return { success: result.success, @@ -252,19 +261,19 @@ export const requestTool: ToolConfig = { status: result.status, headers: result.headers || {}, }, - error: result.success ? undefined : ( - // Extract and display the actual API error message from the response if available - result.data && typeof result.data === 'object' && result.data.error + error: result.success + ? undefined + : // Extract and display the actual API error message from the response if available + result.data && typeof result.data === 'object' && result.data.error ? 
`HTTP error ${result.status}: ${result.data.error.message || JSON.stringify(result.data.error)}` - : result.error || `HTTP error ${result.status}` - ), + : result.error || `HTTP error ${result.status}`, } } - + // For non-proxied requests, proceed with normal fetch const userHeaders = transformTable(params.headers || null) const headers = getDefaultHeaders(userHeaders, url) - + const fetchOptions: RequestInit = { method: params.method || 'GET', headers, @@ -372,11 +381,11 @@ export const requestTool: ToolConfig = { url: (params: RequestParams) => { // Process the URL first to handle path/query params const processedUrl = processUrl(params.url, params.pathParams, params.params) - + // For external URLs that need proxying if (shouldUseProxy(processedUrl)) { let proxyUrl = `/api/proxy?url=${encodeURIComponent(processedUrl)}` - + // Forward all headers as URL parameters const userHeaders = transformTable(params.headers || null) for (const [key, value] of Object.entries(userHeaders)) { @@ -384,25 +393,25 @@ export const requestTool: ToolConfig = { proxyUrl += `&header.${encodeURIComponent(key)}=${encodeURIComponent(String(value))}` } } - + return proxyUrl } return processedUrl }, - + method: 'GET' as HttpMethod, - + headers: (params: RequestParams) => { const headers = transformTable(params.headers || null) - + // For proxied requests, we only need minimal headers if (shouldUseProxy(params.url)) { return { 'Content-Type': 'application/json', } } - + // For direct requests, add all our standard headers const allHeaders = getDefaultHeaders(headers, params.url) @@ -416,13 +425,13 @@ export const requestTool: ToolConfig = { return allHeaders }, - + body: (params: RequestParams) => { // For proxied requests, we don't need a body if (shouldUseProxy(params.url)) { return undefined } - + if (params.formData) { const formData = new FormData() Object.entries(params.formData).forEach(([key, value]) => { @@ -444,7 +453,7 @@ export const requestTool: ToolConfig = { const contentType 
= response.headers.get('content-type') || '' if (contentType.includes('application/json')) { const jsonResponse = await response.json() - + // Check if this is a proxy response if (jsonResponse.data !== undefined && jsonResponse.status !== undefined) { return { @@ -457,7 +466,7 @@ export const requestTool: ToolConfig = { } } } - + // Standard response handling const headers: Record = {} response.headers.forEach((value, key) => { @@ -483,24 +492,24 @@ export const requestTool: ToolConfig = { if (error.response?.data) { // Handle structured error objects from APIs if (typeof error.response.data === 'object' && error.response.data.error) { - const apiError = error.response.data.error; - const message = apiError.message || (typeof apiError === 'string' ? apiError : JSON.stringify(apiError)); - return `${error.status || ''} ${message}`.trim(); + const apiError = error.response.data.error + const message = + apiError.message || (typeof apiError === 'string' ? apiError : JSON.stringify(apiError)) + return `${error.status || ''} ${message}`.trim() } - + // For text error responses if (typeof error.response.data === 'string' && error.response.data.trim()) { - return `${error.status || ''} ${error.response.data}`.trim(); + return `${error.status || ''} ${error.response.data}`.trim() } } - + // Fall back to standard error formatting - const message = error.message || error.error?.message || 'Unknown error'; - const code = error.status || error.error?.status; - const statusText = error.statusText || ''; - + const message = error.message || error.error?.message || 'Unknown error' + const code = error.status || error.error?.status + const statusText = error.statusText || '' + // Format the error message - return code ? `HTTP error ${code}${statusText ? ': ' + statusText : ''} - ${message}` : message; + return code ? `HTTP error ${code}${statusText ? 
': ' + statusText : ''} - ${message}` : message }, } - diff --git a/apps/sim/tools/http/types.ts b/apps/sim/tools/http/types.ts new file mode 100644 index 000000000..04a5d0dc2 --- /dev/null +++ b/apps/sim/tools/http/types.ts @@ -0,0 +1,23 @@ +import { ToolResponse } from '../types' +import { HttpMethod } from '../types' +import { TableRow } from '../types' + +export interface RequestParams { + url: string + method?: HttpMethod + headers?: TableRow[] + body?: any + params?: TableRow[] + pathParams?: Record + formData?: Record + timeout?: number + validateStatus?: (status: number) => boolean +} + +export interface RequestResponse extends ToolResponse { + output: { + data: any + status: number + headers: Record + } +} diff --git a/sim/tools/hubspot/contacts.ts b/apps/sim/tools/hubspot/contacts.ts similarity index 100% rename from sim/tools/hubspot/contacts.ts rename to apps/sim/tools/hubspot/contacts.ts diff --git a/sim/tools/index.test.ts b/apps/sim/tools/index.test.ts similarity index 100% rename from sim/tools/index.test.ts rename to apps/sim/tools/index.test.ts diff --git a/sim/tools/index.ts b/apps/sim/tools/index.ts similarity index 96% rename from sim/tools/index.ts rename to apps/sim/tools/index.ts index 35602663d..6bfce4644 100644 --- a/sim/tools/index.ts +++ b/apps/sim/tools/index.ts @@ -1,6 +1,6 @@ import { createLogger } from '@/lib/logs/console-logger' -import { getToolAsync, getTool } from './utils' import { OAuthTokenPayload, ToolConfig, ToolResponse } from './types' +import { getTool, getToolAsync } from './utils' import { formatRequestParams, validateToolRequest } from './utils' const logger = createLogger('Tools') @@ -18,7 +18,7 @@ export async function executeTool( try { let tool: ToolConfig | undefined - + // If it's a custom tool, use the async version with workflowId if (toolId.startsWith('custom_')) { const workflowId = params._context?.workflowId @@ -27,7 +27,7 @@ export async function executeTool( // For built-in tools, use the synchronous 
version tool = getTool(toolId) } - + // Ensure context is preserved if it exists const contextParams = { ...params } @@ -41,7 +41,7 @@ export async function executeTool( // If we have a credential parameter, fetch the access token if (contextParams.credential) { - logger.info(`[executeTool] Credential found for ${toolId}, fetching access token.`); + logger.info(`[executeTool] Credential found for ${toolId}, fetching access token.`) try { const baseUrl = process.env.NEXT_PUBLIC_APP_URL if (!baseUrl) { @@ -60,7 +60,9 @@ export async function executeTool( const workflowId = contextParams.workflowId || contextParams._context?.workflowId if (workflowId) { tokenPayload.workflowId = workflowId - logger.info(`[executeTool] Added workflowId ${workflowId} to token payload for ${toolId}`); + logger.info( + `[executeTool] Added workflowId ${workflowId} to token payload for ${toolId}` + ) } } @@ -79,7 +81,7 @@ export async function executeTool( const data = await response.json() contextParams.accessToken = data.accessToken - logger.info(`[executeTool] Successfully fetched access token for ${toolId}`); + logger.info(`[executeTool] Successfully fetched access token for ${toolId}`) // Clean up params we don't need to pass to the actual tool delete contextParams.credential @@ -87,7 +89,9 @@ export async function executeTool( } catch (error) { logger.error('[executeTool] Error fetching access token:', { error }) // Re-throw the error to fail the tool execution if token fetching fails - throw new Error(`Failed to obtain credential for tool ${toolId}: ${error instanceof Error ? error.message : String(error)}`); + throw new Error( + `Failed to obtain credential for tool ${toolId}: ${error instanceof Error ? 
error.message : String(error)}` + ) } } @@ -200,7 +204,7 @@ export async function executeTool( } // For external APIs, use the proxy - logger.info(`[executeTool] Using handleProxyRequest for toolId=${toolId}`); + logger.info(`[executeTool] Using handleProxyRequest for toolId=${toolId}`) const result = await handleProxyRequest(toolId, contextParams) // Apply post-processing if available and not skipped @@ -361,12 +365,12 @@ async function handleInternalRequest( method: requestParams.method, headers: new Headers(requestParams.headers), body: requestParams.body, - }; + } const response = await fetch(fullUrl, requestOptions) if (!response.ok) { - let errorData; + let errorData try { errorData = await response.json() logger.error(`Error response data: ${JSON.stringify(errorData)}`) @@ -408,7 +412,9 @@ async function handleInternalRequest( throw new Error(`Failed to parse response from ${toolId}: ${jsonError}`) } } catch (error: any) { - logger.error(`Error executing internal tool ${toolId}:`, { error: error.stack || error.message || error }) + logger.error(`Error executing internal tool ${toolId}:`, { + error: error.stack || error.message || error, + }) // Use the tool's error transformer if available if (tool.transformError) { @@ -506,7 +512,7 @@ function validateClientSideParams( if (internalParamSet.has(paramName)) { continue } - + const paramSchema = schema.properties[paramName] if (!paramSchema) { throw new Error(`Unknown parameter: ${paramName}`) @@ -535,7 +541,7 @@ async function handleProxyRequest( toolId: string, params: Record ): Promise { - logger.info(`[handleProxyRequest] Entry: toolId=${toolId}`); + logger.info(`[handleProxyRequest] Entry: toolId=${toolId}`) const baseUrl = process.env.NEXT_PUBLIC_APP_URL if (!baseUrl) { throw new Error('NEXT_PUBLIC_APP_URL environment variable is not set') diff --git a/sim/tools/jina/index.ts b/apps/sim/tools/jina/index.ts similarity index 64% rename from sim/tools/jina/index.ts rename to apps/sim/tools/jina/index.ts 
index 5f4846d3b..1e2e34fa8 100644 --- a/sim/tools/jina/index.ts +++ b/apps/sim/tools/jina/index.ts @@ -1,3 +1,3 @@ import { readUrlTool } from './read_url' -export { readUrlTool } \ No newline at end of file +export { readUrlTool } diff --git a/sim/tools/jina/read_url.ts b/apps/sim/tools/jina/read_url.ts similarity index 100% rename from sim/tools/jina/read_url.ts rename to apps/sim/tools/jina/read_url.ts diff --git a/apps/sim/tools/jina/types.ts b/apps/sim/tools/jina/types.ts new file mode 100644 index 000000000..bc9f89c88 --- /dev/null +++ b/apps/sim/tools/jina/types.ts @@ -0,0 +1,15 @@ +import { ToolResponse } from '../types' + +export interface ReadUrlParams { + url: string + useReaderLMv2?: boolean + gatherLinks?: boolean + jsonResponse?: boolean + apiKey?: string +} + +export interface ReadUrlResponse extends ToolResponse { + output: { + content: string + } +} diff --git a/apps/sim/tools/jira/bulk_read.ts b/apps/sim/tools/jira/bulk_read.ts new file mode 100644 index 000000000..448848cfb --- /dev/null +++ b/apps/sim/tools/jira/bulk_read.ts @@ -0,0 +1,216 @@ +import { ToolConfig } from '../types' +import { JiraRetrieveBulkParams, JiraRetrieveResponseBulk } from './types' + +export const jiraBulkRetrieveTool: ToolConfig = { + id: 'jira_bulk_read', + name: 'Jira Bulk Read', + description: 'Retrieve multiple Jira issues in bulk', + version: '1.0.0', + oauth: { + required: true, + provider: 'jira', + additionalScopes: ['read:jira-work', 'read:jira-user', 'read:me', 'offline_access'], + }, + params: { + accessToken: { + type: 'string', + required: true, + description: 'OAuth access token for Jira', + }, + domain: { + type: 'string', + required: true, + requiredForToolCall: true, + description: 'Your Jira domain (e.g., yourcompany.atlassian.net)', + }, + projectId: { + type: 'string', + required: true, + description: 'Jira project ID', + }, + cloudId: { + type: 'string', + required: false, + description: 'Jira cloud ID', + }, + }, + request: { + url: (params: 
JiraRetrieveBulkParams) => { + if (params.cloudId) { + return `https://api.atlassian.com/ex/jira/${params.cloudId}/rest/api/3/issue/picker?currentJQL=project=${params.projectId}` + } + // If no cloudId, use the accessible resources endpoint + return 'https://api.atlassian.com/oauth/token/accessible-resources' + }, + method: 'GET', + headers: (params: JiraRetrieveBulkParams) => ({ + Authorization: `Bearer ${params.accessToken}`, + Accept: 'application/json', + }), + body: (params: JiraRetrieveBulkParams) => ({}), + }, + transformResponse: async (response: Response, params?: JiraRetrieveBulkParams) => { + if (!params) { + throw new Error('Parameters are required for Jira bulk issue retrieval') + } + + try { + // If we don't have a cloudId, we need to fetch it first + if (!params.cloudId) { + if (!response.ok) { + const errorData = await response.json().catch(() => null) + throw new Error( + errorData?.message || + `Failed to fetch accessible resources: ${response.status} ${response.statusText}` + ) + } + + const accessibleResources = await response.json() + if (!Array.isArray(accessibleResources) || accessibleResources.length === 0) { + throw new Error('No accessible Jira resources found for this account') + } + + const normalizedInput = `https://${params.domain}`.toLowerCase() + const matchedResource = accessibleResources.find( + (r) => r.url.toLowerCase() === normalizedInput + ) + + if (!matchedResource) { + throw new Error(`Could not find matching Jira site for domain: ${params.domain}`) + } + + // First get issue keys from picker + const pickerUrl = `https://api.atlassian.com/ex/jira/${matchedResource.id}/rest/api/3/issue/picker?currentJQL=project=${params.projectId}` + const pickerResponse = await fetch(pickerUrl, { + method: 'GET', + headers: { + Authorization: `Bearer ${params.accessToken}`, + Accept: 'application/json', + }, + }) + + if (!pickerResponse.ok) { + const errorData = await pickerResponse.json().catch(() => null) + throw new Error( + 
errorData?.message || + `Failed to retrieve issue keys: ${pickerResponse.status} ${pickerResponse.statusText}` + ) + } + + const pickerData = await pickerResponse.json() + const issueKeys = pickerData.sections + .flatMap((section: any) => section.issues || []) + .map((issue: any) => issue.key) + + if (issueKeys.length === 0) { + return { + success: true, + output: [], + } + } + + // Now use bulkfetch to get the full issue details + const bulkfetchUrl = `https://api.atlassian.com/ex/jira/${matchedResource.id}/rest/api/3/issue/bulkfetch` + const bulkfetchResponse = await fetch(bulkfetchUrl, { + method: 'POST', + headers: { + Authorization: `Bearer ${params.accessToken}`, + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + expand: ['names'], + fields: ['summary', 'description', 'created', 'updated'], + fieldsByKeys: false, + issueIdsOrKeys: issueKeys, + properties: [], + }), + }) + + if (!bulkfetchResponse.ok) { + const errorData = await bulkfetchResponse.json().catch(() => null) + throw new Error( + errorData?.message || + `Failed to retrieve Jira issues: ${bulkfetchResponse.status} ${bulkfetchResponse.statusText}` + ) + } + + const data = await bulkfetchResponse.json() + return { + success: true, + output: data.issues.map((issue: any) => ({ + ts: new Date().toISOString(), + summary: issue.fields.summary, + description: issue.fields.description?.content?.[0]?.content?.[0]?.text || '', + created: issue.fields.created, + updated: issue.fields.updated, + })), + } + } + + // If we have a cloudId, this response is from the issue picker + if (!response.ok) { + const errorData = await response.json().catch(() => null) + throw new Error( + errorData?.message || + `Failed to retrieve issue keys: ${response.status} ${response.statusText}` + ) + } + + const pickerData = await response.json() + const issueKeys = pickerData.sections + .flatMap((section: any) => section.issues || []) + .map((issue: any) => issue.key) + + if 
(issueKeys.length === 0) { + return { + success: true, + output: [], + } + } + + // Use bulkfetch to get the full issue details + const bulkfetchUrl = `https://api.atlassian.com/ex/jira/${params.cloudId}/rest/api/3/issue/bulkfetch` + const bulkfetchResponse = await fetch(bulkfetchUrl, { + method: 'POST', + headers: { + Authorization: `Bearer ${params.accessToken}`, + Accept: 'application/json', + 'Content-Type': 'application/json', + }, + body: JSON.stringify({ + expand: ['names'], + fields: ['summary', 'description', 'created', 'updated'], + fieldsByKeys: false, + issueIdsOrKeys: issueKeys, + properties: [], + }), + }) + + if (!bulkfetchResponse.ok) { + const errorData = await bulkfetchResponse.json().catch(() => null) + throw new Error( + errorData?.message || + `Failed to retrieve Jira issues: ${bulkfetchResponse.status} ${bulkfetchResponse.statusText}` + ) + } + + const data = await bulkfetchResponse.json() + return { + success: true, + output: data.issues.map((issue: any) => ({ + ts: new Date().toISOString(), + summary: issue.fields.summary, + description: issue.fields.description?.content?.[0]?.content?.[0]?.text || '', + created: issue.fields.created, + updated: issue.fields.updated, + })), + } + } catch (error) { + throw error instanceof Error ? 
error : new Error(String(error)) + } + }, + transformError: (error: any) => { + return error.message || 'Failed to retrieve Jira issues' + }, +} diff --git a/sim/tools/jira/index.ts b/apps/sim/tools/jira/index.ts similarity index 100% rename from sim/tools/jira/index.ts rename to apps/sim/tools/jira/index.ts index 6c18fdccd..6c7c88866 100644 --- a/sim/tools/jira/index.ts +++ b/apps/sim/tools/jira/index.ts @@ -1,7 +1,7 @@ +import { jiraBulkRetrieveTool } from './bulk_read' import { jiraRetrieveTool } from './retrieve' import { jiraUpdateTool } from './update' import { jiraWriteTool } from './write' -import { jiraBulkRetrieveTool } from './bulk_read' export { jiraRetrieveTool } export { jiraUpdateTool } diff --git a/apps/sim/tools/jira/retrieve.ts b/apps/sim/tools/jira/retrieve.ts new file mode 100644 index 000000000..c13754a89 --- /dev/null +++ b/apps/sim/tools/jira/retrieve.ts @@ -0,0 +1,162 @@ +import { ToolConfig } from '../types' +import { JiraRetrieveParams, JiraRetrieveResponse } from './types' + +export const jiraRetrieveTool: ToolConfig = { + id: 'jira_retrieve', + name: 'Jira Retrieve', + description: 'Retrieve detailed information about a specific Jira issue', + version: '1.0.0', + + oauth: { + required: true, + provider: 'jira', + additionalScopes: ['read:jira-work', 'read:jira-user', 'read:me', 'offline_access'], + }, + params: { + accessToken: { + type: 'string', + required: true, + description: 'OAuth access token for Jira', + }, + domain: { + type: 'string', + required: true, + requiredForToolCall: true, + description: 'Your Jira domain (e.g., yourcompany.atlassian.net)', + }, + projectId: { + type: 'string', + required: false, + description: + 'Jira project ID to retrieve issues from. If not provided, all issues will be retrieved.', + }, + issueKey: { + type: 'string', + required: true, + description: 'Jira issue key to retrieve (e.g., PROJ-123)', + }, + cloudId: { + type: 'string', + required: false, + description: + 'Jira Cloud ID for the instance. 
If not provided, it will be fetched using the domain.', + }, + }, + + request: { + url: (params: JiraRetrieveParams) => { + if (params.cloudId) { + return `https://api.atlassian.com/ex/jira/${params.cloudId}/rest/api/3/issue/${params.issueKey}?expand=renderedFields,names,schema,transitions,operations,editmeta,changelog` + } + // If no cloudId, use the accessible resources endpoint + return 'https://api.atlassian.com/oauth/token/accessible-resources' + }, + method: 'GET', + headers: (params: JiraRetrieveParams) => { + return { + Accept: 'application/json', + Authorization: `Bearer ${params.accessToken}`, + } + }, + }, + + transformResponse: async (response: Response, params?: JiraRetrieveParams) => { + if (!params) { + throw new Error('Parameters are required for Jira issue retrieval') + } + + try { + // If we don't have a cloudId, we need to fetch it first + if (!params.cloudId) { + if (!response.ok) { + const errorData = await response.json().catch(() => null) + throw new Error( + errorData?.message || + `Failed to fetch accessible resources: ${response.status} ${response.statusText}` + ) + } + + const accessibleResources = await response.json() + if (!Array.isArray(accessibleResources) || accessibleResources.length === 0) { + throw new Error('No accessible Jira resources found for this account') + } + + const normalizedInput = `https://${params.domain}`.toLowerCase() + const matchedResource = accessibleResources.find( + (r) => r.url.toLowerCase() === normalizedInput + ) + + if (!matchedResource) { + throw new Error(`Could not find matching Jira site for domain: ${params.domain}`) + } + + // Now fetch the actual issue with the found cloudId + const issueUrl = `https://api.atlassian.com/ex/jira/${matchedResource.id}/rest/api/3/issue/${params.issueKey}?expand=renderedFields,names,schema,transitions,operations,editmeta,changelog` + const issueResponse = await fetch(issueUrl, { + method: 'GET', + headers: { + Accept: 'application/json', + Authorization: `Bearer 
${params.accessToken}`, + }, + }) + + if (!issueResponse.ok) { + const errorData = await issueResponse.json().catch(() => null) + throw new Error( + errorData?.message || + `Failed to retrieve Jira issue: ${issueResponse.status} ${issueResponse.statusText}` + ) + } + + const data = await issueResponse.json() + if (!data || !data.fields) { + throw new Error('Invalid response format from Jira API') + } + + return { + success: true, + output: { + ts: new Date().toISOString(), + issueKey: data.key, + summary: data.fields.summary, + description: data.fields.description, + created: data.fields.created, + updated: data.fields.updated, + }, + } + } + + // If we have a cloudId, this response is the issue data + if (!response.ok) { + const errorData = await response.json().catch(() => null) + throw new Error( + errorData?.message || + `Failed to retrieve Jira issue: ${response.status} ${response.statusText}` + ) + } + + const data = await response.json() + if (!data || !data.fields) { + throw new Error('Invalid response format from Jira API') + } + + return { + success: true, + output: { + ts: new Date().toISOString(), + issueKey: data.key, + summary: data.fields.summary, + description: data.fields.description, + created: data.fields.created, + updated: data.fields.updated, + }, + } + } catch (error) { + throw error instanceof Error ? 
error : new Error(String(error)) + } + }, + + transformError: (error: any) => { + return error.message || 'Failed to retrieve Jira issue' + }, +} diff --git a/sim/tools/jira/types.ts b/apps/sim/tools/jira/types.ts similarity index 99% rename from sim/tools/jira/types.ts rename to apps/sim/tools/jira/types.ts index cce2d0d59..c9f003efe 100644 --- a/sim/tools/jira/types.ts +++ b/apps/sim/tools/jira/types.ts @@ -35,7 +35,6 @@ export interface JiraRetrieveResponseBulk extends ToolResponse { }[] } - export interface JiraUpdateParams { accessToken: string domain: string @@ -104,4 +103,4 @@ export interface JiraCloudResource { name: string scopes: string[] avatarUrl: string -} \ No newline at end of file +} diff --git a/apps/sim/tools/jira/update.ts b/apps/sim/tools/jira/update.ts new file mode 100644 index 000000000..0333e34f0 --- /dev/null +++ b/apps/sim/tools/jira/update.ts @@ -0,0 +1,220 @@ +import { ToolConfig } from '../types' +import { JiraUpdateParams, JiraUpdateResponse } from './types' +import { getJiraCloudId } from './utils' + +export const jiraUpdateTool: ToolConfig = { + id: 'jira_update', + name: 'Jira Update', + description: 'Update a Jira issue', + version: '1.0.0', + + oauth: { + required: true, + provider: 'jira', + additionalScopes: ['read:jira-user', 'write:jira-work', 'write:issue:jira', 'read:jira-work'], + }, + + params: { + accessToken: { + type: 'string', + required: true, + description: 'OAuth access token for Jira', + }, + domain: { + type: 'string', + required: true, + requiredForToolCall: true, + description: 'Your Jira domain (e.g., yourcompany.atlassian.net)', + }, + projectId: { + type: 'string', + required: false, + description: + 'Jira project ID to update issues in. 
If not provided, all issues will be retrieved.', + }, + issueKey: { + type: 'string', + required: true, + description: 'Jira issue key to update', + }, + summary: { + type: 'string', + required: false, + description: 'New summary for the issue', + }, + description: { + type: 'string', + required: false, + description: 'New description for the issue', + }, + status: { + type: 'string', + required: false, + description: 'New status for the issue', + }, + priority: { + type: 'string', + required: false, + description: 'New priority for the issue', + }, + assignee: { + type: 'string', + required: false, + description: 'New assignee for the issue', + }, + cloudId: { + type: 'string', + required: false, + description: + 'Jira Cloud ID for the instance. If not provided, it will be fetched using the domain.', + }, + }, + + directExecution: async (params) => { + // Pre-fetch the cloudId if not provided + if (!params.cloudId) { + try { + params.cloudId = await getJiraCloudId(params.domain, params.accessToken) + } catch (error) { + throw error + } + } + return undefined // Let the regular request handling take over + }, + + request: { + url: (params) => { + const { domain, issueKey, cloudId } = params + if (!domain || !issueKey || !cloudId) { + throw new Error('Domain, issueKey, and cloudId are required') + } + + const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${issueKey}` + return url + }, + method: 'PUT', + headers: (params) => ({ + Authorization: `Bearer ${params.accessToken}`, + Accept: 'application/json', + 'Content-Type': 'application/json', + }), + body: (params) => { + // Map the summary from either summary or title field + const summaryValue = params.summary || params.title + const descriptionValue = params.description + + const fields: Record = {} + + if (summaryValue) { + fields.summary = summaryValue + } + + if (descriptionValue) { + fields.description = { + type: 'doc', + version: 1, + content: [ + { + type: 'paragraph', + content: [ + 
{ + type: 'text', + text: descriptionValue, + }, + ], + }, + ], + } + } + + if (params.status) { + fields.status = { + name: params.status, + } + } + + if (params.priority) { + fields.priority = { + name: params.priority, + } + } + + if (params.assignee) { + fields.assignee = { + id: params.assignee, + } + } + + return { fields } + }, + }, + + transformResponse: async (response: Response, params?: JiraUpdateParams) => { + // Log the response details for debugging + const responseText = await response.text() + + if (!response.ok) { + try { + if (responseText) { + const data = JSON.parse(responseText) + throw new Error( + data.errorMessages?.[0] || + data.errors?.[Object.keys(data.errors)[0]] || + data.message || + 'Failed to update Jira issue' + ) + } else { + throw new Error(`Request failed with status ${response.status}: ${response.statusText}`) + } + } catch (e) { + if (e instanceof SyntaxError) { + // If we can't parse the response as JSON, return the raw text + throw new Error(`Jira API error (${response.status}): ${responseText}`) + } + throw e + } + } + + // For successful responses + try { + if (!responseText) { + // Some successful PUT requests might return no content + return { + success: true, + output: { + ts: new Date().toISOString(), + issueKey: params?.issueKey || 'unknown', + summary: 'Issue updated successfully', + success: true, + }, + } + } + + const data = JSON.parse(responseText) + return { + success: true, + output: { + ts: new Date().toISOString(), + issueKey: data.key || params?.issueKey || 'unknown', + summary: data.fields?.summary || 'Issue updated', + success: true, + }, + } + } catch (e) { + // If we can't parse the response but it was successful, still return success + return { + success: true, + output: { + ts: new Date().toISOString(), + issueKey: params?.issueKey || 'unknown', + summary: 'Issue updated (response parsing failed)', + success: true, + }, + } + } + }, + + transformError: (error: any) => { + return error.message || 'Failed 
to update Jira issue' + }, +} diff --git a/apps/sim/tools/jira/utils.ts b/apps/sim/tools/jira/utils.ts new file mode 100644 index 000000000..986c5451b --- /dev/null +++ b/apps/sim/tools/jira/utils.ts @@ -0,0 +1,33 @@ +export async function getJiraCloudId(domain: string, accessToken: string): Promise { + try { + const response = await fetch('https://api.atlassian.com/oauth/token/accessible-resources', { + method: 'GET', + headers: { + Authorization: `Bearer ${accessToken}`, + Accept: 'application/json', + }, + }) + + const resources = await response.json() + + // If we have resources, find the matching one + if (Array.isArray(resources) && resources.length > 0) { + const normalizedInput = `https://${domain}`.toLowerCase() + const matchedResource = resources.find((r) => r.url.toLowerCase() === normalizedInput) + + if (matchedResource) { + return matchedResource.id + } + } + + // If we couldn't find a match, return the first resource's ID + // This is a fallback in case the URL matching fails + if (Array.isArray(resources) && resources.length > 0) { + return resources[0].id + } + + throw new Error('No Jira resources found') + } catch (error) { + throw error + } +} diff --git a/apps/sim/tools/jira/write.ts b/apps/sim/tools/jira/write.ts new file mode 100644 index 000000000..609b4f7c0 --- /dev/null +++ b/apps/sim/tools/jira/write.ts @@ -0,0 +1,227 @@ +import { ToolConfig } from '../types' +import { JiraWriteParams, JiraWriteResponse } from './types' +import { getJiraCloudId } from './utils' + +export const jiraWriteTool: ToolConfig = { + id: 'jira_write', + name: 'Jira Write', + description: 'Write a Jira issue', + version: '1.0.0', + + oauth: { + required: true, + provider: 'jira', + additionalScopes: [ + 'read:jira-user', + 'write:jira-work', + 'read:project:jira', + 'read:issue:jira', + 'write:issue:jira', + 'write:comment:jira', + 'write:comment.property:jira', + 'write:attachment:jira', + 'read:attachment:jira', + ], + }, + + params: { + accessToken: { + type: 
'string', + required: true, + description: 'OAuth access token for Jira', + }, + domain: { + type: 'string', + required: true, + requiredForToolCall: true, + description: 'Your Jira domain (e.g., yourcompany.atlassian.net)', + }, + projectId: { + type: 'string', + required: true, + description: 'Project ID for the issue', + }, + summary: { + type: 'string', + required: true, + description: 'Summary for the issue', + }, + description: { + type: 'string', + required: false, + description: 'Description for the issue', + }, + priority: { + type: 'string', + required: false, + description: 'Priority for the issue', + }, + assignee: { + type: 'string', + required: false, + description: 'Assignee for the issue', + }, + cloudId: { + type: 'string', + required: false, + description: + 'Jira Cloud ID for the instance. If not provided, it will be fetched using the domain.', + }, + issueType: { + type: 'string', + required: true, + description: 'Type of issue to create (e.g., Task, Story, Bug, Sub-task)', + }, + }, + + directExecution: async (params) => { + // Pre-fetch the cloudId if not provided + if (!params.cloudId) { + try { + params.cloudId = await getJiraCloudId(params.domain, params.accessToken) + } catch (error) { + throw error + } + } + return undefined // Let the regular request handling take over + }, + + request: { + url: (params) => { + const { domain, cloudId } = params + if (!domain || !cloudId) { + throw new Error('Domain and cloudId are required') + } + + const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue` + + return url + }, + method: 'POST', + headers: (params) => ({ + Authorization: `Bearer ${params.accessToken}`, + Accept: 'application/json', + 'Content-Type': 'application/json', + }), + body: (params) => { + // Validate required fields + if (!params.projectId) { + throw new Error('Project ID is required') + } + if (!params.summary) { + throw new Error('Summary is required') + } + if (!params.issueType) { + throw new Error('Issue 
type is required') + } + + // Construct fields object with only the necessary fields + const fields: Record = { + project: { + id: params.projectId, + }, + issuetype: { + name: params.issueType, + }, + summary: params.summary, // Use the summary field directly + } + + // Only add description if it exists + if (params.description) { + fields.description = { + type: 'doc', + version: 1, + content: [ + { + type: 'paragraph', + content: [ + { + type: 'text', + text: params.description, + }, + ], + }, + ], + } + } + + // Only add parent if it exists + if (params.parent) { + fields.parent = params.parent + } + + const body = { fields } + return body + }, + }, + + transformResponse: async (response: Response, params?: JiraWriteParams) => { + // Log the response details for debugging + const responseText = await response.text() + + if (!response.ok) { + try { + if (responseText) { + const data = JSON.parse(responseText) + throw new Error( + data.errorMessages?.[0] || + data.errors?.[Object.keys(data.errors)[0]] || + data.message || + 'Failed to create Jira issue' + ) + } else { + throw new Error(`Request failed with status ${response.status}: ${response.statusText}`) + } + } catch (e) { + if (e instanceof SyntaxError) { + // If we can't parse the response as JSON, return the raw text + throw new Error(`Jira API error (${response.status}): ${responseText}`) + } + throw e + } + } + + // For successful responses + try { + if (!responseText) { + return { + success: true, + output: { + ts: new Date().toISOString(), + issueKey: 'unknown', + summary: 'Issue created successfully', + success: true, + url: '', + }, + } + } + + const data = JSON.parse(responseText) + return { + success: true, + output: { + ts: new Date().toISOString(), + issueKey: data.key || 'unknown', + summary: data.fields?.summary || 'Issue created', + success: true, + url: `https://${params?.domain}/browse/${data.key}`, + }, + } + } catch (e) { + return { + success: true, + output: { + ts: new 
Date().toISOString(), + issueKey: 'unknown', + summary: 'Issue created (response parsing failed)', + success: true, + url: '', + }, + } + } + }, + + transformError: (error: any) => { + return error.message || 'Failed to create Jira issue' + }, +} diff --git a/apps/sim/tools/linkup/index.ts b/apps/sim/tools/linkup/index.ts new file mode 100644 index 000000000..694a53047 --- /dev/null +++ b/apps/sim/tools/linkup/index.ts @@ -0,0 +1,3 @@ +import { searchTool } from './search' + +export const linkupSearchTool = searchTool diff --git a/sim/tools/linkup/search.ts b/apps/sim/tools/linkup/search.ts similarity index 95% rename from sim/tools/linkup/search.ts rename to apps/sim/tools/linkup/search.ts index a863c8c76..e5d04e473 100644 --- a/sim/tools/linkup/search.ts +++ b/apps/sim/tools/linkup/search.ts @@ -6,7 +6,7 @@ export const searchTool: ToolConfig ({ 'Content-Type': 'application/json', - 'Authorization': `Bearer ${params.apiKey}`, + Authorization: `Bearer ${params.apiKey}`, }), body: (params) => { const body: Record = { q: params.q, } - + if (params.depth) body.depth = params.depth if (params.outputType) body.outputType = params.outputType body.includeImages = false - + return body }, }, - + transformResponse: async (response: Response) => { if (!response.ok) { const errorText = await response.text() throw new Error(`Linkup API error: ${response.status} ${errorText}`) } - + const data: LinkupSearchResponse = await response.json() - + return { success: true, output: { @@ -67,8 +67,8 @@ export const searchTool: ToolConfig { return `Error searching with Linkup: ${error.message}` }, -} \ No newline at end of file +} diff --git a/sim/tools/linkup/types.ts b/apps/sim/tools/linkup/types.ts similarity index 82% rename from sim/tools/linkup/types.ts rename to apps/sim/tools/linkup/types.ts index a2480bd98..9f8a05cee 100644 --- a/sim/tools/linkup/types.ts +++ b/apps/sim/tools/linkup/types.ts @@ -1,4 +1,4 @@ -import { ToolResponse } from "../types" +import { ToolResponse } from 
'../types' export interface LinkupSource { name: string @@ -16,8 +16,8 @@ export interface LinkupSearchParams { export interface LinkupSearchResponse { answer: string sources: LinkupSource[] -} +} export interface LinkupSearchToolResponse extends ToolResponse { - output: LinkupSearchResponse - } \ No newline at end of file + output: LinkupSearchResponse +} diff --git a/apps/sim/tools/mem0/add_memories.ts b/apps/sim/tools/mem0/add_memories.ts new file mode 100644 index 000000000..667befd81 --- /dev/null +++ b/apps/sim/tools/mem0/add_memories.ts @@ -0,0 +1,120 @@ +import { ToolConfig } from '../types' + +// Add Memories Tool +export const mem0AddMemoriesTool: ToolConfig = { + id: 'mem0_add_memories', + name: 'Add Memories', + description: 'Add memories to Mem0 for persistent storage and retrieval', + version: '1.0.0', + params: { + apiKey: { + type: 'string', + required: true, + description: 'Your Mem0 API key', + }, + userId: { + type: 'string', + required: true, + description: 'User ID associated with the memory', + }, + messages: { + type: 'json', + required: true, + description: 'Array of message objects with role and content', + }, + }, + request: { + url: 'https://api.mem0.ai/v1/memories/', + method: 'POST', + headers: (params) => ({ + Authorization: `Token ${params.apiKey}`, + 'Content-Type': 'application/json', + }), + body: (params) => { + // First, ensure messages is an array + let messagesArray = params.messages + if (typeof messagesArray === 'string') { + try { + messagesArray = JSON.parse(messagesArray) + } catch (e) { + throw new Error('Messages must be a valid JSON array of objects with role and content') + } + } + + // Validate message format + if (!Array.isArray(messagesArray) || messagesArray.length === 0) { + throw new Error('Messages must be a non-empty array') + } + + for (const msg of messagesArray) { + if (!msg.role || !msg.content) { + throw new Error('Each message must have role and content properties') + } + } + + // Prepare request body + 
const body: Record = { + messages: messagesArray, + version: 'v2', + user_id: params.userId, + } + + return body + }, + }, + transformResponse: async (response) => { + const data = await response.json() + + // If the API returns an empty array, this might be normal behavior on success + if (Array.isArray(data) && data.length === 0) { + return { + success: true, + output: { + memories: [], + }, + } + } + + // Handle array response with memory objects + if (Array.isArray(data) && data.length > 0) { + // Extract IDs for easy access + const memoryIds = data.map((memory) => memory.id) + + return { + success: true, + output: { + ids: memoryIds, + memories: data, + }, + } + } + + // Handle non-array responses (single memory object) + if (data && !Array.isArray(data) && data.id) { + return { + success: true, + output: { + ids: [data.id], + memories: [data], + }, + } + } + + // Default response format if none of the above match + return { + success: true, + output: { + memories: Array.isArray(data) ? 
data : [data], + }, + } + }, + transformError: async (error) => { + return { + success: false, + output: { + ids: [], + memories: [], + }, + } + }, +} diff --git a/apps/sim/tools/mem0/get_memories.ts b/apps/sim/tools/mem0/get_memories.ts new file mode 100644 index 000000000..5d53d2040 --- /dev/null +++ b/apps/sim/tools/mem0/get_memories.ts @@ -0,0 +1,138 @@ +import { ToolConfig } from '../types' + +// Get Memories Tool +export const mem0GetMemoriesTool: ToolConfig = { + id: 'mem0_get_memories', + name: 'Get Memories', + description: 'Retrieve memories from Mem0 by ID or filter criteria', + version: '1.0.0', + params: { + apiKey: { + type: 'string', + required: true, + description: 'Your Mem0 API key', + }, + userId: { + type: 'string', + required: true, + description: 'User ID to retrieve memories for', + }, + memoryId: { + type: 'string', + required: false, + description: 'Specific memory ID to retrieve', + }, + startDate: { + type: 'string', + required: false, + description: 'Start date for filtering by created_at (format: YYYY-MM-DD)', + }, + endDate: { + type: 'string', + required: false, + description: 'End date for filtering by created_at (format: YYYY-MM-DD)', + }, + limit: { + type: 'number', + required: false, + default: 10, + description: 'Maximum number of results to return', + }, + }, + request: { + url: (params: Record) => { + // For a specific memory ID, use the get single memory endpoint + if (params.memoryId) { + // Dynamically set method to GET for memory ID requests + params.method = 'GET' + return `https://api.mem0.ai/v1/memories/${params.memoryId}/` + } + // Otherwise use v2 memories endpoint with filters + return 'https://api.mem0.ai/v2/memories/' + }, + method: 'POST', // Default to POST for filtering + headers: (params) => ({ + 'Content-Type': 'application/json', + Authorization: `Token ${params.apiKey}`, + }), + body: (params: Record) => { + // For specific memory ID, we'll use GET method instead and don't need a body + // But we still need 
to return an empty object to satisfy the type + if (params.memoryId) { + return {} + } + + // Build filters array for AND condition + const andConditions = [] + + // Add user filter + andConditions.push({ user_id: params.userId }) + + // Add date range filter if provided + if (params.startDate || params.endDate) { + const dateFilter: Record = {} + + if (params.startDate) { + dateFilter.gte = params.startDate + } + + if (params.endDate) { + dateFilter.lte = params.endDate + } + + andConditions.push({ created_at: dateFilter }) + } + + // Build final filters object + const body: Record = { + page_size: params.limit || 10, + } + + // Only add filters if we have any conditions + if (andConditions.length > 0) { + body.filters = { AND: andConditions } + } + + return body + }, + }, + transformResponse: async (response, params) => { + try { + // Get raw response for debugging + const responseText = await response.clone().text() + + // Parse the response + const data = JSON.parse(responseText) + + // Format the memories for display + const memories = Array.isArray(data) ? 
data : [data] + + // Extract IDs if available + const ids = memories.map((memory) => memory.id).filter(Boolean) + + return { + success: true, + output: { + memories, + ids, + }, + } + } catch (error: any) { + return { + success: false, + output: { + error: `Failed to process get memories response: ${error.message}`, + }, + } + } + }, + transformError: async (error) => { + return { + success: false, + output: { + ids: [], + memories: [], + }, + } + }, +} diff --git a/sim/tools/mem0/index.ts b/apps/sim/tools/mem0/index.ts similarity index 95% rename from sim/tools/mem0/index.ts rename to apps/sim/tools/mem0/index.ts index fc5c3de62..26b236862 100644 --- a/sim/tools/mem0/index.ts +++ b/apps/sim/tools/mem0/index.ts @@ -1,5 +1,5 @@ import { mem0AddMemoriesTool } from './add_memories' -import { mem0SearchMemoriesTool } from './search_memories' import { mem0GetMemoriesTool } from './get_memories' +import { mem0SearchMemoriesTool } from './search_memories' -export { mem0AddMemoriesTool, mem0SearchMemoriesTool, mem0GetMemoriesTool } \ No newline at end of file +export { mem0AddMemoriesTool, mem0SearchMemoriesTool, mem0GetMemoriesTool } diff --git a/apps/sim/tools/mem0/search_memories.ts b/apps/sim/tools/mem0/search_memories.ts new file mode 100644 index 000000000..0d6a0a07f --- /dev/null +++ b/apps/sim/tools/mem0/search_memories.ts @@ -0,0 +1,119 @@ +import { ToolConfig } from '../types' + +// Search Memories Tool +export const mem0SearchMemoriesTool: ToolConfig = { + id: 'mem0_search_memories', + name: 'Search Memories', + description: 'Search for memories in Mem0 using semantic search', + version: '1.0.0', + params: { + apiKey: { + type: 'string', + required: true, + description: 'Your Mem0 API key', + }, + userId: { + type: 'string', + required: true, + description: 'User ID to search memories for', + }, + query: { + type: 'string', + required: true, + description: 'Search query to find relevant memories', + }, + limit: { + type: 'number', + required: false, + default: 
10, + description: 'Maximum number of results to return', + }, + }, + request: { + url: 'https://api.mem0.ai/v2/memories/search/', + method: 'POST', + headers: (params) => ({ + 'Content-Type': 'application/json', + Authorization: `Token ${params.apiKey}`, + }), + body: (params) => { + try { + // Create the request body with the format that the curl test confirms works + const body: Record = { + query: params.query || 'test', + filters: { + user_id: params.userId, + }, + top_k: params.limit || 10, + } + + return body + } catch (error) { + throw error + } + }, + }, + transformResponse: async (response) => { + try { + // Get raw response for debugging + const responseText = await response.clone().text() + + // Parse the response + const data = JSON.parse(responseText) + + // Handle empty results + if (!data || (Array.isArray(data) && data.length === 0)) { + return { + success: true, + output: { + searchResults: [], + ids: [], + }, + } + } + + // For array results (standard format) + if (Array.isArray(data)) { + const searchResults = data.map((item) => ({ + id: item.id, + data: { memory: item.memory || '' }, + score: item.score || 0, + })) + + const ids = data.map((item) => item.id).filter(Boolean) + + return { + success: true, + output: { + searchResults, + ids, + }, + } + } + + // Fallback for unexpected response format + return { + success: true, + output: { + searchResults: [], + }, + } + } catch (error: any) { + return { + success: false, + output: { + error: `Failed to process search response: ${error.message}`, + }, + } + } + }, + transformError: async (error) => { + return { + success: false, + output: { + ids: [], + searchResults: [], + }, + } + }, +} diff --git a/apps/sim/tools/mem0/types.ts b/apps/sim/tools/mem0/types.ts new file mode 100644 index 000000000..99a6998b5 --- /dev/null +++ b/apps/sim/tools/mem0/types.ts @@ -0,0 +1,9 @@ +import { ToolResponse } from '../types' + +export interface Mem0Response extends ToolResponse { + output: { + ids?: string[] + 
memories?: any[] + searchResults?: any[] + } +} diff --git a/sim/tools/mistral/index.ts b/apps/sim/tools/mistral/index.ts similarity index 100% rename from sim/tools/mistral/index.ts rename to apps/sim/tools/mistral/index.ts diff --git a/sim/tools/mistral/parser.ts b/apps/sim/tools/mistral/parser.ts similarity index 100% rename from sim/tools/mistral/parser.ts rename to apps/sim/tools/mistral/parser.ts index d856e866b..c2bb1738e 100644 --- a/sim/tools/mistral/parser.ts +++ b/apps/sim/tools/mistral/parser.ts @@ -1,7 +1,7 @@ import { createLogger } from '@/lib/logs/console-logger' +import { getBaseUrl } from '@/lib/urls/utils' import { ToolConfig } from '../types' import { MistralParserInput, MistralParserOutput } from './types' -import { getBaseUrl } from '@/lib/urls/utils' const logger = createLogger('MistralParserTool') diff --git a/sim/tools/mistral/types.ts b/apps/sim/tools/mistral/types.ts similarity index 100% rename from sim/tools/mistral/types.ts rename to apps/sim/tools/mistral/types.ts diff --git a/sim/tools/notion/create_page.ts b/apps/sim/tools/notion/create_page.ts similarity index 100% rename from sim/tools/notion/create_page.ts rename to apps/sim/tools/notion/create_page.ts diff --git a/sim/tools/notion/index.ts b/apps/sim/tools/notion/index.ts similarity index 100% rename from sim/tools/notion/index.ts rename to apps/sim/tools/notion/index.ts index d8556c86c..b08783231 100644 --- a/sim/tools/notion/index.ts +++ b/apps/sim/tools/notion/index.ts @@ -1,6 +1,6 @@ import { notionCreatePageTool } from './create_page' import { notionReadTool } from './read' -import { notionWriteTool } from './write' import { notionUpdatePageTool } from './update_page' +import { notionWriteTool } from './write' export { notionReadTool, notionWriteTool, notionCreatePageTool, notionUpdatePageTool } diff --git a/sim/tools/notion/read.ts b/apps/sim/tools/notion/read.ts similarity index 100% rename from sim/tools/notion/read.ts rename to apps/sim/tools/notion/read.ts diff --git 
a/sim/tools/notion/types.ts b/apps/sim/tools/notion/types.ts similarity index 100% rename from sim/tools/notion/types.ts rename to apps/sim/tools/notion/types.ts diff --git a/sim/tools/notion/update_page.ts b/apps/sim/tools/notion/update_page.ts similarity index 100% rename from sim/tools/notion/update_page.ts rename to apps/sim/tools/notion/update_page.ts diff --git a/sim/tools/notion/write.ts b/apps/sim/tools/notion/write.ts similarity index 100% rename from sim/tools/notion/write.ts rename to apps/sim/tools/notion/write.ts diff --git a/sim/tools/openai/dalle.ts b/apps/sim/tools/openai/dalle.ts similarity index 99% rename from sim/tools/openai/dalle.ts rename to apps/sim/tools/openai/dalle.ts index 6e8bff87b..5d3d44f81 100644 --- a/sim/tools/openai/dalle.ts +++ b/apps/sim/tools/openai/dalle.ts @@ -1,6 +1,6 @@ -import { ToolConfig } from '../types' import { createLogger } from '@/lib/logs/console-logger' import { getBaseUrl } from '@/lib/urls/utils' +import { ToolConfig } from '../types' const logger = createLogger('DalleTool') @@ -107,7 +107,7 @@ export const dalleTool: ToolConfig = { const arrayBuffer = await imageBlob.arrayBuffer() const buffer = Buffer.from(arrayBuffer) const base64Image = buffer.toString('base64') - + return { success: true, output: { @@ -146,7 +146,7 @@ export const dalleTool: ToolConfig = { const arrayBuffer = await imageBlob.arrayBuffer() const buffer = Buffer.from(arrayBuffer) const base64Image = buffer.toString('base64') - + logger.info( 'Successfully converted image to base64 via direct fetch, length:', base64Image.length diff --git a/sim/tools/openai/embeddings.ts b/apps/sim/tools/openai/embeddings.ts similarity index 100% rename from sim/tools/openai/embeddings.ts rename to apps/sim/tools/openai/embeddings.ts diff --git a/sim/tools/openai/index.ts b/apps/sim/tools/openai/index.ts similarity index 69% rename from sim/tools/openai/index.ts rename to apps/sim/tools/openai/index.ts index d1429444c..ccf1628d2 100644 --- 
a/sim/tools/openai/index.ts +++ b/apps/sim/tools/openai/index.ts @@ -1,4 +1,4 @@ -import { embeddingsTool } from './embeddings' import { dalleTool } from './dalle' +import { embeddingsTool } from './embeddings' -export { embeddingsTool, dalleTool } \ No newline at end of file +export { embeddingsTool, dalleTool } diff --git a/apps/sim/tools/openai/types.ts b/apps/sim/tools/openai/types.ts new file mode 100644 index 000000000..1c73cf4f7 --- /dev/null +++ b/apps/sim/tools/openai/types.ts @@ -0,0 +1,19 @@ +import { ToolResponse } from '../types' + +export interface DalleResponse extends ToolResponse { + output: { + content: string // This will now be the image URL + image: string // This will be the base64 image data + metadata: { + model: string // Only contains model name now + } + } +} + +export interface OpenAIEmbeddingsParams { + apiKey: string + input: string | string[] + model?: string + encoding_format?: 'float' | 'base64' + user?: string +} diff --git a/sim/tools/perplexity/chat.ts b/apps/sim/tools/perplexity/chat.ts similarity index 100% rename from sim/tools/perplexity/chat.ts rename to apps/sim/tools/perplexity/chat.ts diff --git a/sim/tools/perplexity/index.ts b/apps/sim/tools/perplexity/index.ts similarity index 100% rename from sim/tools/perplexity/index.ts rename to apps/sim/tools/perplexity/index.ts diff --git a/sim/tools/perplexity/types.ts b/apps/sim/tools/perplexity/types.ts similarity index 100% rename from sim/tools/perplexity/types.ts rename to apps/sim/tools/perplexity/types.ts diff --git a/sim/tools/pinecone/fetch.ts b/apps/sim/tools/pinecone/fetch.ts similarity index 100% rename from sim/tools/pinecone/fetch.ts rename to apps/sim/tools/pinecone/fetch.ts diff --git a/sim/tools/pinecone/generate_embeddings.ts b/apps/sim/tools/pinecone/generate_embeddings.ts similarity index 100% rename from sim/tools/pinecone/generate_embeddings.ts rename to apps/sim/tools/pinecone/generate_embeddings.ts diff --git a/sim/tools/pinecone/index.ts 
b/apps/sim/tools/pinecone/index.ts similarity index 100% rename from sim/tools/pinecone/index.ts rename to apps/sim/tools/pinecone/index.ts diff --git a/sim/tools/pinecone/search_text.ts b/apps/sim/tools/pinecone/search_text.ts similarity index 100% rename from sim/tools/pinecone/search_text.ts rename to apps/sim/tools/pinecone/search_text.ts diff --git a/sim/tools/pinecone/search_vector.ts b/apps/sim/tools/pinecone/search_vector.ts similarity index 100% rename from sim/tools/pinecone/search_vector.ts rename to apps/sim/tools/pinecone/search_vector.ts diff --git a/sim/tools/pinecone/types.ts b/apps/sim/tools/pinecone/types.ts similarity index 100% rename from sim/tools/pinecone/types.ts rename to apps/sim/tools/pinecone/types.ts diff --git a/sim/tools/pinecone/upsert_text.ts b/apps/sim/tools/pinecone/upsert_text.ts similarity index 100% rename from sim/tools/pinecone/upsert_text.ts rename to apps/sim/tools/pinecone/upsert_text.ts diff --git a/sim/tools/reddit/get_comments.ts b/apps/sim/tools/reddit/get_comments.ts similarity index 64% rename from sim/tools/reddit/get_comments.ts rename to apps/sim/tools/reddit/get_comments.ts index 94c4d25c3..38044f553 100644 --- a/sim/tools/reddit/get_comments.ts +++ b/apps/sim/tools/reddit/get_comments.ts @@ -7,86 +7,88 @@ export const getCommentsTool: ToolConfig { // Sanitize inputs const subreddit = params.subreddit.trim().replace(/^r\//, '') const sort = params.sort || 'confidence' const limit = Math.min(Math.max(1, params.limit || 50), 100) - + // Build URL return `https://www.reddit.com/r/${subreddit}/comments/${params.postId}.json?sort=${sort}&limit=${limit}` }, method: 'GET', headers: () => ({ - 'User-Agent': 'Sim Studio Reddit Tool/1.0' - }) + 'User-Agent': 'Sim Studio Reddit Tool/1.0', + }), }, - + transformResponse: async (response: Response) => { const data = await response.json() - + // Extract post data (first element in the array) const postData = data[0]?.data?.children[0]?.data || {} - + // Extract and transform 
comments (second element in the array) const commentsData = data[1]?.data?.children || [] - + // Recursive function to process nested comments const processComments = (comments: any[]): any[] => { - return comments.map(comment => { - const commentData = comment.data - - // Skip non-comment items like "more" items - if (!commentData || comment.kind !== 't1') { - return null - } - - // Process nested replies if they exist - const replies = commentData.replies && - commentData.replies.data && - commentData.replies.data.children ? - processComments(commentData.replies.data.children) : - [] - - return { - id: commentData.id, - author: commentData.author, - body: commentData.body, - created_utc: commentData.created_utc, - score: commentData.score, - permalink: `https://www.reddit.com${commentData.permalink}`, - replies: replies.filter(Boolean) - } - }).filter(Boolean) + return comments + .map((comment) => { + const commentData = comment.data + + // Skip non-comment items like "more" items + if (!commentData || comment.kind !== 't1') { + return null + } + + // Process nested replies if they exist + const replies = + commentData.replies && commentData.replies.data && commentData.replies.data.children + ? 
processComments(commentData.replies.data.children) + : [] + + return { + id: commentData.id, + author: commentData.author, + body: commentData.body, + created_utc: commentData.created_utc, + score: commentData.score, + permalink: `https://www.reddit.com${commentData.permalink}`, + replies: replies.filter(Boolean), + } + }) + .filter(Boolean) } - + const comments = processComments(commentsData) - + return { success: true, output: { @@ -97,14 +99,14 @@ export const getCommentsTool: ToolConfig { return `Error fetching Reddit comments: ${error.message}` - } -} \ No newline at end of file + }, +} diff --git a/sim/tools/reddit/get_posts.ts b/apps/sim/tools/reddit/get_posts.ts similarity index 68% rename from sim/tools/reddit/get_posts.ts rename to apps/sim/tools/reddit/get_posts.ts index cab0e87d7..a7450dc5c 100644 --- a/sim/tools/reddit/get_posts.ts +++ b/apps/sim/tools/reddit/get_posts.ts @@ -7,88 +7,90 @@ export const getPostsTool: ToolConfig = name: 'Get Reddit Posts', description: 'Fetch posts from a subreddit with different sorting options', version: '1.0.0', - + params: { subreddit: { type: 'string', required: true, - description: 'The name of the subreddit to fetch posts from (without the r/ prefix)' + description: 'The name of the subreddit to fetch posts from (without the r/ prefix)', }, sort: { type: 'string', required: false, - description: 'Sort method for posts: "hot", "new", "top", or "rising" (default: "hot")' + description: 'Sort method for posts: "hot", "new", "top", or "rising" (default: "hot")', }, limit: { type: 'number', required: false, - description: 'Maximum number of posts to return (default: 10, max: 100)' + description: 'Maximum number of posts to return (default: 10, max: 100)', }, time: { type: 'string', required: false, - description: 'Time filter for "top" sorted posts: "day", "week", "month", "year", or "all" (default: "day")' - } + description: + 'Time filter for "top" sorted posts: "day", "week", "month", "year", or "all" (default: 
"day")', + }, }, - + request: { url: (params: RedditPostsParams) => { // Sanitize inputs const subreddit = params.subreddit.trim().replace(/^r\//, '') const sort = params.sort || 'hot' const limit = Math.min(Math.max(1, params.limit || 10), 100) - + // Build URL with appropriate parameters let url = `https://www.reddit.com/r/${subreddit}/${sort}.json?limit=${limit}` - + // Add time parameter only for 'top' sorting if (sort === 'top' && params.time) { url += `&t=${params.time}` } - + return url }, method: 'GET', headers: () => ({ - 'User-Agent': 'Sim Studio Reddit Tool/1.0' - }) + 'User-Agent': 'Sim Studio Reddit Tool/1.0', + }), }, - + transformResponse: async (response: Response) => { const data = await response.json() - + // Extract subreddit name from response const subredditName = data.data?.children[0]?.data?.subreddit || 'unknown' - + // Transform posts data - const posts = data.data?.children.map((child: any) => { - const post = child.data - return { - id: post.id, - title: post.title, - author: post.author, - url: post.url, - permalink: `https://www.reddit.com${post.permalink}`, - created_utc: post.created_utc, - score: post.score, - num_comments: post.num_comments, - is_self: post.is_self, - selftext: post.selftext, - thumbnail: post.thumbnail, - subreddit: post.subreddit - } - }) || [] - + const posts = + data.data?.children.map((child: any) => { + const post = child.data + return { + id: post.id, + title: post.title, + author: post.author, + url: post.url, + permalink: `https://www.reddit.com${post.permalink}`, + created_utc: post.created_utc, + score: post.score, + num_comments: post.num_comments, + is_self: post.is_self, + selftext: post.selftext, + thumbnail: post.thumbnail, + subreddit: post.subreddit, + } + }) || [] + return { success: true, output: { subreddit: subredditName, - posts - } + posts, + }, } }, - + transformError: (error) => { return `Error fetching Reddit posts: ${error.message}` - } -} \ No newline at end of file + }, +} diff --git 
a/sim/tools/reddit/hot_posts.ts b/apps/sim/tools/reddit/hot_posts.ts similarity index 100% rename from sim/tools/reddit/hot_posts.ts rename to apps/sim/tools/reddit/hot_posts.ts diff --git a/sim/tools/reddit/index.ts b/apps/sim/tools/reddit/index.ts similarity index 100% rename from sim/tools/reddit/index.ts rename to apps/sim/tools/reddit/index.ts index d35c0cdef..e371fe017 100644 --- a/sim/tools/reddit/index.ts +++ b/apps/sim/tools/reddit/index.ts @@ -1,6 +1,6 @@ -import { hotPostsTool } from './hot_posts' -import { getPostsTool } from './get_posts' import { getCommentsTool } from './get_comments' +import { getPostsTool } from './get_posts' +import { hotPostsTool } from './hot_posts' export const redditHotPostsTool = hotPostsTool export const redditGetPostsTool = getPostsTool diff --git a/sim/tools/reddit/types.ts b/apps/sim/tools/reddit/types.ts similarity index 100% rename from sim/tools/reddit/types.ts rename to apps/sim/tools/reddit/types.ts diff --git a/sim/tools/registry.ts b/apps/sim/tools/registry.ts similarity index 88% rename from sim/tools/registry.ts rename to apps/sim/tools/registry.ts index 6e27ca3ca..f87c1820f 100644 --- a/sim/tools/registry.ts +++ b/apps/sim/tools/registry.ts @@ -1,48 +1,69 @@ -import { airtableCreateRecordsTool, airtableGetRecordTool, airtableListRecordsTool, airtableUpdateRecordTool } from './airtable' +import { + airtableCreateRecordsTool, + airtableGetRecordTool, + airtableListRecordsTool, + airtableUpdateRecordTool, +} from './airtable' import { autoblocksPromptManagerTool } from './autoblocks' import { browserUseRunTaskTool } from './browser_use' +import { clayPopulateTool } from './clay' import { confluenceRetrieveTool, confluenceUpdateTool } from './confluence' -import { docsCreateTool, docsReadTool, docsWriteTool } from './google_docs' -import { driveDownloadTool, driveListTool, driveUploadTool } from './google_drive' +import { elevenLabsTtsTool } from './elevenlabs' import { exaAnswerTool, exaFindSimilarLinksTool, 
exaGetContentsTool, exaSearchTool } from './exa' import { fileParseTool } from './file' import { scrapeTool } from './firecrawl' import { functionExecuteTool } from './function' -import { githubCommentTool, githubLatestCommitTool, githubPrTool, githubRepoInfoTool } from './github' +import { + githubCommentTool, + githubLatestCommitTool, + githubPrTool, + githubRepoInfoTool, +} from './github' import { gmailReadTool, gmailSearchTool, gmailSendTool } from './gmail' -import { guestyGuestTool, guestyReservationTool } from './guesty' import { searchTool as googleSearchTool } from './google' +import { docsCreateTool, docsReadTool, docsWriteTool } from './google_docs' +import { driveDownloadTool, driveListTool, driveUploadTool } from './google_drive' +import { + sheetsAppendTool, + sheetsReadTool, + sheetsUpdateTool, + sheetsWriteTool, +} from './google_sheets' +import { guestyGuestTool, guestyReservationTool } from './guesty' import { requestTool as httpRequest } from './http' import { contactsTool as hubspotContacts } from './hubspot/contacts' import { readUrlTool } from './jina' +import { jiraBulkRetrieveTool, jiraRetrieveTool, jiraUpdateTool, jiraWriteTool } from './jira' import { linkupSearchTool } from './linkup' -import { mem0AddMemoriesTool, mem0SearchMemoriesTool, mem0GetMemoriesTool } from './mem0' +import { mem0AddMemoriesTool, mem0GetMemoriesTool, mem0SearchMemoriesTool } from './mem0' import { mistralParserTool } from './mistral' import { notionReadTool, notionWriteTool } from './notion' import { dalleTool, embeddingsTool as openAIEmbeddings } from './openai' import { perplexityChatTool } from './perplexity' -import { pineconeFetchTool, pineconeGenerateEmbeddingsTool, pineconeSearchTextTool, pineconeSearchVectorTool, pineconeUpsertTextTool } from './pinecone' -import { redditHotPostsTool, redditGetPostsTool, redditGetCommentsTool } from './reddit' +import { + pineconeFetchTool, + pineconeGenerateEmbeddingsTool, + pineconeSearchTextTool, + 
pineconeSearchVectorTool, + pineconeUpsertTextTool, +} from './pinecone' +import { redditGetCommentsTool, redditGetPostsTool, redditHotPostsTool } from './reddit' +import { s3GetObjectTool } from './s3' import { opportunitiesTool as salesforceOpportunities } from './salesforce/opportunities' import { searchTool as serperSearch } from './serper' -import { sheetsReadTool, sheetsUpdateTool, sheetsWriteTool, sheetsAppendTool } from './google_sheets' import { slackMessageTool } from './slack' import { stagehandAgentTool, stagehandExtractTool } from './stagehand' import { supabaseInsertTool, supabaseQueryTool } from './supabase' import { tavilyExtractTool, tavilySearchTool } from './tavily' +import { telegramMessageTool } from './telegram' import { thinkingTool } from './thinking' import { sendSMSTool } from './twilio' import { typeformFilesTool, typeformInsightsTool, typeformResponsesTool } from './typeform' +import { ToolConfig } from './types' import { visionTool } from './vision' import { whatsappSendMessageTool } from './whatsapp' import { xReadTool, xSearchTool, xUserTool, xWriteTool } from './x' import { youtubeSearchTool } from './youtube' -import { elevenLabsTtsTool } from './elevenlabs' -import { ToolConfig } from './types' -import { s3GetObjectTool } from './s3' -import { jiraRetrieveTool, jiraUpdateTool, jiraWriteTool, jiraBulkRetrieveTool } from './jira' -import { telegramMessageTool } from './telegram' -import { clayPopulateTool } from './clay' // Registry of all available tools export const tools: Record = { @@ -131,4 +152,4 @@ export const tools: Record = { s3_get_object: s3GetObjectTool, telegram_message: telegramMessageTool, clay_populate: clayPopulateTool, -} \ No newline at end of file +} diff --git a/sim/tools/s3/get_object.ts b/apps/sim/tools/s3/get_object.ts similarity index 75% rename from sim/tools/s3/get_object.ts rename to apps/sim/tools/s3/get_object.ts index 0721bd0a5..8604a53ab 100644 --- a/sim/tools/s3/get_object.ts +++ 
b/apps/sim/tools/s3/get_object.ts @@ -1,5 +1,5 @@ -import { ToolConfig } from '../types' import crypto from 'crypto' +import { ToolConfig } from '../types' // Function to encode S3 path components function encodeS3PathComponent(pathComponent: string): string { @@ -7,11 +7,19 @@ function encodeS3PathComponent(pathComponent: string): string { } // Function to calculate AWS signature key -function getSignatureKey(key: string, dateStamp: string, regionName: string, serviceName: string): Buffer { +function getSignatureKey( + key: string, + dateStamp: string, + regionName: string, + serviceName: string +): Buffer { if (!key || typeof key !== 'string') { throw new Error('Invalid key provided to getSignatureKey') } - const kDate = crypto.createHmac('sha256', 'AWS4' + key).update(dateStamp).digest() + const kDate = crypto + .createHmac('sha256', 'AWS4' + key) + .update(dateStamp) + .digest() const kRegion = crypto.createHmac('sha256', kDate).update(regionName).digest() const kService = crypto.createHmac('sha256', kRegion).update(serviceName).digest() const kSigning = crypto.createHmac('sha256', kService).update('aws4_request').digest() @@ -33,7 +41,9 @@ function parseS3Uri(s3Uri: string): { bucketName: string; region: string; object return { bucketName, region, objectKey } } catch (error) { - throw new Error('Invalid S3 Object URL format. Expected format: https://bucket-name.s3.region.amazonaws.com/path/to/file') + throw new Error( + 'Invalid S3 Object URL format. 
Expected format: https://bucket-name.s3.region.amazonaws.com/path/to/file' + ) } } @@ -43,10 +53,10 @@ function generatePresignedUrl(params: any, expiresIn: number = 3600): string { const amzDate = date.toISOString().replace(/[:-]|\.\d{3}/g, '') const dateStamp = amzDate.slice(0, 8) const encodedPath = encodeS3PathComponent(params.objectKey) - + // Set expiration time const expires = Math.floor(Date.now() / 1000) + expiresIn - + // Create the canonical request const method = 'GET' const canonicalUri = `/${encodedPath}` @@ -54,26 +64,36 @@ function generatePresignedUrl(params: any, expiresIn: number = 3600): string { const canonicalHeaders = 'host:' + params.bucketName + '.s3.' + params.region + '.amazonaws.com\n' const signedHeaders = 'host' const payloadHash = 'UNSIGNED-PAYLOAD' - - const canonicalRequest = method + '\n' + - canonicalUri + '\n' + - canonicalQueryString + '\n' + - canonicalHeaders + '\n' + - signedHeaders + '\n' + - payloadHash - + + const canonicalRequest = + method + + '\n' + + canonicalUri + + '\n' + + canonicalQueryString + + '\n' + + canonicalHeaders + + '\n' + + signedHeaders + + '\n' + + payloadHash + // Create string to sign const algorithm = 'AWS4-HMAC-SHA256' const credentialScope = dateStamp + '/' + params.region + '/s3/aws4_request' - const stringToSign = algorithm + '\n' + - amzDate + '\n' + - credentialScope + '\n' + - crypto.createHash('sha256').update(canonicalRequest).digest('hex') - + const stringToSign = + algorithm + + '\n' + + amzDate + + '\n' + + credentialScope + + '\n' + + crypto.createHash('sha256').update(canonicalRequest).digest('hex') + // Calculate signature const signingKey = getSignatureKey(params.secretAccessKey, dateStamp, params.region, 's3') const signature = crypto.createHmac('sha256', signingKey).update(stringToSign).digest('hex') - + // Create signed URL return `https://${params.bucketName}.s3.${params.region}.amazonaws.com/${encodedPath}?${canonicalQueryString}&X-Amz-Signature=${signature}` } @@ -99,20 +119,22 
@@ export const s3GetObjectTool: ToolConfig = { type: 'string', required: true, description: 'S3 Object URL (e.g., https://bucket-name.s3.region.amazonaws.com/path/to/file)', - } + }, }, request: { url: (params) => { try { const { bucketName, region, objectKey } = parseS3Uri(params.s3Uri) - + params.bucketName = bucketName params.region = region params.objectKey = objectKey return `https://${bucketName}.s3.${region}.amazonaws.com/${encodeS3PathComponent(objectKey)}` } catch (error) { - throw new Error('Invalid S3 Object URL format. Expected format: https://bucket-name.s3.region.amazonaws.com/path/to/file') + throw new Error( + 'Invalid S3 Object URL format. Expected format: https://bucket-name.s3.region.amazonaws.com/path/to/file' + ) } }, method: 'HEAD', @@ -130,51 +152,68 @@ export const s3GetObjectTool: ToolConfig = { const date = new Date() const amzDate = date.toISOString().replace(/[:-]|\.\d{3}/g, '') const dateStamp = amzDate.slice(0, 8) - + const method = 'HEAD' const encodedPath = encodeS3PathComponent(params.objectKey) const canonicalUri = `/${encodedPath}` const canonicalQueryString = '' const payloadHash = crypto.createHash('sha256').update('').digest('hex') const host = `${params.bucketName}.s3.${params.region}.amazonaws.com` - const canonicalHeaders = - `host:${host}\n` + - `x-amz-content-sha256:${payloadHash}\n` + - `x-amz-date:${amzDate}\n` + const canonicalHeaders = + `host:${host}\n` + `x-amz-content-sha256:${payloadHash}\n` + `x-amz-date:${amzDate}\n` const signedHeaders = 'host;x-amz-content-sha256;x-amz-date' - const canonicalRequest = method + '\n' + - canonicalUri + '\n' + - canonicalQueryString + '\n' + - canonicalHeaders + '\n' + - signedHeaders + '\n' + - payloadHash - + const canonicalRequest = + method + + '\n' + + canonicalUri + + '\n' + + canonicalQueryString + + '\n' + + canonicalHeaders + + '\n' + + signedHeaders + + '\n' + + payloadHash + const algorithm = 'AWS4-HMAC-SHA256' const credentialScope = dateStamp + '/' + params.region + 
'/s3/aws4_request' - const stringToSign = algorithm + '\n' + - amzDate + '\n' + - credentialScope + '\n' + - crypto.createHash('sha256').update(canonicalRequest).digest('hex') - + const stringToSign = + algorithm + + '\n' + + amzDate + + '\n' + + credentialScope + + '\n' + + crypto.createHash('sha256').update(canonicalRequest).digest('hex') + const signingKey = getSignatureKey(params.secretAccessKey, dateStamp, params.region, 's3') const signature = crypto.createHmac('sha256', signingKey).update(stringToSign).digest('hex') - - const authorizationHeader = algorithm + ' ' + - 'Credential=' + params.accessKeyId + '/' + credentialScope + ', ' + - 'SignedHeaders=' + signedHeaders + ', ' + - 'Signature=' + signature - + + const authorizationHeader = + algorithm + + ' ' + + 'Credential=' + + params.accessKeyId + + '/' + + credentialScope + + ', ' + + 'SignedHeaders=' + + signedHeaders + + ', ' + + 'Signature=' + + signature + return { - 'Host': host, + Host: host, 'X-Amz-Content-Sha256': payloadHash, 'X-Amz-Date': amzDate, - 'Authorization': authorizationHeader + Authorization: authorizationHeader, } } catch (error) { const errorMessage = error instanceof Error ? error.message : 'Unknown error' throw new Error('Failed to generate request headers: ' + errorMessage) } - } + }, }, transformResponse: async (response: Response, params) => { try { @@ -207,9 +246,9 @@ export const s3GetObjectTool: ToolConfig = { fileType: contentType, size: contentLength, name: fileName, - lastModified: lastModified - } - } + lastModified: lastModified, + }, + }, } } catch (error: unknown) { const errorMessage = error instanceof Error ? 
error.message : 'Unknown error' @@ -221,10 +260,10 @@ export const s3GetObjectTool: ToolConfig = { fileType: 'error', size: 0, name: params.objectKey?.split('/').pop() || 'unknown', - error: errorMessage - } - } + error: errorMessage, + }, + }, } } - } -} \ No newline at end of file + }, +} diff --git a/sim/tools/s3/index.ts b/apps/sim/tools/s3/index.ts similarity index 64% rename from sim/tools/s3/index.ts rename to apps/sim/tools/s3/index.ts index 9bba93b94..8226e43c5 100644 --- a/sim/tools/s3/index.ts +++ b/apps/sim/tools/s3/index.ts @@ -1,3 +1,3 @@ import { s3GetObjectTool } from './get_object' -export { s3GetObjectTool } \ No newline at end of file +export { s3GetObjectTool } diff --git a/sim/tools/s3/types.ts b/apps/sim/tools/s3/types.ts similarity index 99% rename from sim/tools/s3/types.ts rename to apps/sim/tools/s3/types.ts index ce0bd90f7..32e383c8e 100644 --- a/sim/tools/s3/types.ts +++ b/apps/sim/tools/s3/types.ts @@ -11,4 +11,4 @@ export interface S3Response extends ToolResponse { error?: string } } -} \ No newline at end of file +} diff --git a/sim/tools/salesforce/opportunities.ts b/apps/sim/tools/salesforce/opportunities.ts similarity index 100% rename from sim/tools/salesforce/opportunities.ts rename to apps/sim/tools/salesforce/opportunities.ts diff --git a/sim/tools/google/index.ts b/apps/sim/tools/serper/index.ts similarity index 63% rename from sim/tools/google/index.ts rename to apps/sim/tools/serper/index.ts index 293effaa6..93b47653a 100644 --- a/sim/tools/google/index.ts +++ b/apps/sim/tools/serper/index.ts @@ -1,3 +1,3 @@ import { searchTool } from './search' -export { searchTool } \ No newline at end of file +export { searchTool } diff --git a/sim/tools/serper/search.ts b/apps/sim/tools/serper/search.ts similarity index 100% rename from sim/tools/serper/search.ts rename to apps/sim/tools/serper/search.ts diff --git a/apps/sim/tools/serper/types.ts b/apps/sim/tools/serper/types.ts new file mode 100644 index 000000000..03ea27178 --- 
/dev/null +++ b/apps/sim/tools/serper/types.ts @@ -0,0 +1,28 @@ +import { ToolResponse } from '../types' + +export interface SearchParams { + query: string + apiKey: string + num?: number + gl?: string // country code + hl?: string // language code + type?: 'search' | 'news' | 'places' | 'images' +} + +export interface SearchResult { + title: string + link: string + snippet: string + position: number + imageUrl?: string + date?: string + rating?: string + reviews?: string + address?: string +} + +export interface SearchResponse extends ToolResponse { + output: { + searchResults: SearchResult[] + } +} diff --git a/sim/tools/slack/index.ts b/apps/sim/tools/slack/index.ts similarity index 62% rename from sim/tools/slack/index.ts rename to apps/sim/tools/slack/index.ts index f470c333e..642218ac9 100644 --- a/sim/tools/slack/index.ts +++ b/apps/sim/tools/slack/index.ts @@ -1,3 +1,3 @@ import { slackMessageTool } from './message' -export { slackMessageTool } \ No newline at end of file +export { slackMessageTool } diff --git a/sim/tools/slack/message.ts b/apps/sim/tools/slack/message.ts similarity index 100% rename from sim/tools/slack/message.ts rename to apps/sim/tools/slack/message.ts diff --git a/apps/sim/tools/slack/types.ts b/apps/sim/tools/slack/types.ts new file mode 100644 index 000000000..f5f1e2d16 --- /dev/null +++ b/apps/sim/tools/slack/types.ts @@ -0,0 +1,14 @@ +import { ToolResponse } from '../types' + +export interface SlackMessageParams { + apiKey: string + channel: string + text: string +} + +export interface SlackMessageResponse extends ToolResponse { + output: { + ts: string + channel: string + } +} diff --git a/sim/tools/stagehand/agent.ts b/apps/sim/tools/stagehand/agent.ts similarity index 100% rename from sim/tools/stagehand/agent.ts rename to apps/sim/tools/stagehand/agent.ts diff --git a/sim/tools/stagehand/extract.ts b/apps/sim/tools/stagehand/extract.ts similarity index 100% rename from sim/tools/stagehand/extract.ts rename to 
apps/sim/tools/stagehand/extract.ts diff --git a/sim/tools/stagehand/index.ts b/apps/sim/tools/stagehand/index.ts similarity index 100% rename from sim/tools/stagehand/index.ts rename to apps/sim/tools/stagehand/index.ts diff --git a/sim/tools/stagehand/types.ts b/apps/sim/tools/stagehand/types.ts similarity index 100% rename from sim/tools/stagehand/types.ts rename to apps/sim/tools/stagehand/types.ts diff --git a/sim/tools/supabase/index.ts b/apps/sim/tools/supabase/index.ts similarity index 100% rename from sim/tools/supabase/index.ts rename to apps/sim/tools/supabase/index.ts diff --git a/sim/tools/supabase/insert.ts b/apps/sim/tools/supabase/insert.ts similarity index 100% rename from sim/tools/supabase/insert.ts rename to apps/sim/tools/supabase/insert.ts diff --git a/sim/tools/supabase/query.ts b/apps/sim/tools/supabase/query.ts similarity index 100% rename from sim/tools/supabase/query.ts rename to apps/sim/tools/supabase/query.ts diff --git a/sim/tools/supabase/types.ts b/apps/sim/tools/supabase/types.ts similarity index 100% rename from sim/tools/supabase/types.ts rename to apps/sim/tools/supabase/types.ts diff --git a/sim/tools/tavily/extract.ts b/apps/sim/tools/tavily/extract.ts similarity index 100% rename from sim/tools/tavily/extract.ts rename to apps/sim/tools/tavily/extract.ts diff --git a/sim/tools/tavily/index.ts b/apps/sim/tools/tavily/index.ts similarity index 100% rename from sim/tools/tavily/index.ts rename to apps/sim/tools/tavily/index.ts diff --git a/sim/tools/tavily/search.ts b/apps/sim/tools/tavily/search.ts similarity index 100% rename from sim/tools/tavily/search.ts rename to apps/sim/tools/tavily/search.ts diff --git a/sim/tools/tavily/types.ts b/apps/sim/tools/tavily/types.ts similarity index 100% rename from sim/tools/tavily/types.ts rename to apps/sim/tools/tavily/types.ts diff --git a/sim/tools/telegram/index.ts b/apps/sim/tools/telegram/index.ts similarity index 100% rename from sim/tools/telegram/index.ts rename to 
apps/sim/tools/telegram/index.ts diff --git a/sim/tools/telegram/message.ts b/apps/sim/tools/telegram/message.ts similarity index 92% rename from sim/tools/telegram/message.ts rename to apps/sim/tools/telegram/message.ts index c7e42dc4c..306a2c025 100644 --- a/sim/tools/telegram/message.ts +++ b/apps/sim/tools/telegram/message.ts @@ -28,7 +28,8 @@ export const telegramMessageTool: ToolConfig `https://api.telegram.org/bot${params.botToken}/sendMessage`, + url: (params: TelegramMessageParams) => + `https://api.telegram.org/bot${params.botToken}/sendMessage`, method: 'POST', headers: () => ({ 'Content-Type': 'application/json', diff --git a/apps/sim/tools/telegram/types.ts b/apps/sim/tools/telegram/types.ts new file mode 100644 index 000000000..446e02348 --- /dev/null +++ b/apps/sim/tools/telegram/types.ts @@ -0,0 +1,23 @@ +import { ToolResponse } from '../types' + +export interface TelegramMessageParams { + botToken: string + chatId: string + text: string +} + +export interface TelegramMessageResponse extends ToolResponse { + output: { + ok: boolean + result: { + message_id: number + chat: { + id: number + type: string + username: string + } + date: number + text: string + } + } +} diff --git a/sim/tools/thinking/index.ts b/apps/sim/tools/thinking/index.ts similarity index 100% rename from sim/tools/thinking/index.ts rename to apps/sim/tools/thinking/index.ts diff --git a/sim/tools/thinking/tool.ts b/apps/sim/tools/thinking/tool.ts similarity index 100% rename from sim/tools/thinking/tool.ts rename to apps/sim/tools/thinking/tool.ts diff --git a/sim/tools/thinking/types.ts b/apps/sim/tools/thinking/types.ts similarity index 100% rename from sim/tools/thinking/types.ts rename to apps/sim/tools/thinking/types.ts diff --git a/sim/tools/twilio/index.ts b/apps/sim/tools/twilio/index.ts similarity index 100% rename from sim/tools/twilio/index.ts rename to apps/sim/tools/twilio/index.ts diff --git a/sim/tools/twilio/send_sms.ts b/apps/sim/tools/twilio/send_sms.ts similarity 
index 100% rename from sim/tools/twilio/send_sms.ts rename to apps/sim/tools/twilio/send_sms.ts diff --git a/sim/tools/twilio/types.ts b/apps/sim/tools/twilio/types.ts similarity index 100% rename from sim/tools/twilio/types.ts rename to apps/sim/tools/twilio/types.ts diff --git a/sim/tools/typeform/files.test.ts b/apps/sim/tools/typeform/files.test.ts similarity index 100% rename from sim/tools/typeform/files.test.ts rename to apps/sim/tools/typeform/files.test.ts diff --git a/sim/tools/typeform/files.ts b/apps/sim/tools/typeform/files.ts similarity index 99% rename from sim/tools/typeform/files.ts rename to apps/sim/tools/typeform/files.ts index e138b816d..31e5db273 100644 --- a/sim/tools/typeform/files.ts +++ b/apps/sim/tools/typeform/files.ts @@ -1,7 +1,6 @@ import { ToolConfig } from '../types' import { TypeformFilesParams, TypeformFilesResponse } from './types' - export const filesTool: ToolConfig = { id: 'typeform_files', name: 'Typeform Files', diff --git a/sim/tools/typeform/index.test.ts b/apps/sim/tools/typeform/index.test.ts similarity index 100% rename from sim/tools/typeform/index.test.ts rename to apps/sim/tools/typeform/index.test.ts diff --git a/sim/tools/typeform/index.ts b/apps/sim/tools/typeform/index.ts similarity index 100% rename from sim/tools/typeform/index.ts rename to apps/sim/tools/typeform/index.ts diff --git a/sim/tools/typeform/insights.test.ts b/apps/sim/tools/typeform/insights.test.ts similarity index 100% rename from sim/tools/typeform/insights.test.ts rename to apps/sim/tools/typeform/insights.test.ts diff --git a/sim/tools/typeform/insights.ts b/apps/sim/tools/typeform/insights.ts similarity index 100% rename from sim/tools/typeform/insights.ts rename to apps/sim/tools/typeform/insights.ts diff --git a/sim/tools/typeform/responses.test.ts b/apps/sim/tools/typeform/responses.test.ts similarity index 100% rename from sim/tools/typeform/responses.test.ts rename to apps/sim/tools/typeform/responses.test.ts diff --git 
a/sim/tools/typeform/responses.ts b/apps/sim/tools/typeform/responses.ts similarity index 100% rename from sim/tools/typeform/responses.ts rename to apps/sim/tools/typeform/responses.ts diff --git a/apps/sim/tools/typeform/types.ts b/apps/sim/tools/typeform/types.ts new file mode 100644 index 000000000..0fbae9060 --- /dev/null +++ b/apps/sim/tools/typeform/types.ts @@ -0,0 +1,105 @@ +import { ToolResponse } from '../types' + +export interface TypeformFilesParams { + formId: string + responseId: string + fieldId: string + filename: string + inline?: boolean + apiKey: string +} + +export interface TypeformFilesResponse extends ToolResponse { + output: { + fileUrl: string + contentType: string + filename: string + } +} + +export interface TypeformInsightsParams { + formId: string + apiKey: string +} + +// This is the actual output data structure from the API +export interface TypeformInsightsData { + fields: Array<{ + dropoffs: number + id: string + label: string + ref: string + title: string + type: string + views: number + }> + form: { + platforms: Array<{ + average_time: number + completion_rate: number + platform: string + responses_count: number + total_visits: number + unique_visits: number + }> + summary: { + average_time: number + completion_rate: number + responses_count: number + total_visits: number + unique_visits: number + } + } +} + +// The ToolResponse uses a union type to allow either successful data or empty object in error case +export interface TypeformInsightsResponse extends ToolResponse { + output: TypeformInsightsData | Record +} + +export interface TypeformResponsesParams { + formId: string + apiKey: string + pageSize?: number + since?: string + until?: string + completed?: string +} + +export interface TypeformResponsesResponse extends ToolResponse { + output: { + total_items: number + page_count: number + items: Array<{ + landing_id: string + token: string + landed_at: string + submitted_at: string + metadata: { + user_agent: string + 
platform: string + referer: string + network_id: string + browser: string + } + answers: Array<{ + field: { + id: string + type: string + ref: string + } + type: string + [key: string]: any + }> + hidden: Record + calculated: { + score: number + } + variables: Array<{ + key: string + type: string + [key: string]: any + }> + }> + } +} diff --git a/sim/tools/types.ts b/apps/sim/tools/types.ts similarity index 100% rename from sim/tools/types.ts rename to apps/sim/tools/types.ts diff --git a/sim/tools/utils.test.ts b/apps/sim/tools/utils.test.ts similarity index 94% rename from sim/tools/utils.test.ts rename to apps/sim/tools/utils.test.ts index 88b42a66a..5c305f135 100644 --- a/sim/tools/utils.test.ts +++ b/apps/sim/tools/utils.test.ts @@ -1,13 +1,13 @@ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' import { ToolConfig, ToolResponse } from './types' -import { - executeRequest, - formatRequestParams, - transformTable, - validateToolRequest, +import { + createCustomToolRequestBody, createParamSchema, + executeRequest, + formatRequestParams, getClientEnvVars, - createCustomToolRequestBody + transformTable, + validateToolRequest, } from './utils' // Mock logger @@ -25,14 +25,14 @@ vi.mock('@/stores/settings/environment/store', () => { const mockStore = { getAllVariables: vi.fn().mockReturnValue({ API_KEY: { value: 'mock-api-key' }, - BASE_URL: { value: 'https://example.com' } - }) + BASE_URL: { value: 'https://example.com' }, + }), } - + return { useEnvironmentStore: { - getState: vi.fn().mockImplementation(() => mockStore) - } + getState: vi.fn().mockImplementation(() => mockStore), + }, } }) @@ -46,7 +46,7 @@ beforeEach(() => { afterEach(() => { // Reset window to original value global.window = originalWindow - + // Clear mock call history vi.clearAllMocks() }) @@ -511,12 +511,12 @@ describe('createParamSchema', () => { type: 'object', properties: { required1: { type: 'string', description: 'Required param' }, - optional1: { type: 'number', 
description: 'Optional param' } + optional1: { type: 'number', description: 'Optional param' }, }, - required: ['required1'] - } - } - } + required: ['required1'], + }, + }, + }, } const result = createParamSchema(customTool) @@ -526,34 +526,34 @@ describe('createParamSchema', () => { type: 'string', required: true, requiredForToolCall: true, - description: 'Required param' + description: 'Required param', }, optional1: { type: 'number', required: false, requiredForToolCall: false, - description: 'Optional param' - } + description: 'Optional param', + }, }) }) it('should handle empty or missing schema gracefully', () => { - const emptyTool = { + const emptyTool = { id: 'empty-tool', title: 'Empty Tool', - schema: {} + schema: {}, } const result = createParamSchema(emptyTool) - + expect(result).toEqual({}) - - const missingPropsTool = { + + const missingPropsTool = { id: 'missing-props', title: 'Missing Props', - schema: { function: { parameters: {} } } + schema: { function: { parameters: {} } }, } - + const result2 = createParamSchema(missingPropsTool) expect(result2).toEqual({}) }) @@ -565,24 +565,24 @@ describe('getClientEnvVars', () => { const mockStoreGetter = () => ({ getAllVariables: () => ({ API_KEY: { value: 'mock-api-key' }, - BASE_URL: { value: 'https://example.com' } - }) + BASE_URL: { value: 'https://example.com' }, + }), }) - + const result = getClientEnvVars(mockStoreGetter) - + expect(result).toEqual({ API_KEY: 'mock-api-key', - BASE_URL: 'https://example.com' + BASE_URL: 'https://example.com', }) }) - + it('should return empty object in server environment', () => { // Remove window to simulate server environment global.window = undefined as any - + const result = getClientEnvVars() - + expect(result).toEqual({}) }) }) @@ -593,56 +593,56 @@ describe('createCustomToolRequestBody', () => { code: 'return a + b', schema: { function: { - parameters: { type: 'object', properties: {} } - } - } + parameters: { type: 'object', properties: {} }, + }, + }, } - 
+ // Create a mock store for testing const mockStoreGetter = () => ({ getAllVariables: () => ({ API_KEY: { value: 'mock-api-key' }, - BASE_URL: { value: 'https://example.com' } - }) + BASE_URL: { value: 'https://example.com' }, + }), }) - + const bodyFn = createCustomToolRequestBody(customTool, true, undefined, mockStoreGetter) const result = bodyFn({ a: 5, b: 3 }) - + expect(result).toEqual({ code: 'return a + b', params: { a: 5, b: 3 }, schema: { type: 'object', properties: {} }, envVars: { API_KEY: 'mock-api-key', - BASE_URL: 'https://example.com' + BASE_URL: 'https://example.com', }, workflowId: undefined, - isCustomTool: true + isCustomTool: true, }) }) - + it('should create request body function for server-side execution', () => { const customTool = { code: 'return a + b', schema: { function: { - parameters: { type: 'object', properties: {} } - } - } + parameters: { type: 'object', properties: {} }, + }, + }, } - + const workflowId = 'test-workflow-123' const bodyFn = createCustomToolRequestBody(customTool, false, workflowId) const result = bodyFn({ a: 5, b: 3 }) - + expect(result).toEqual({ code: 'return a + b', params: { a: 5, b: 3 }, schema: { type: 'object', properties: {} }, envVars: {}, workflowId: 'test-workflow-123', - isCustomTool: true + isCustomTool: true, }) }) }) diff --git a/sim/tools/utils.ts b/apps/sim/tools/utils.ts similarity index 94% rename from sim/tools/utils.ts rename to apps/sim/tools/utils.ts index 1bf746017..f01d00859 100644 --- a/sim/tools/utils.ts +++ b/apps/sim/tools/utils.ts @@ -1,9 +1,9 @@ import { createLogger } from '@/lib/logs/console-logger' -import { TableRow } from './types' -import { ToolConfig, ToolResponse } from './types' +import { useCustomToolsStore } from '@/stores/custom-tools/store' import { useEnvironmentStore } from '@/stores/settings/environment/store' import { tools } from './registry' -import { useCustomToolsStore } from '@/stores/custom-tools/store' +import { TableRow } from './types' +import { ToolConfig, 
ToolResponse } from './types' const logger = createLogger('ToolsUtils') @@ -181,32 +181,32 @@ export function createParamSchema(customTool: any): Record { const params: Record = {} if (customTool.schema.function?.parameters?.properties) { - const properties = customTool.schema.function.parameters.properties; - const required = customTool.schema.function.parameters.required || []; - const optionalToolInputs = customTool.schema.function.parameters.optionalToolInputs || []; - + const properties = customTool.schema.function.parameters.properties + const required = customTool.schema.function.parameters.required || [] + const optionalToolInputs = customTool.schema.function.parameters.optionalToolInputs || [] + Object.entries(properties).forEach(([key, config]: [string, any]) => { - const isRequired = required.includes(key); - const isOptionalInput = optionalToolInputs.includes(key); - + const isRequired = required.includes(key) + const isOptionalInput = optionalToolInputs.includes(key) + // Create the base parameter configuration const paramConfig: Record = { type: config.type || 'string', required: isRequired, requiredForToolCall: isRequired, description: config.description || '', - }; - + } + // Only add optionalToolInput if it's true to maintain backward compatibility with tests if (isOptionalInput) { - paramConfig.optionalToolInput = true; + paramConfig.optionalToolInput = true } - - params[key] = paramConfig; + + params[key] = paramConfig }) } - return params; + return params } /** @@ -214,8 +214,8 @@ export function createParamSchema(customTool: any): Record { * @param getStore Optional function to get the store (useful for testing) */ export function getClientEnvVars(getStore?: () => any): Record { - if (!isBrowser()) return {}; - + if (!isBrowser()) return {} + try { // Allow injecting the store for testing const envStore = getStore ? 
getStore() : useEnvironmentStore.getState() @@ -243,8 +243,8 @@ export function getClientEnvVars(getStore?: () => any): Record { * @param getStore Optional function to get the store (useful for testing) */ export function createCustomToolRequestBody( - customTool: any, - isClient: boolean = true, + customTool: any, + isClient: boolean = true, workflowId?: string, getStore?: () => any ) { @@ -275,7 +275,7 @@ export function getTool(toolId: string): ToolConfig | undefined { // Only try to use the sync version on the client const customToolsStore = useCustomToolsStore.getState() const identifier = toolId.replace('custom_', '') - + // Try to find the tool directly by ID first let customTool = customToolsStore.getTool(identifier) @@ -295,7 +295,10 @@ export function getTool(toolId: string): ToolConfig | undefined { } // Get a tool by its ID asynchronously (supports server-side) -export async function getToolAsync(toolId: string, workflowId?: string): Promise { +export async function getToolAsync( + toolId: string, + workflowId?: string +): Promise { // Check for built-in tools const builtInTool = tools[toolId] if (builtInTool) return builtInTool @@ -311,7 +314,7 @@ export async function getToolAsync(toolId: string, workflowId?: string): Promise // Helper function to create a tool config from a custom tool function createToolConfig(customTool: any, customToolId: string): ToolConfig { // Create a parameter schema from the custom tool schema - const params = createParamSchema(customTool); + const params = createParamSchema(customTool) // Create a tool config for the custom tool return { @@ -368,7 +371,7 @@ function createToolConfig(customTool: any, customToolId: string): ToolConfig { // Look for the variable in our environment store first, then in params const envVar = envVars[varName] const varValue = envVar ? 
envVar.value : mergedParams[varName] || '' - + resolvedCode = resolvedCode.replaceAll(match, varValue) } @@ -425,44 +428,47 @@ function createToolConfig(customTool: any, customToolId: string): ToolConfig { } // Create a tool config from a custom tool definition -async function getCustomTool(customToolId: string, workflowId?: string): Promise { +async function getCustomTool( + customToolId: string, + workflowId?: string +): Promise { const identifier = customToolId.replace('custom_', '') - + try { const baseUrl = process.env.NEXT_PUBLIC_APP_URL || '' const url = new URL('/api/tools/custom', baseUrl) - + // Add workflowId as a query parameter if available if (workflowId) { url.searchParams.append('workflowId', workflowId) } - + const response = await fetch(url.toString()) - + if (!response.ok) { logger.error(`Failed to fetch custom tools: ${response.statusText}`) return undefined } - + const result = await response.json() if (!result.data || !Array.isArray(result.data)) { logger.error(`Invalid response when fetching custom tools: ${JSON.stringify(result)}`) return undefined } - + // Try to find the tool by ID or title - const customTool = result.data.find((tool: any) => - tool.id === identifier || tool.title === identifier + const customTool = result.data.find( + (tool: any) => tool.id === identifier || tool.title === identifier ) - + if (!customTool) { logger.error(`Custom tool not found: ${identifier}`) return undefined } - + // Create a parameter schema - const params = createParamSchema(customTool); + const params = createParamSchema(customTool) // Create a tool config for the custom tool return { diff --git a/sim/tools/vision/index.ts b/apps/sim/tools/vision/index.ts similarity index 62% rename from sim/tools/vision/index.ts rename to apps/sim/tools/vision/index.ts index f313b0b9e..33ebc1c11 100644 --- a/sim/tools/vision/index.ts +++ b/apps/sim/tools/vision/index.ts @@ -1,3 +1,3 @@ import { visionTool } from './tool' -export { visionTool } \ No newline at end of 
file +export { visionTool } diff --git a/sim/tools/vision/tool.ts b/apps/sim/tools/vision/tool.ts similarity index 100% rename from sim/tools/vision/tool.ts rename to apps/sim/tools/vision/tool.ts diff --git a/apps/sim/tools/vision/types.ts b/apps/sim/tools/vision/types.ts new file mode 100644 index 000000000..b955f830c --- /dev/null +++ b/apps/sim/tools/vision/types.ts @@ -0,0 +1,16 @@ +import { ToolResponse } from '../types' + +export interface VisionParams { + apiKey: string + imageUrl: string + model?: string + prompt?: string +} + +export interface VisionResponse extends ToolResponse { + output: { + content: string + model?: string + tokens?: number + } +} diff --git a/sim/tools/whatsapp/index.ts b/apps/sim/tools/whatsapp/index.ts similarity index 100% rename from sim/tools/whatsapp/index.ts rename to apps/sim/tools/whatsapp/index.ts diff --git a/sim/tools/whatsapp/send_message.ts b/apps/sim/tools/whatsapp/send_message.ts similarity index 100% rename from sim/tools/whatsapp/send_message.ts rename to apps/sim/tools/whatsapp/send_message.ts diff --git a/sim/tools/whatsapp/types.ts b/apps/sim/tools/whatsapp/types.ts similarity index 100% rename from sim/tools/whatsapp/types.ts rename to apps/sim/tools/whatsapp/types.ts diff --git a/sim/tools/x/index.ts b/apps/sim/tools/x/index.ts similarity index 100% rename from sim/tools/x/index.ts rename to apps/sim/tools/x/index.ts diff --git a/sim/tools/x/read.ts b/apps/sim/tools/x/read.ts similarity index 100% rename from sim/tools/x/read.ts rename to apps/sim/tools/x/read.ts diff --git a/sim/tools/x/search.ts b/apps/sim/tools/x/search.ts similarity index 97% rename from sim/tools/x/search.ts rename to apps/sim/tools/x/search.ts index f1b2c4056..fbd7708d9 100644 --- a/sim/tools/x/search.ts +++ b/apps/sim/tools/x/search.ts @@ -89,7 +89,10 @@ export const xSearchTool: ToolConfig = { console.error('X Search API Error:', JSON.stringify(data, null, 2)) return { success: false, - error: data.error?.detail || data.error?.title || 
'No results found or invalid response from X API', + error: + data.error?.detail || + data.error?.title || + 'No results found or invalid response from X API', output: { tweets: [], includes: { diff --git a/sim/tools/x/types.ts b/apps/sim/tools/x/types.ts similarity index 100% rename from sim/tools/x/types.ts rename to apps/sim/tools/x/types.ts diff --git a/sim/tools/x/user.ts b/apps/sim/tools/x/user.ts similarity index 100% rename from sim/tools/x/user.ts rename to apps/sim/tools/x/user.ts diff --git a/sim/tools/x/write.ts b/apps/sim/tools/x/write.ts similarity index 100% rename from sim/tools/x/write.ts rename to apps/sim/tools/x/write.ts diff --git a/sim/tools/youtube/index.ts b/apps/sim/tools/youtube/index.ts similarity index 61% rename from sim/tools/youtube/index.ts rename to apps/sim/tools/youtube/index.ts index c474fad01..e17092b54 100644 --- a/sim/tools/youtube/index.ts +++ b/apps/sim/tools/youtube/index.ts @@ -1,3 +1,3 @@ import { youtubeSearchTool } from './search' -export { youtubeSearchTool } \ No newline at end of file +export { youtubeSearchTool } diff --git a/sim/tools/youtube/search.ts b/apps/sim/tools/youtube/search.ts similarity index 100% rename from sim/tools/youtube/search.ts rename to apps/sim/tools/youtube/search.ts diff --git a/apps/sim/tools/youtube/types.ts b/apps/sim/tools/youtube/types.ts new file mode 100644 index 000000000..5a06820e9 --- /dev/null +++ b/apps/sim/tools/youtube/types.ts @@ -0,0 +1,21 @@ +import { ToolResponse } from '../types' + +export interface YouTubeSearchParams { + apiKey: string + query: string + maxResults?: number + pageToken?: string +} + +export interface YouTubeSearchResponse extends ToolResponse { + output: { + items: Array<{ + videoId: string + title: string + description: string + thumbnail: string + }> + totalResults: number + nextPageToken?: string + } +} diff --git a/sim/tsconfig.json b/apps/sim/tsconfig.json similarity index 87% rename from sim/tsconfig.json rename to apps/sim/tsconfig.json index 
63bab6378..d005b01d9 100644 --- a/sim/tsconfig.json +++ b/apps/sim/tsconfig.json @@ -34,6 +34,7 @@ "moduleResolution": "node", "resolveJsonModule": true, "isolatedModules": true, + "allowImportingTsExtensions": true, "jsx": "preserve", "plugins": [ { @@ -41,6 +42,12 @@ } ] }, - "include": ["**/*.ts", "**/*.tsx", ".next/types/**/*.ts", "../next-env.d.ts"], + "include": [ + "**/*.ts", + "**/*.tsx", + ".next/types/**/*.ts", + "../next-env.d.ts", + "telemetry.config.js" + ], "exclude": ["node_modules"] } diff --git a/sim/vercel.json b/apps/sim/vercel.json similarity index 100% rename from sim/vercel.json rename to apps/sim/vercel.json diff --git a/sim/vitest.config.ts b/apps/sim/vitest.config.ts similarity index 100% rename from sim/vitest.config.ts rename to apps/sim/vitest.config.ts diff --git a/sim/vitest.setup.ts b/apps/sim/vitest.setup.ts similarity index 100% rename from sim/vitest.setup.ts rename to apps/sim/vitest.setup.ts diff --git a/docker-compose.yml b/docker-compose.yml index 8ed059bf2..a5ad80e41 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -6,11 +6,14 @@ services: context: . 
dockerfile: Dockerfile ports: - - "3000:3000" + - '3000:3000' volumes: - - ./sim:/app + - .:/app - /app/node_modules - - /app/.next + - /app/apps/sim/node_modules + - /app/apps/sim/.next + - /app/apps/docs/node_modules + - /app/apps/docs/.next environment: - NODE_ENV=development - DATABASE_URL=postgresql://postgres:postgres@db:5432/simstudio @@ -33,7 +36,7 @@ services: image: postgres:16 restart: always ports: - - "5432:5432" + - '5432:5432' environment: - POSTGRES_USER=postgres - POSTGRES_PASSWORD=postgres @@ -41,26 +44,26 @@ services: volumes: - postgres_data:/var/lib/postgresql/data healthcheck: - test: ["CMD-SHELL", "pg_isready -U postgres"] + test: ['CMD-SHELL', 'pg_isready -U postgres'] interval: 5s timeout: 5s retries: 5 local-llm-gpu: - profiles: + profiles: - local-gpu # This profile requires both 'local' and 'gpu' image: ollama/ollama:latest pull_policy: always volumes: - ${HOME}/.ollama:/root/.ollama ports: - - "11434:11434" + - '11434:11434' environment: - NVIDIA_DRIVER_CAPABILITIES=all - OLLAMA_LOAD_TIMEOUT=-1 - OLLAMA_KEEP_ALIVE=-1 - OLLAMA_DEBUG=1 - command: "serve" + command: 'serve' deploy: resources: reservations: @@ -69,7 +72,7 @@ services: count: all capabilities: [gpu] healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:11434/"] + test: ['CMD', 'curl', '-f', 'http://localhost:11434/'] interval: 10s timeout: 5s retries: 5 @@ -82,14 +85,14 @@ services: volumes: - ${HOME}/.ollama:/root/.ollama ports: - - "11434:11434" + - '11434:11434' environment: - OLLAMA_LOAD_TIMEOUT=-1 - OLLAMA_KEEP_ALIVE=-1 - OLLAMA_DEBUG=1 - command: "serve" + command: 'serve' healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:11434/"] + test: ['CMD', 'curl', '-f', 'http://localhost:11434/'] interval: 10s timeout: 5s retries: 5 diff --git a/docker-entrypoint.sh b/docker-entrypoint.sh index a52442e8c..a4af0c2e3 100755 --- a/docker-entrypoint.sh +++ b/docker-entrypoint.sh @@ -1,3 +1,5 @@ #!/bin/sh +cd apps/sim npx drizzle-kit push +cd ../.. 
exec "$@" \ No newline at end of file diff --git a/docs/content/docs/execution/basics.mdx b/docs/content/docs/execution/basics.mdx deleted file mode 100644 index 3a6a3f3b3..000000000 --- a/docs/content/docs/execution/basics.mdx +++ /dev/null @@ -1,171 +0,0 @@ ---- -title: Execution Basics -description: Understanding the fundamental execution flow in Sim Studio ---- - -import { Step, Steps } from 'fumadocs-ui/components/steps' -import { Tabs, Tab } from 'fumadocs-ui/components/tabs' -import { Files, Folder, File } from 'fumadocs-ui/components/files' -import { Callout } from 'fumadocs-ui/components/callout' -import { AgentIcon, ApiIcon, ConditionalIcon, CodeIcon, ChartBarIcon, ConnectIcon } from '@/components/icons' - -When you run a workflow in Sim Studio, the execution engine follows a systematic process to ensure blocks are executed in the correct order and data flows properly between them. - -## Execution Flow - -The execution of a workflow follows these key steps: - - - - ### Validation - Before execution begins, the workflow is validated to ensure it has: - - An enabled starter block with no incoming connections - - Properly connected blocks with valid configurations - - No circular dependencies (except in intentional loops) - - Valid input and output types between connected blocks - - - - ### Initialization - The execution context is created, which includes: - - Environment variables for the workflow - - Input values from the starter block - - Initial state for all blocks - - Execution path tracking - - Loop iteration counters - - - - ### Block Execution - Blocks are executed in topological order (based on dependencies): - - The system identifies which blocks can be executed next - - Inputs for each block are resolved from previous block outputs - - Each block is executed by its specialized handler - - Outputs are stored in the execution context - - - - ### Path Determination - As execution progresses, the system determines which paths to follow: - - Router 
and conditional blocks make decisions about execution paths - - Only blocks on active paths are executed - - The path tracker maintains the current execution state - - - - ### Result Collection - After all blocks have executed: - - Final outputs are collected - - Execution logs are compiled - - Performance metrics are calculated - - Results are presented in the UI - - - -## Block Types and Execution - -Different block types have different execution behaviors: - - - - - Orchestration blocks control the flow of execution through your workflow. - - - } annotation="Initiates workflow execution and provides initial input values. Every workflow must have exactly one starter block." /> - } annotation="Directs execution along specific paths based on dynamic decisions. Uses an AI model to select one of multiple possible paths." /> - } annotation="Executes different paths based on conditional logic. Evaluates JavaScript expressions to determine which path to follow." /> - - - - - - - Processing blocks transform data and generate new outputs. - - - } annotation="Interacts with AI models to generate content. Executes prompts against various LLM providers." /> - } annotation="Executes custom JavaScript/TypeScript code. Runs in a secure sandbox environment with access to connected block outputs." /> - } annotation="Assesses outputs against defined criteria. Uses AI to evaluate content based on custom metrics." /> - - - - - - - Integration blocks connect with external systems. - - - } annotation="Makes HTTP requests to external services. Configurable with headers, body, and authentication." /> - } annotation="Specialized blocks for specific services (Gmail, Slack, GitHub, etc.). Each has its own execution logic for the specific service." /> - - - - - -## Execution Methods - -Sim Studio offers multiple ways to trigger workflow execution: - -### Manual Execution - -Run workflows on-demand through the Sim Studio interface by clicking the "Run" button. 
This is perfect for: -- Testing during development -- One-off tasks -- Workflows that need human supervision - -### Scheduled Execution - -Configure workflows to run automatically on a specified schedule: -- Set up recurring executions using cron expressions -- Define start times and frequency -- Configure timezone settings -- Set minimum and maximum execution intervals - -### API Endpoints - -Each workflow can be exposed as an API endpoint: -- Get a unique URL for your workflow -- Configure authentication requirements -- Send custom inputs via POST requests -- Receive execution results as JSON responses - -### Webhooks - -Configure workflows to execute in response to external events: -- Set up webhook triggers from third-party services -- Process incoming webhook data as workflow input -- Configure webhook security settings -- Support for specialized webhooks (GitHub, Stripe, etc.) - - - The execution method you choose depends on your workflow's purpose. Manual execution is great for development, while scheduled execution, API endpoints, and webhooks are better for production use cases. 
- - -## Execution Context - -Each workflow execution maintains a detailed context that includes: - -- **Block States**: Outputs and execution status of each block -- **Execution Path**: The active path through the workflow -- **Routing Decisions**: Records of which paths were selected -- **Environment Variables**: Configuration values for the workflow -- **Execution Logs**: Detailed records of each step in the execution - -This context is maintained throughout the execution and is used to: -- Resolve inputs for blocks -- Determine which blocks to execute next -- Track the progress of execution -- Provide debugging information -- Store intermediate results - -## Real-Time Execution Monitoring - -As your workflow executes, you can monitor its progress in real-time: - -- **Active Block Highlighting**: The currently executing block is highlighted -- **Live Logs**: Execution logs appear in real-time in the logs panel -- **Block States**: Visual indicators show each block's execution state -- **Performance Metrics**: Timing information for each block's execution - -These monitoring features help you understand how your workflow is executing and identify any issues that arise. 
\ No newline at end of file diff --git a/docs/content/docs/getting-started/index.mdx b/docs/content/docs/getting-started/index.mdx deleted file mode 100644 index 54dd5ad1b..000000000 --- a/docs/content/docs/getting-started/index.mdx +++ /dev/null @@ -1,67 +0,0 @@ ---- -title: Getting Started -description: Build, test, and optimize your agentic workflows ---- - -import { Card, Cards } from 'fumadocs-ui/components/card' -import { Steps, Step } from 'fumadocs-ui/components/steps' -import { Callout } from 'fumadocs-ui/components/callout' -import { Files, Folder, File } from 'fumadocs-ui/components/files' -import { Tabs, Tab } from 'fumadocs-ui/components/tabs' -import { AgentIcon, ApiIcon, ConditionalIcon, CodeIcon, ChartBarIcon, ConnectIcon, GmailIcon, PerplexityIcon, NotionIcon, ExaAIIcon, FirecrawlIcon, SlackIcon } from '@/components/icons' - -Sim Studio is a powerful, user-friendly platform for building, testing, and optimizing your agentic workflows. This documentation will help you understand how to use the various components of Sim Studio to create sophisticated agent-based applications. - - - This guide will walk you through the essential concepts and help you get started building your first workflow. - - -## Core Components - -Sim Studio is built around two primary components: - -### Blocks - -Blocks are the fundamental building elements of your workflows. Each block serves a specific purpose: - - - } annotation="Create AI agents using any LLM provider" /> - } annotation="Connect to external services and APIs" /> - } annotation="Add conditional branching to your workflows" /> - } annotation="Execute custom JavaScript/TypeScript code" /> - } annotation="Assess responses against defined criteria" /> - } annotation="Direct workflow execution based on input analysis" /> - - -### Tools - -Tools extend the capabilities of agents. 
They provide additional functionality for agents by enabling you to interface with your favorite data sources and take action (e.g posting on X, sending an email) - - - } /> - } /> - } /> - } /> - } /> - } /> - - -## Getting Started - - - - Start by creating a new workflow in the Sim Studio dashboard. - - - Drag and drop a block from the sidebar onto the canvas. - - - Set up the block's parameters and inputs according to your needs. - - - Create connections between blocks to define the flow of data and execution. - - - Run your workflow with test inputs to verify its behavior. - - \ No newline at end of file diff --git a/docs/content/docs/tools/github.mdx b/docs/content/docs/tools/github.mdx deleted file mode 100644 index 436ea9806..000000000 --- a/docs/content/docs/tools/github.mdx +++ /dev/null @@ -1,187 +0,0 @@ ---- -title: GitHub -description: Interact with GitHub ---- - -import { BlockInfoCard } from "@/components/ui/block-info-card" - - - - `} -/> - -{/* MANUAL-CONTENT-START:intro */} -[GitHub](https://github.com/) is the world's leading platform for software development and version control using Git. It provides a collaborative environment where developers can host and review code, manage projects, and build software together. - -With GitHub, you can: - -- **Host repositories**: Store your code in public or private repositories with version control -- **Collaborate on code**: Use pull requests to propose changes, review code, and merge contributions -- **Track issues**: Create, assign, and manage issues to organize work and track bugs -- **Automate workflows**: Use GitHub Actions to build, test, and deploy code automatically -- **Manage projects**: Organize work with project boards, milestones, and task tracking -- **Document code**: Create and maintain documentation with GitHub Pages and wikis - -In Sim Studio, the GitHub integration enables your agents to interact directly with GitHub repositories and workflows. 
This allows for powerful automation scenarios such as code review assistance, pull request management, issue tracking, and repository exploration. Your agents can fetch repository data, analyze code changes, post comments on pull requests, and perform other GitHub operations programmatically. This integration bridges the gap between your AI workflows and your development processes, enabling seamless collaboration between your agents and your development team. -{/* MANUAL-CONTENT-END */} - - -## Usage Instructions - -Access GitHub repositories, pull requests, and comments through the GitHub API. Automate code reviews, PR management, and repository interactions within your workflow. - - - -## Tools - -### `github_pr` - -Fetch PR details including diff and files changed - -#### Input - -| Parameter | Type | Required | Description | -| --------- | ---- | -------- | ----------- | -| `owner` | string | Yes | Repository owner | -| `repo` | string | Yes | Repository name | -| `pullNumber` | number | Yes | Pull request number | -| `apiKey` | string | Yes | GitHub API token | - -#### Output - -| Parameter | Type | -| --------- | ---- | -| `metadata` | string | -| `title` | string | -| `state` | string | -| `html_url` | string | -| `diff_url` | string | -| `created_at` | string | -| `updated_at` | string | -| `files` | string | -| `additions` | string | -| `deletions` | string | -| `changes` | string | -| `patch` | string | -| `blob_url` | string | -| `raw_url` | string | -| `status` | string | - -### `github_comment` - -Create comments on GitHub PRs - -#### Input - -| Parameter | Type | Required | Description | -| --------- | ---- | -------- | ----------- | -| `owner` | string | Yes | Repository owner | -| `repo` | string | Yes | Repository name | -| `pullNumber` | number | Yes | Pull request number | -| `body` | string | Yes | Comment content | -| `path` | string | No | File path for review comment | -| `position` | number | No | Line number for review comment | -| `apiKey` 
| string | Yes | GitHub API token | -| `commentType` | string | No | Type of comment \(pr_comment or file_comment\) | -| `line` | number | No | Line number for review comment | -| `side` | string | No | Side of the diff \(LEFT or RIGHT\) | -| `commitId` | string | No | The SHA of the commit to comment on | - -#### Output - -| Parameter | Type | -| --------- | ---- | -| `metadata` | string | -| `html_url` | string | -| `created_at` | string | -| `updated_at` | string | -| `path` | string | -| `line` | string | -| `side` | string | -| `commit_id` | string | - -### `github_repo_info` - -Retrieve comprehensive GitHub repository metadata including stars, forks, issues, and primary language. Supports both public and private repositories with optional authentication. - -#### Input - -| Parameter | Type | Required | Description | -| --------- | ---- | -------- | ----------- | -| `owner` | string | Yes | Repository owner \(user or organization\) | -| `repo` | string | Yes | Repository name | -| `apiKey` | string | Yes | GitHub Personal Access Token | - -#### Output - -| Parameter | Type | -| --------- | ---- | -| `metadata` | string | -| `description` | string | -| `stars` | string | -| `forks` | string | -| `openIssues` | string | -| `language` | string | - -### `github_latest_commit` - -Retrieve the latest commit from a GitHub repository - -#### Input - -| Parameter | Type | Required | Description | -| --------- | ---- | -------- | ----------- | -| `owner` | string | Yes | Repository owner \(user or organization\) | -| `repo` | string | Yes | Repository name | -| `branch` | string | No | Branch name \(defaults to the repository | -| `apiKey` | string | Yes | GitHub API token | - -#### Output - -| Parameter | Type | -| --------- | ---- | -| `metadata` | string | -| `html_url` | string | -| `commit_message` | string | -| `author` | string | -| `login` | string | -| `avatar_url` | string | - - - -## Block Configuration - -### Input - -| Parameter | Type | Required | 
Description | -| --------- | ---- | -------- | ----------- | -| `operation` | string | Yes | Operation | - - - -### Outputs - -| Output | Type | Description | -| ------ | ---- | ----------- | -| `response` | object | Output from response | -| ↳ `content` | string | content of the response | -| ↳ `metadata` | json | metadata of the response | - - -## Notes - -- Category: `tools` -- Type: `github` diff --git a/docs/content/docs/tools/google_sheets.mdx b/docs/content/docs/tools/google_sheets.mdx deleted file mode 100644 index fdf43c68b..000000000 --- a/docs/content/docs/tools/google_sheets.mdx +++ /dev/null @@ -1,180 +0,0 @@ ---- -title: Google Sheets -description: Read, write, and update data ---- - -import { BlockInfoCard } from "@/components/ui/block-info-card" - - - - - - - `} -/> - -{/* MANUAL-CONTENT-START:intro */} -[Google Sheets](https://sheets.google.com) is a powerful cloud-based spreadsheet application that allows users to create, edit, and collaborate on spreadsheets in real-time. As part of Google's productivity suite, Google Sheets offers a versatile platform for data organization, analysis, and visualization with robust formatting, formula, and sharing capabilities. 
- -With Google Sheets, you can: - -- **Create and edit spreadsheets**: Develop data-driven documents with comprehensive formatting and calculation options -- **Collaborate in real-time**: Work simultaneously with multiple users on the same spreadsheet -- **Analyze data**: Use formulas, functions, and pivot tables to process and understand your data -- **Visualize information**: Create charts, graphs, and conditional formatting to represent data visually -- **Access anywhere**: Use Google Sheets across devices with automatic cloud synchronization -- **Work offline**: Continue working without internet connection with changes syncing when back online -- **Integrate with other services**: Connect with Google Drive, Forms, and third-party applications - -In Sim Studio, the Google Sheets integration enables your agents to interact directly with spreadsheet data programmatically. This allows for powerful automation scenarios such as data extraction, analysis, reporting, and management. Your agents can read existing spreadsheets to extract information, write to spreadsheets to update data, and create new spreadsheets from scratch. This integration bridges the gap between your AI workflows and data management, enabling seamless interaction with structured data. By connecting Sim Studio with Google Sheets, you can automate data workflows, generate reports, extract insights from data, and maintain up-to-date information - all through your intelligent agents. The integration supports various data formats and range specifications, making it flexible enough to handle diverse data management needs while maintaining the collaborative and accessible nature of Google Sheets. -{/* MANUAL-CONTENT-END */} - - -## Usage Instructions - -Integrate Google Sheets functionality to manage spreadsheet data. Read data from specific ranges, write new data, update existing cells, and append data to the end of sheets using OAuth authentication. 
Supports various input and output formats for flexible data handling. - - - -## Tools - -### `google_sheets_read` - -Read data from a Google Sheets spreadsheet - -#### Input - -| Parameter | Type | Required | Description | -| --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Google Sheets API | -| `spreadsheetId` | string | Yes | The ID of the spreadsheet to read from | -| `range` | string | No | The range of cells to read from | - -#### Output - -| Parameter | Type | -| --------- | ---- | -| `data` | json | - -### `google_sheets_write` - -Write data to a Google Sheets spreadsheet - -#### Input - -| Parameter | Type | Required | Description | -| --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Google Sheets API | -| `spreadsheetId` | string | Yes | The ID of the spreadsheet to write to | -| `range` | string | No | The range of cells to write to | -| `values` | array | Yes | The data to write to the spreadsheet | -| `valueInputOption` | string | No | The format of the data to write | -| `includeValuesInResponse` | boolean | No | Whether to include the written values in the response | - -#### Output - -| Parameter | Type | -| --------- | ---- | -| `updatedRange` | string | -| `updatedRows` | string | -| `updatedColumns` | string | -| `updatedCells` | string | -| `metadata` | string | -| `spreadsheetId` | string | -| `spreadsheetUrl` | string | - -### `google_sheets_update` - -Update data in a Google Sheets spreadsheet - -#### Input - -| Parameter | Type | Required | Description | -| --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Google Sheets API | -| `spreadsheetId` | string | Yes | The ID of the spreadsheet to update | -| `range` | string | No | The range of cells to update | -| `values` | array | Yes | The data to update in the spreadsheet | -| `valueInputOption` | string | No | The format of the data to 
update | -| `includeValuesInResponse` | boolean | No | Whether to include the updated values in the response | - -#### Output - -| Parameter | Type | -| --------- | ---- | -| `updatedRange` | string | -| `updatedRows` | string | -| `updatedColumns` | string | -| `updatedCells` | string | -| `metadata` | string | -| `spreadsheetId` | string | -| `spreadsheetUrl` | string | - -### `google_sheets_append` - -Append data to the end of a Google Sheets spreadsheet - -#### Input - -| Parameter | Type | Required | Description | -| --------- | ---- | -------- | ----------- | -| `accessToken` | string | Yes | The access token for the Google Sheets API | -| `spreadsheetId` | string | Yes | The ID of the spreadsheet to append to | -| `range` | string | No | The range of cells to append after | -| `values` | array | Yes | The data to append to the spreadsheet | -| `valueInputOption` | string | No | The format of the data to append | -| `insertDataOption` | string | No | How to insert the data \(OVERWRITE or INSERT_ROWS\) | -| `includeValuesInResponse` | boolean | No | Whether to include the appended values in the response | - -#### Output - -| Parameter | Type | -| --------- | ---- | -| `data` | json | - - - -## Block Configuration - -### Input - -| Parameter | Type | Required | Description | -| --------- | ---- | -------- | ----------- | -| `operation` | string | Yes | Operation | - - - -### Outputs - -| Output | Type | Description | -| ------ | ---- | ----------- | -| `response` | object | Output from response | -| ↳ `data` | json | data of the response | -| ↳ `metadata` | json | metadata of the response | -| ↳ `updatedRange` | string | updatedRange of the response | -| ↳ `updatedRows` | number | updatedRows of the response | -| ↳ `updatedColumns` | number | updatedColumns of the response | -| ↳ `updatedCells` | number | updatedCells of the response | -| ↳ `tableRange` | string | tableRange of the response | - - -## Notes - -- Category: `tools` -- Type: `google_sheets` diff 
--git a/docs/content/docs/tools/jira.mdx b/docs/content/docs/tools/jira.mdx deleted file mode 100644 index ddd5f834c..000000000 --- a/docs/content/docs/tools/jira.mdx +++ /dev/null @@ -1,182 +0,0 @@ ---- -title: Jira -description: Interact with Jira ---- - -import { BlockInfoCard } from "@/components/ui/block-info-card" - -

          - {/* Grid pattern background - only covers content area */} -
          -
          - - {/* Header/Navigation */} - - - {/* SVG background blur centered behind content */} - - - {/* Content */} -
          -
          -
          -

          Privacy Policy

          - -
          -
          -

          Last Updated: April 22, 2025

          -

          - This Privacy Policy describes how your personal information is collected, used, and - shared when you visit or use Sim Studio ("the Service", "we", "us", or "our"). -

          -

          - By using the Service, you agree to the collection and use of information in - accordance with this policy. Unless otherwise defined in this Privacy Policy, terms - used in this Privacy Policy have the same meanings as in our Terms of Service. -

          -
          - -
          -

          - Interpretation and Definitions -

          -

          Interpretation

          -

          - Under the following conditions, the meanings of words with capitalized first letters are defined. - The following definitions have the same meaning whether they are written in singular or plural form. -

          - -

          Definitions

          -

          - For the purposes of this Privacy Policy: -

          -
            -
          • Account means a unique account created for You to access our Service or parts of our Service.
          • -
          • Affiliate means an entity that controls, is controlled by or is under common control with a party, where "control" means ownership of 50% or more of the shares, equity interest or other securities entitled to vote for election of directors or other managing authority.
          • -
          • Application means the software program provided by the Company downloaded by You on any electronic device.
          • -
          • Business, for the purpose of the CCPA (California Consumer Privacy Act), refers to the Company as the legal entity that collects Consumers' personal information and determines the purposes and means of the processing of Consumers' personal information, or on behalf of which such information is collected and that alone, or jointly with others, determines the purposes and means of the processing of consumers' personal information, that does business in the State of California.
          • -
          • Company (referred to as either "the Company", "We", "Us" or "Our" in this Agreement) refers to Sim Studio. For the purpose of the GDPR, the Company is the Data Controller.
          • -
          • Cookies are small files that are placed on Your computer, mobile device or any other device by a website, containing the details of Your browsing history on that website among its many uses.
          • -
          • Country refers to: Quebec, Canada
          • -
          • Data Controller, for the purposes of the GDPR (General Data Protection Regulation), refers to the Company as the legal person which alone or jointly with others determines the purposes and means of the processing of Personal Data.
          • -
          • Device means any device that can access the Service such as a computer, a cellphone or a digital tablet.
          • -
          • Do Not Track (DNT) is a concept that has been promoted by US regulatory authorities, in particular the U.S. Federal Trade Commission (FTC), for the Internet industry to develop and implement a mechanism for allowing internet users to control the tracking of their online activities across websites.
          • -
          • Personal Data is any information that relates to an identified or identifiable individual. For the purposes for GDPR, Personal Data means any information relating to You such as a name, an identification number, location data, online identifier or to one or more factors specific to the physical, physiological, genetic, mental, economic, cultural or social identity. For the purposes of the CCPA, Personal Data means any information that identifies, relates to, describes or is capable of being associated with, or could reasonably be linked, directly or indirectly, with You.
          • -
          • Sale, for the purpose of the CCPA (California Consumer Privacy Act), means selling, renting, releasing, disclosing, disseminating, making available, transferring, or otherwise communicating orally, in writing, or by electronic or other means, a Consumer's Personal information to another business or a third party for monetary or other valuable consideration.
          • -
          • Service refers to the Application or the Website or both.
          • -
          • Service Provider means any natural or legal person who processes the data on behalf of the Company. It refers to third-party companies or individuals employed by the Company to facilitate the Service, to provide the Service on behalf of the Company, to perform services related to the Service or to assist the Company in analyzing how the Service is used. For the purpose of the GDPR, Service Providers are considered Data Processors.
          • -
          • Third-party Social Media Service refers to any website or any social network website through which a User can log in or create an account to use the Service.
          • -
          • Usage Data refers to data collected automatically, either generated by the use of the Service or from the Service infrastructure itself (for example, the duration of a page visit).
          • -
          • Website refers to Sim Studio, accessible from simstudio.ai
          • -
          • You means the individual accessing or using the Service, or the company, or other legal entity on behalf of which such individual is accessing or using the Service, as applicable. Under GDPR (General Data Protection Regulation), You can be referred to as the Data Subject or as the User as you are the individual using the Service.
          • -
          -
          - -
          -

          - 1. Information We Collect -

          -

          Personal Information

          -

          - While using our Service, we may ask you to provide us with certain personally - identifiable information that can be used to contact or identify you ("Personal Information"). Personally identifiable information may include, but is not limited - to: -

          -
            -
          • Email address
          • -
          • First name and last name
          • -
          • Phone number
          • -
          • Address, State, Province, ZIP/Postal code, City
          • -
          • Cookies and Usage Data
          • -
          - -

          Usage Data

          -

          - We may also collect information on how the Service is accessed and used ("Usage - Data"). This Usage Data may include information such as your computer's Internet - Protocol address (e.g. IP address), browser type, browser version, the pages of our - Service that you visit, the time and date of your visit, the time spent on those - pages, unique device identifiers and other diagnostic data. -

          -

          - When You access the Service by or through a mobile device, We may collect certain information automatically, - including, but not limited to, the type of mobile device You use, Your mobile device unique ID, the IP - address of Your mobile device, Your mobile operating system, the type of mobile Internet browser You use, - unique device identifiers and other diagnostic data. -

          -

          - We may also collect information that Your browser sends whenever You visit our Service or when You access - the Service by or through a mobile device. -

          - -

          Tracking & Cookies Data

          -

          - We use cookies and similar tracking technologies to track the activity on our Service - and hold certain information. -

          -

          - Cookies are files with a small amount of data which may include an anonymous unique - identifier. Cookies are sent to your browser from a website and stored on your - device. Tracking technologies also used are beacons, tags, and scripts to collect and - track information and to improve and analyze our Service. -

          -

          - You can instruct your browser to refuse all cookies or to indicate when a cookie is - being sent. However, if you do not accept cookies, you may not be able to use some - portions of our Service. -

          -
          - -
          -

          - 2. How We Use Your Information -

          -

          - We use the collected data for various purposes: -

          -
            -
          • To provide and maintain our Service
          • -
          • To notify you about changes to our Service
          • -
          • - To allow you to participate in interactive features of our Service when you choose - to do so -
          • -
          • To provide customer care and support
          • -
          • To provide analysis or valuable information so that we can improve the Service
          • -
          • To monitor the usage of the Service
          • -
          • To detect, prevent and address technical issues
          • -
          • To manage Your Account
          • -
          • For the performance of a contract
          • -
          • To contact You by email, telephone calls, SMS, or other equivalent forms of electronic communication
          • -
          -
          - -
          -

          - 3. Transfer Of Data -

          -

          - Your information, including Personal Information, may be transferred to — and - maintained on — computers located outside of your state, province, country or other - governmental jurisdiction where the data protection laws may differ from those of - your jurisdiction. -

          -

          - If you are located outside United States and choose to provide information to us, - please note that we transfer the data, including Personal Information, to United - States and process it there. -

          -

          - Your consent to this Privacy Policy followed by your submission of such information - represents your agreement to that transfer. -

          -
          - -
          -

          - 4. Disclosure Of Data -

          - -

          Business Transactions

          -

          - If the Company is involved in a merger, acquisition or asset sale, Your Personal Data may be transferred. - We will provide notice before Your Personal Data is transferred and becomes subject to a different Privacy Policy. -

          - -

          Law Enforcement

          -

          - Under certain circumstances, the Company may be required to disclose Your Personal Data - if required to do so by law or in response to valid requests by public authorities (e.g. a court or a government agency). -

          - -

          Legal Requirements

          -

          - Sim Studio may disclose your Personal Information in the good faith belief that such - action is necessary to: -

          -
            -
          • To comply with a legal obligation
          • -
          • To protect and defend the rights or property of Sim Studio
          • -
          • To prevent or investigate possible wrongdoing in connection with the Service
          • -
          • To protect the personal safety of users of the Service or the public
          • -
          • To protect against legal liability
          • -
          -
          - -
          -

          - 5. Security Of Data -

          -

          - The security of your data is important to us, but remember that no method of - transmission over the Internet, or method of electronic storage is 100% secure. While - we strive to use commercially acceptable means to protect your Personal Information, - we cannot guarantee its absolute security. -

          -
          - -
          -

          - 6. Service Providers -

          -

          - We may employ third party companies and individuals to facilitate our Service - ("Service Providers"), to provide the Service on our behalf, to perform - Service-related services or to assist us in analyzing how our Service is used. -

          -

          - These third parties have access to your Personal Information only to perform these - tasks on our behalf and are obligated not to disclose or use it for any other - purpose. -

          -
          - -
          -

          - 7. Analytics -

          -

          - We may use third-party Service Providers to monitor and analyze the use of our - Service. -

          -

          Google Analytics

          -

          - Google Analytics is a web analytics service offered by Google that tracks and reports - website traffic. Google uses the data collected to track and monitor the use of our - Service. This data is shared with other Google services. Google may use the collected - data to contextualize and personalize the ads of its own advertising network. -

          -

          - You can opt-out of having made your activity on the Service available to Google - Analytics by installing the Google Analytics opt-out browser add-on. The add-on - prevents the Google Analytics JavaScript (ga.js, analytics.js, and dc.js) from - sharing information with Google Analytics about visit activity. -

          -

          - For more information on the privacy practices of Google, please visit the Google - Privacy & Terms web page:{' '} - - https://policies.google.com/privacy - -

          -
          - -
          -

          - 8. Behavioral Remarketing -

          -

          - The Company uses remarketing services to advertise on third party websites to You after You visited our Service. - We and Our third-party vendors use cookies to inform, optimize and serve ads based on Your past visits to our Service. -

          -

          Google Ads (AdWords)

          -

          - Google Ads remarketing service is provided by Google Inc. You can opt-out of Google Analytics for - Display Advertising and customize the Google Display Network ads by visiting the Google Ads Settings page. -

          - -

          Twitter

          -

          - Twitter remarketing service is provided by Twitter Inc. You can opt-out from Twitter's - interest-based ads by following their instructions. -

          - -

          Facebook

          -

          - Facebook remarketing service is provided by Facebook Inc. You can learn more about - interest-based advertising from Facebook by visiting their Privacy Policy. -

          -
          - -
          -

          - 9. Payments -

          -

          - We may provide paid products and/or services within the Service. In that case, we may use - third-party services for payment processing (e.g. payment processors). -

          -

          - We will not store or collect Your payment card details. That information is provided directly - to Our third-party payment processors whose use of Your personal information is governed by - their Privacy Policy. These payment processors adhere to the standards set by PCI-DSS as managed - by the PCI Security Standards Council, which is a joint effort of brands like Visa, Mastercard, - American Express and Discover. PCI-DSS requirements help ensure the secure handling of payment information. -

          -

          Payment processors we work with:

          -
            -
          • Stripe
          • -
          -
          - -
          -

          - 10. Google Workspace APIs -

          -

          - We want to explicitly affirm that any user data obtained through Google Workspace APIs is not used to develop, improve, or train generalized AI and/or machine learning models. We use data obtained through Google Workspace APIs solely for the purpose of providing and improving the specific functionality of our Service for which the API access was granted. -

          -
          - -
          -

          - 11. Information Collected while Using Google APIs -

          -

          - Sim Studio's use and transfer to any other app of information received from Google APIs will adhere to - Google API Services User Data Policy, including the Limited Use requirements. -

          -
          - -
          -

          - 12. Links To Other Sites -

          -

          - Our Service may contain links to other sites that are not operated by us. If you - click on a third party link, you will be directed to that third party's site. We - strongly advise you to review the Privacy Policy of every site you visit. -

          -

          - We have no control over and assume no responsibility for the content, privacy - policies or practices of any third party sites or services. -

          -
          - -
          -

          - 13. Children's Privacy -

          -

          - Our Service does not address anyone under the age of 18 ("Children"). -

          -

          - We do not knowingly collect personally identifiable information from anyone under the - age of 18. If you are a parent or guardian and you are aware that your Children have - provided us with Personal Information, please contact us. If we become aware that we - have collected Personal Information from children without verification of parental - consent, we take steps to remove that information from our servers. -

          -
          - -
          -

          - 14. Changes To This Privacy Policy -

          -

          - We may update our Privacy Policy from time to time. We will notify you of any changes - by posting the new Privacy Policy on this page. -

          -

          - We will let you know via email and/or a prominent notice on our Service, prior to the - change becoming effective and update the "Last updated" date at the top of this - Privacy Policy. -

          -

          - You are advised to review this Privacy Policy periodically for any changes. Changes - to this Privacy Policy are effective when they are posted on this page. -

          -
          - -
          -

          - 15. Your Data Protection Rights Under General Data Protection Regulation (GDPR) -

          -

          - If you are a resident of the European Economic Area (EEA), you have certain data - protection rights. Sim Studio aims to take reasonable steps to allow you to correct, - amend, delete, or limit the use of your Personal Information. -

          -

          - If you wish to be informed what Personal Information we hold about you and if you - want it to be removed from our systems, please contact us. -

          -

          - In certain circumstances, you have the following data protection rights: -

          -
            -
          • - The right to access, update or to delete the information we have on you. -
          • -
          • - The right of rectification. You have the right to have your information rectified - if that information is inaccurate or incomplete. -
          • -
          • - The right to object. You have the right to object to our processing of your - Personal Information. -
          • -
          • - The right of restriction. You have the right to request that we restrict the - processing of your personal information. -
          • -
          • - The right to data portability. You have the right to be provided with a copy of the - information we have on you in a structured, machine-readable and commonly used - format. -
          • -
          • - The right to withdraw consent. You also have the right to withdraw your consent at - any time where Sim Studio relied on your consent to process your personal - information. -
          • -
          -

          - Please note that we may ask you to verify your identity before responding to such - requests. -

          -

          - You have the right to complain to a Data Protection Authority about our collection - and use of your Personal Information. For more information, please contact your local - data protection authority in the European Economic Area (EEA). -

          -
          - -
          -

          - 16. California Privacy Rights -

          -

          - California Civil Code Section 1798.83, also known as the "Shine The Light" law, - permits our users who are California residents to request and obtain from us, once a - year and free of charge, information about categories of personal information (if - any) we disclosed to third parties for direct marketing purposes and the names and - addresses of all third parties with which we shared personal information in the - immediately preceding calendar year. -

          -

          - If you are a California resident and would like to make such a request, please submit - your request in writing to us using the contact information provided below. -

          -
          - -
          -

          - 17. Vulnerability Disclosure Policy -

          -

          Introduction

          -

          - Sim Studio is dedicated to preserving data security by preventing unauthorized disclosure of information. - This policy was created to provide security researchers with instructions for conducting vulnerability - discovery activities and to provide information on how to report vulnerabilities that have been discovered. - This policy explains which systems and sorts of activity are covered, how to send vulnerability reports, - and how long we require you to wait before publicly reporting vulnerabilities identified. -

          - -

          Guidelines

          -

          We request that you:

          -
            -
          • Notify us as soon as possible after you discover a real or potential security issue.
          • -
          • Provide us a reasonable amount of time to resolve the issue before you disclose it publicly.
          • -
          • Make every effort to avoid privacy violations, degradation of user experience, disruption to production systems, and destruction or manipulation of data.
          • -
          • Only use exploits to the extent necessary to confirm a vulnerability's presence. Do not use an exploit to compromise or obtain data, establish command line access and/or persistence, or use the exploit to "pivot" to other systems.
          • -
          • Once you've established that a vulnerability exists or encounter any sensitive data (including personal data, financial information, or proprietary information or trade secrets of any party), you must stop your test, notify us immediately, and keep the data strictly confidential.
          • -
          • Do not submit a high volume of low-quality reports.
          • -
          - -

          Authorization

          -

          - Security research carried out in conformity with this policy is deemed permissible. We'll work with you to - swiftly understand and fix the problem, and Sim Studio will not suggest or pursue legal action in connection with your research. -

          - -

          Scope

          -

          This policy applies to the following systems and services:

          -
            -
          • simstudio.ai website
          • -
          • Sim Studio web application
          • -
          • Sim Studio API services
          • -
          -

          - Any service that isn't explicitly specified above, such as related services, is out of scope and isn't allowed - to be tested. Vulnerabilities discovered in third-party solutions Sim Studio interacts with are not covered by - this policy and should be reported directly to the solution vendor in accordance with their disclosure policy (if any). - Before beginning your inquiry, email us at security@simstudio.ai if you're unsure whether a system or endpoint is in scope. -

          - -

          Types of testing

          -

          The following test types are not authorized:

          -
            -
          • Network denial of service (DoS or DDoS) tests
          • -
          • Physical testing (e.g., office access, open doors, tailgating), social engineering (e.g., phishing, vishing), or any other non-technical vulnerability testing
          • -
          - -

          Reporting a vulnerability

          -

          - To report any security flaws, send an email to security@simstudio.ai. - We'll acknowledge receipt of your vulnerability report within three business days and keep you updated on our progress. - Reports can be submitted anonymously. -

          - -

          Desirable information

          -

          In order to process and respond to a vulnerability report, we recommend including the following information:

          -
            -
          • Vulnerability description
          • -
          • Place of discovery
          • -
          • Potential Impact
          • -
          • Steps required to reproduce a vulnerability (include scripts and screenshots if possible)
          • -
          -

          If possible, please provide your report in English.

          - -

          Our commitment

          -

          - If you choose to give your contact information, we promise to communicate with you in a transparent and timely manner. - We will acknowledge receipt of your report within three business days. We will keep you informed on vulnerability - confirmation and remedy to the best of our capabilities. We welcome a discussion of concerns and are willing - to engage in a discourse. -

          -
          - -
          -

          18. Contact Us

          -

          - If you have any questions about this Privacy Policy, please contact us at:{' '} - - privacy@simstudio.ai - -

          -
          -
          -
          -
          -
          - - {/* Footer */} -
          -
          -
          -
          - ) -} diff --git a/sim/next.config.ts b/sim/next.config.ts deleted file mode 100644 index a32a595f6..000000000 --- a/sim/next.config.ts +++ /dev/null @@ -1,135 +0,0 @@ -import { withSentryConfig } from '@sentry/nextjs'; -import type { NextConfig } from 'next' -import path from 'path' - -const nextConfig: NextConfig = { - devIndicators: false, - images: { - domains: [ - 'avatars.githubusercontent.com', - 'oaidalleapiprodscus.blob.core.windows.net', - 'api.stability.ai', - ] - }, - output: 'standalone', - typescript: { - ignoreBuildErrors: true, - }, - turbopack: { - resolveExtensions: ['.tsx', '.ts', '.jsx', '.js', '.mjs', '.json'], - }, - experimental: { - optimizeCss: true, - }, - webpack: (config, { isServer, dev }) => { - // Skip webpack configuration in development when using Turbopack - if (dev && process.env.NEXT_RUNTIME === 'turbopack') { - return config - } - - // Configure webpack to use filesystem cache for faster incremental builds - if (config.cache) { - config.cache = { - type: 'filesystem', - buildDependencies: { - config: [__filename] - }, - cacheDirectory: path.resolve(process.cwd(), '.next/cache/webpack') - } - } - - return config - }, - transpilePackages: [ - 'prettier', - '@react-email/components', - '@react-email/render' - ], - // Only include headers when not building for standalone export - async headers() { - return [ - { - // API routes CORS headers - source: '/api/:path*', - headers: [ - { key: 'Access-Control-Allow-Credentials', value: 'true' }, - { - key: 'Access-Control-Allow-Origin', - value: 'https://localhost:3001', - }, - { - key: 'Access-Control-Allow-Methods', - value: 'GET,POST,OPTIONS,PUT,DELETE', - }, - { - key: 'Access-Control-Allow-Headers', - value: - 'X-CSRF-Token, X-Requested-With, Accept, Accept-Version, Content-Length, Content-MD5, Content-Type, Date, X-Api-Version', - }, - ], - }, - { - // Apply Cross-Origin Isolation headers to all routes except those that use the Google Drive Picker - source: 
'/((?!w/.*|api/auth/oauth/drive).*)', - headers: [ - { - key: 'Cross-Origin-Embedder-Policy', - value: 'require-corp', - }, - { - key: 'Cross-Origin-Opener-Policy', - value: 'same-origin', - }, - ], - }, - { - // For routes that use the Google Drive Picker, only apply COOP but not COEP - source: '/(w/.*|api/auth/oauth/drive)', - headers: [ - { - key: 'Cross-Origin-Opener-Policy', - value: 'same-origin', - }, - ], - }, - // Apply security headers to all routes - { - source: '/:path*', - headers: [ - { - key: 'X-Content-Type-Options', - value: 'nosniff', - }, - { - key: 'X-Frame-Options', - value: 'SAMEORIGIN', - }, - { - key: 'Content-Security-Policy', - value: `default-src 'self'; script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.google.com https://apis.google.com https://*.vercel-insights.com https://vercel.live https://*.vercel.live; style-src 'self' 'unsafe-inline' https://fonts.googleapis.com; img-src 'self' data: blob: https://*.googleusercontent.com https://*.google.com https://*.atlassian.com; media-src 'self' blob:; font-src 'self' https://fonts.gstatic.com; connect-src 'self' ${process.env.OLLAMA_HOST || 'http://localhost:11434'} https://*.googleapis.com https://*.amazonaws.com https://*.s3.amazonaws.com https://*.vercel-insights.com https://*.atlassian.com https://vercel.live https://*.vercel.live; frame-src https://drive.google.com https://*.google.com; frame-ancestors 'self'; form-action 'self'; base-uri 'self'; object-src 'none'`, - }, - ], - }, - ] - }, -} - -const sentryConfig = { - silent: true, - org: process.env.SENTRY_ORG || '', - project: process.env.SENTRY_PROJECT || '', - authToken: process.env.SENTRY_AUTH_TOKEN || undefined, - disableSourceMapUpload: process.env.NODE_ENV !== 'production', - autoInstrumentServerFunctions: process.env.NODE_ENV === 'production', - bundleSizeOptimizations: { - excludeDebugStatements: true, - excludePerformanceMonitoring: true, - excludeReplayIframe: true, - excludeReplayShadowDom: true, - 
excludeReplayWorker: true, - }, -} - -export default process.env.NODE_ENV === 'development' - ? nextConfig - : withSentryConfig(nextConfig, sentryConfig) \ No newline at end of file diff --git a/sim/scripts/generate_migrations.sh b/sim/scripts/generate_migrations.sh deleted file mode 100644 index ddf6666be..000000000 --- a/sim/scripts/generate_migrations.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -# Generate migrations using Drizzle -echo "Generating database migrations..." -docker compose exec simstudio npm run db:push - -echo "Migrations generated successfully." \ No newline at end of file diff --git a/sim/scripts/setup_cli.sh b/sim/scripts/setup_cli.sh deleted file mode 100644 index c439d24bc..000000000 --- a/sim/scripts/setup_cli.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash - -set -e - -echo "Setting up Sim Studio CLI Package..." - -# Create directory structure if it doesn't exist -mkdir -p packages/@simstudio/cli/bin -mkdir -p packages/@simstudio/cli/src/commands -mkdir -p packages/@simstudio/cli/src/utils - -# Navigate to CLI directory -cd packages/@simstudio/cli - -# Install dependencies -echo "Installing CLI dependencies..." -npm install - -# Build the CLI package -echo "Building CLI package..." -npm run build - -# Make the CLI executable -chmod +x bin/sim.js - -echo "βœ… CLI setup complete!" 
-echo "" -echo "You can now run:" -echo " npm run cli:start - to test the CLI" -echo " npm run cli:dev - to develop the CLI with live reload" -echo " npm run cli:publish - to publish to npm" -echo "" -echo "Try it out with: ./packages/@simstudio/cli/bin/sim.js" \ No newline at end of file diff --git a/sim/tailwind.config.ts b/sim/tailwind.config.ts deleted file mode 100644 index 40fc09d30..000000000 --- a/sim/tailwind.config.ts +++ /dev/null @@ -1,171 +0,0 @@ -import type { Config } from 'tailwindcss' - -export default { - darkMode: ['class'], - content: [ - './pages/**/*.{js,ts,jsx,tsx,mdx}', - './components/**/*.{js,ts,jsx,tsx,mdx}', - './app/**/*.{js,ts,jsx,tsx,mdx}', - '!./app/node_modules/**', - '!**/node_modules/**', - ], - theme: { - extend: { - colors: { - background: 'hsl(var(--background))', - foreground: 'hsl(var(--foreground))', - card: { - DEFAULT: 'hsl(var(--card))', - foreground: 'hsl(var(--card-foreground))' - }, - popover: { - DEFAULT: 'hsl(var(--popover))', - foreground: 'hsl(var(--popover-foreground))' - }, - primary: { - DEFAULT: 'hsl(var(--primary))', - foreground: 'hsl(var(--primary-foreground))' - }, - secondary: { - DEFAULT: 'hsl(var(--secondary))', - foreground: 'hsl(var(--secondary-foreground))' - }, - muted: { - DEFAULT: 'hsl(var(--muted))', - foreground: 'hsl(var(--muted-foreground))' - }, - accent: { - DEFAULT: 'hsl(var(--accent))', - foreground: 'hsl(var(--accent-foreground))' - }, - destructive: { - DEFAULT: 'hsl(var(--destructive))', - foreground: 'hsl(var(--destructive-foreground))' - }, - border: 'hsl(var(--border))', - input: 'hsl(var(--input))', - ring: 'hsl(var(--ring))', - chart: { - '1': 'hsl(var(--chart-1))', - '2': 'hsl(var(--chart-2))', - '3': 'hsl(var(--chart-3))', - '4': 'hsl(var(--chart-4))', - '5': 'hsl(var(--chart-5))' - } - }, - borderRadius: { - lg: 'var(--radius)', - md: 'calc(var(--radius) - 2px)', - sm: 'calc(var(--radius) - 4px)' - }, - transitionProperty: { - 'width': 'width', - 'left': 'left', - 'padding': 
'padding', - }, - keyframes: { - 'slide-down': { - '0%': { - transform: 'translate(-50%, -100%)', - opacity: '0' - }, - '100%': { - transform: 'translate(-50%, 0)', - opacity: '1' - } - }, - 'notification-slide': { - '0%': { - opacity: '0', - transform: 'translateY(-100%)' - }, - '100%': { - opacity: '1', - transform: 'translateY(0)' - } - }, - 'notification-fade-out': { - '0%': { - opacity: '1', - transform: 'translateY(0)' - }, - '100%': { - opacity: '0', - transform: 'translateY(0)' - } - }, - 'fade-up': { - '0%': { - opacity: '0', - transform: 'translateY(10px)' - }, - '100%': { - opacity: '1', - transform: 'translateY(0)' - } - }, - 'rocket-pulse': { - '0%, 100%': { - opacity: '1' - }, - '50%': { - opacity: '0.7' - } - }, - 'run-glow': { - '0%, 100%': { - filter: 'opacity(1)' - }, - '50%': { - filter: 'opacity(0.7)' - } - }, - 'caret-blink': { - '0%,70%,100%': { - opacity: '1' - }, - '20%,50%': { - opacity: '0' - } - }, - 'pulse-slow': { - '0%, 100%': { - opacity: '1' - }, - '50%': { - opacity: '0.7' - } - }, - 'accordion-down': { - from: { - height: '0' - }, - to: { - height: 'var(--radix-accordion-content-height)' - } - }, - 'accordion-up': { - from: { - height: 'var(--radix-accordion-content-height)' - }, - to: { - height: '0' - } - } - }, - animation: { - 'slide-down': 'slide-down 0.3s ease-out', - 'notification-slide': 'notification-slide 0.3s ease-out forwards', - 'notification-fade-out': 'notification-fade-out 0.2s ease-out forwards', - 'fade-up': 'fade-up 0.5s ease-out forwards', - 'rocket-pulse': 'rocket-pulse 1.5s ease-in-out infinite', - 'run-glow': 'run-glow 2s ease-in-out infinite', - 'caret-blink': 'caret-blink 1.25s ease-out infinite', - 'pulse-slow': 'pulse-slow 3s ease-in-out infinite', - 'accordion-down': 'accordion-down 0.2s ease-out', - 'accordion-up': 'accordion-up 0.2s ease-out' - } - } - }, - plugins: [require('tailwindcss-animate'), require('@tailwindcss/typography')], -} satisfies Config diff --git a/sim/tools/clay/types.ts 
b/sim/tools/clay/types.ts deleted file mode 100644 index e107ea7ba..000000000 --- a/sim/tools/clay/types.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { ToolResponse } from "../types" - -export interface ClayPopulateParams { - webhookURL: string - data: JSON - authToken?: string -} - -export interface ClayPopulateResponse extends ToolResponse { - output: { - data: any - } -} \ No newline at end of file diff --git a/sim/tools/confluence/utils.ts b/sim/tools/confluence/utils.ts deleted file mode 100644 index 307aacd29..000000000 --- a/sim/tools/confluence/utils.ts +++ /dev/null @@ -1,33 +0,0 @@ -export async function getConfluenceCloudId(domain: string, accessToken: string): Promise { - try { - const response = await fetch('https://api.atlassian.com/oauth/token/accessible-resources', { - method: 'GET', - headers: { - 'Authorization': `Bearer ${accessToken}`, - 'Accept': 'application/json' - } - }) - - const resources = await response.json() - - // If we have resources, find the matching one - if (Array.isArray(resources) && resources.length > 0) { - const normalizedInput = `https://${domain}`.toLowerCase() - const matchedResource = resources.find(r => r.url.toLowerCase() === normalizedInput) - - if (matchedResource) { - return matchedResource.id - } - } - - // If we couldn't find a match, return the first resource's ID - // This is a fallback in case the URL matching fails - if (Array.isArray(resources) && resources.length > 0) { - return resources[0].id - } - - throw new Error('No Confluence resources found') - } catch (error) { - throw error - } -} \ No newline at end of file diff --git a/sim/tools/elevenlabs/types.ts b/sim/tools/elevenlabs/types.ts deleted file mode 100644 index 80cc3380f..000000000 --- a/sim/tools/elevenlabs/types.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { ToolResponse } from "../types" - -export interface ElevenLabsTtsParams { - apiKey: string - text: string - voiceId: string - modelId?: string - } - - export interface ElevenLabsTtsResponse extends 
ToolResponse { - output: { - audioUrl: string - } - } - \ No newline at end of file diff --git a/sim/tools/firecrawl/types.ts b/sim/tools/firecrawl/types.ts deleted file mode 100644 index 54a3a8215..000000000 --- a/sim/tools/firecrawl/types.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { ToolResponse } from "../types" - -export interface ScrapeParams { - apiKey: string - url: string - scrapeOptions?: { - onlyMainContent?: boolean - formats?: string[] - } - } - - export interface ScrapeResponse extends ToolResponse { - output: { - markdown: string - html?: string - metadata: { - title: string - description: string - language: string - keywords: string - robots: string - ogTitle: string - ogDescription: string - ogUrl: string - ogImage: string - ogLocaleAlternate: string[] - ogSiteName: string - sourceURL: string - statusCode: number - } - } - } \ No newline at end of file diff --git a/sim/tools/guesty/types.ts b/sim/tools/guesty/types.ts deleted file mode 100644 index 69cbe37c7..000000000 --- a/sim/tools/guesty/types.ts +++ /dev/null @@ -1,47 +0,0 @@ -import { ToolResponse } from "../types" - -export interface GuestyGuestParams { - apiKey: string - phoneNumber: string - } - - export interface GuestyGuestResponse extends ToolResponse { - output: { - guests: Array<{ - id: string - fullName: string - email: string - phone: string - address: string - city: string - country: string - }> - } - } - - export interface GuestyReservationParams { - apiKey: string - reservationId: string - } - - export interface GuestyReservationResponse extends ToolResponse { - output: { - id: string - guest: { - fullName: string - email: string - phone: string - } - checkIn: string - checkOut: string - status: string - listing: { - id: string - title: string - } - money: { - totalPaid: number - currency: string - } - } - } \ No newline at end of file diff --git a/sim/tools/http/types.ts b/sim/tools/http/types.ts deleted file mode 100644 index 99c272b69..000000000 --- a/sim/tools/http/types.ts +++ 
/dev/null @@ -1,23 +0,0 @@ -import { ToolResponse } from "../types" -import { HttpMethod } from "../types" -import { TableRow } from "../types" - -export interface RequestParams { - url: string - method?: HttpMethod - headers?: TableRow[] - body?: any - params?: TableRow[] - pathParams?: Record - formData?: Record - timeout?: number - validateStatus?: (status: number) => boolean - } - - export interface RequestResponse extends ToolResponse { - output: { - data: any - status: number - headers: Record - } - } \ No newline at end of file diff --git a/sim/tools/jina/types.ts b/sim/tools/jina/types.ts deleted file mode 100644 index e85c946ea..000000000 --- a/sim/tools/jina/types.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { ToolResponse } from "../types" - -export interface ReadUrlParams { - url: string - useReaderLMv2?: boolean - gatherLinks?: boolean - jsonResponse?: boolean - apiKey?: string - } - - export interface ReadUrlResponse extends ToolResponse { - output: { - content: string - } - } \ No newline at end of file diff --git a/sim/tools/jira/bulk_read.ts b/sim/tools/jira/bulk_read.ts deleted file mode 100644 index b2c05146c..000000000 --- a/sim/tools/jira/bulk_read.ts +++ /dev/null @@ -1,204 +0,0 @@ -import { ToolConfig } from '../types' - import { JiraRetrieveBulkParams, JiraRetrieveResponseBulk } from './types' - - export const jiraBulkRetrieveTool: ToolConfig = { - id: 'jira_bulk_read', - name: 'Jira Bulk Read', - description: 'Retrieve multiple Jira issues in bulk', - version: '1.0.0', - oauth: { - required: true, - provider: 'jira', - additionalScopes: [ - 'read:jira-work', - 'read:jira-user', - 'read:me', - 'offline_access', - ], - }, - params: { - accessToken: { - type: 'string', - required: true, - description: 'OAuth access token for Jira', - }, - domain: { - type: 'string', - required: true, - requiredForToolCall: true, - description: 'Your Jira domain (e.g., yourcompany.atlassian.net)', - }, - projectId: { - type: 'string', - required: true, - 
description: 'Jira project ID', - }, - cloudId: { - type: 'string', - required: false, - description: 'Jira cloud ID', - }, - }, - request: { - url: (params: JiraRetrieveBulkParams) => { - if (params.cloudId) { - return `https://api.atlassian.com/ex/jira/${params.cloudId}/rest/api/3/issue/picker?currentJQL=project=${params.projectId}` - } - // If no cloudId, use the accessible resources endpoint - return 'https://api.atlassian.com/oauth/token/accessible-resources' - }, - method: 'GET', - headers: (params: JiraRetrieveBulkParams) => ({ - 'Authorization': `Bearer ${params.accessToken}`, - 'Accept': 'application/json' - }), - body: (params: JiraRetrieveBulkParams) => ({}) - }, - transformResponse: async (response: Response, params?: JiraRetrieveBulkParams) => { - if (!params) { - throw new Error('Parameters are required for Jira bulk issue retrieval') - } - - try { - // If we don't have a cloudId, we need to fetch it first - if (!params.cloudId) { - if (!response.ok) { - const errorData = await response.json().catch(() => null) - throw new Error(errorData?.message || `Failed to fetch accessible resources: ${response.status} ${response.statusText}`) - } - - const accessibleResources = await response.json() - if (!Array.isArray(accessibleResources) || accessibleResources.length === 0) { - throw new Error('No accessible Jira resources found for this account') - } - - const normalizedInput = `https://${params.domain}`.toLowerCase() - const matchedResource = accessibleResources.find(r => r.url.toLowerCase() === normalizedInput) - - if (!matchedResource) { - throw new Error(`Could not find matching Jira site for domain: ${params.domain}`) - } - - // First get issue keys from picker - const pickerUrl = `https://api.atlassian.com/ex/jira/${matchedResource.id}/rest/api/3/issue/picker?currentJQL=project=${params.projectId}` - const pickerResponse = await fetch(pickerUrl, { - method: 'GET', - headers: { - 'Authorization': `Bearer ${params.accessToken}`, - 'Accept': 
'application/json' - } - }) - - if (!pickerResponse.ok) { - const errorData = await pickerResponse.json().catch(() => null) - throw new Error(errorData?.message || `Failed to retrieve issue keys: ${pickerResponse.status} ${pickerResponse.statusText}`) - } - - const pickerData = await pickerResponse.json() - const issueKeys = pickerData.sections - .flatMap((section: any) => section.issues || []) - .map((issue: any) => issue.key) - - if (issueKeys.length === 0) { - return { - success: true, - output: [] - } - } - - // Now use bulkfetch to get the full issue details - const bulkfetchUrl = `https://api.atlassian.com/ex/jira/${matchedResource.id}/rest/api/3/issue/bulkfetch` - const bulkfetchResponse = await fetch(bulkfetchUrl, { - method: 'POST', - headers: { - 'Authorization': `Bearer ${params.accessToken}`, - 'Accept': 'application/json', - 'Content-Type': 'application/json' - }, - body: JSON.stringify({ - expand: ["names"], - fields: ["summary", "description", "created", "updated"], - fieldsByKeys: false, - issueIdsOrKeys: issueKeys, - properties: [] - }) - }) - - if (!bulkfetchResponse.ok) { - const errorData = await bulkfetchResponse.json().catch(() => null) - throw new Error(errorData?.message || `Failed to retrieve Jira issues: ${bulkfetchResponse.status} ${bulkfetchResponse.statusText}`) - } - - const data = await bulkfetchResponse.json() - return { - success: true, - output: data.issues.map((issue: any) => ({ - ts: new Date().toISOString(), - summary: issue.fields.summary, - description: issue.fields.description?.content?.[0]?.content?.[0]?.text || '', - created: issue.fields.created, - updated: issue.fields.updated - })) - } - } - - // If we have a cloudId, this response is from the issue picker - if (!response.ok) { - const errorData = await response.json().catch(() => null) - throw new Error(errorData?.message || `Failed to retrieve issue keys: ${response.status} ${response.statusText}`) - } - - const pickerData = await response.json() - const issueKeys = 
pickerData.sections - .flatMap((section: any) => section.issues || []) - .map((issue: any) => issue.key) - - if (issueKeys.length === 0) { - return { - success: true, - output: [] - } - } - - // Use bulkfetch to get the full issue details - const bulkfetchUrl = `https://api.atlassian.com/ex/jira/${params.cloudId}/rest/api/3/issue/bulkfetch` - const bulkfetchResponse = await fetch(bulkfetchUrl, { - method: 'POST', - headers: { - 'Authorization': `Bearer ${params.accessToken}`, - 'Accept': 'application/json', - 'Content-Type': 'application/json' - }, - body: JSON.stringify({ - expand: ["names"], - fields: ["summary", "description", "created", "updated"], - fieldsByKeys: false, - issueIdsOrKeys: issueKeys, - properties: [] - }) - }) - - if (!bulkfetchResponse.ok) { - const errorData = await bulkfetchResponse.json().catch(() => null) - throw new Error(errorData?.message || `Failed to retrieve Jira issues: ${bulkfetchResponse.status} ${bulkfetchResponse.statusText}`) - } - - const data = await bulkfetchResponse.json() - return { - success: true, - output: data.issues.map((issue: any) => ({ - ts: new Date().toISOString(), - summary: issue.fields.summary, - description: issue.fields.description?.content?.[0]?.content?.[0]?.text || '', - created: issue.fields.created, - updated: issue.fields.updated - })) - } - } catch (error) { - throw error instanceof Error ? 
error : new Error(String(error)) - } - }, - transformError: (error: any) => { - return error.message || 'Failed to retrieve Jira issues' - } - } \ No newline at end of file diff --git a/sim/tools/jira/retrieve.ts b/sim/tools/jira/retrieve.ts deleted file mode 100644 index df2b139bd..000000000 --- a/sim/tools/jira/retrieve.ts +++ /dev/null @@ -1,154 +0,0 @@ -import { ToolConfig } from '../types' -import { JiraRetrieveResponse, JiraRetrieveParams } from './types' - -export const jiraRetrieveTool: ToolConfig = { - id: 'jira_retrieve', - name: 'Jira Retrieve', - description: 'Retrieve detailed information about a specific Jira issue', - version: '1.0.0', - - oauth: { - required: true, - provider: 'jira', - additionalScopes: [ - 'read:jira-work', - 'read:jira-user', - 'read:me', - 'offline_access', - ], - }, - params: { - accessToken: { - type: 'string', - required: true, - description: 'OAuth access token for Jira', - }, - domain: { - type: 'string', - required: true, - requiredForToolCall: true, - description: 'Your Jira domain (e.g., yourcompany.atlassian.net)', - }, - projectId: { - type: 'string', - required: false, - description: 'Jira project ID to retrieve issues from. If not provided, all issues will be retrieved.', - }, - issueKey: { - type: 'string', - required: true, - description: 'Jira issue key to retrieve (e.g., PROJ-123)', - }, - cloudId: { - type: 'string', - required: false, - description: 'Jira Cloud ID for the instance. 
If not provided, it will be fetched using the domain.', - }, - }, - - request: { - url: (params: JiraRetrieveParams) => { - if (params.cloudId) { - return `https://api.atlassian.com/ex/jira/${params.cloudId}/rest/api/3/issue/${params.issueKey}?expand=renderedFields,names,schema,transitions,operations,editmeta,changelog` - } - // If no cloudId, use the accessible resources endpoint - return 'https://api.atlassian.com/oauth/token/accessible-resources' - }, - method: 'GET', - headers: (params: JiraRetrieveParams) => { - return { - 'Accept': 'application/json', - 'Authorization': `Bearer ${params.accessToken}`, - } - }, - }, - - transformResponse: async (response: Response, params?: JiraRetrieveParams) => { - if (!params) { - throw new Error('Parameters are required for Jira issue retrieval') - } - - try { - // If we don't have a cloudId, we need to fetch it first - if (!params.cloudId) { - if (!response.ok) { - const errorData = await response.json().catch(() => null) - throw new Error(errorData?.message || `Failed to fetch accessible resources: ${response.status} ${response.statusText}`) - } - - const accessibleResources = await response.json() - if (!Array.isArray(accessibleResources) || accessibleResources.length === 0) { - throw new Error('No accessible Jira resources found for this account') - } - - const normalizedInput = `https://${params.domain}`.toLowerCase() - const matchedResource = accessibleResources.find(r => r.url.toLowerCase() === normalizedInput) - - if (!matchedResource) { - throw new Error(`Could not find matching Jira site for domain: ${params.domain}`) - } - - // Now fetch the actual issue with the found cloudId - const issueUrl = `https://api.atlassian.com/ex/jira/${matchedResource.id}/rest/api/3/issue/${params.issueKey}?expand=renderedFields,names,schema,transitions,operations,editmeta,changelog` - const issueResponse = await fetch(issueUrl, { - method: 'GET', - headers: { - 'Accept': 'application/json', - 'Authorization': `Bearer 
${params.accessToken}`, - } - }) - - if (!issueResponse.ok) { - const errorData = await issueResponse.json().catch(() => null) - throw new Error(errorData?.message || `Failed to retrieve Jira issue: ${issueResponse.status} ${issueResponse.statusText}`) - } - - const data = await issueResponse.json() - if (!data || !data.fields) { - throw new Error('Invalid response format from Jira API') - } - - return { - success: true, - output: { - ts: new Date().toISOString(), - issueKey: data.key, - summary: data.fields.summary, - description: data.fields.description, - created: data.fields.created, - updated: data.fields.updated, - }, - } - } - - // If we have a cloudId, this response is the issue data - if (!response.ok) { - const errorData = await response.json().catch(() => null) - throw new Error(errorData?.message || `Failed to retrieve Jira issue: ${response.status} ${response.statusText}`) - } - - const data = await response.json() - if (!data || !data.fields) { - throw new Error('Invalid response format from Jira API') - } - - return { - success: true, - output: { - ts: new Date().toISOString(), - issueKey: data.key, - summary: data.fields.summary, - description: data.fields.description, - created: data.fields.created, - updated: data.fields.updated, - }, - } - } catch (error) { - throw error instanceof Error ? 
error : new Error(String(error)) - } - }, - - transformError: (error: any) => { - return error.message || 'Failed to retrieve Jira issue' - }, -} \ No newline at end of file diff --git a/sim/tools/jira/update.ts b/sim/tools/jira/update.ts deleted file mode 100644 index b5c00ce3d..000000000 --- a/sim/tools/jira/update.ts +++ /dev/null @@ -1,227 +0,0 @@ -import { ToolConfig } from '../types' -import { JiraUpdateResponse, JiraUpdateParams } from './types' -import { getJiraCloudId } from './utils' - -export const jiraUpdateTool: ToolConfig = { - id: 'jira_update', - name: 'Jira Update', - description: 'Update a Jira issue', - version: '1.0.0', - - oauth: { - required: true, - provider: 'jira', - additionalScopes: [ - 'read:jira-user', - 'write:jira-work', - 'write:issue:jira', - 'read:jira-work', - ], - }, - - params: { - accessToken: { - type: 'string', - required: true, - description: 'OAuth access token for Jira', - }, - domain: { - type: 'string', - required: true, - requiredForToolCall: true, - description: 'Your Jira domain (e.g., yourcompany.atlassian.net)', - }, - projectId: { - type: 'string', - required: false, - description: 'Jira project ID to update issues in. If not provided, all issues will be retrieved.', - }, - issueKey: { - type: 'string', - required: true, - description: 'Jira issue key to update', - }, - summary: { - type: 'string', - required: false, - description: 'New summary for the issue', - }, - description: { - type: 'string', - required: false, - description: 'New description for the issue', - }, - status: { - type: 'string', - required: false, - description: 'New status for the issue', - }, - priority: { - type: 'string', - required: false, - description: 'New priority for the issue', - }, - assignee: { - type: 'string', - required: false, - description: 'New assignee for the issue', - }, - cloudId: { - type: 'string', - required: false, - description: 'Jira Cloud ID for the instance. 
If not provided, it will be fetched using the domain.', - }, - }, - - directExecution: async (params) => { - // Pre-fetch the cloudId if not provided - if (!params.cloudId) { - try { - params.cloudId = await getJiraCloudId(params.domain, params.accessToken) - } catch (error) { - throw error - } - } - return undefined // Let the regular request handling take over - }, - - request: { - url: (params) => { - const { domain, issueKey, cloudId } = params - if (!domain || !issueKey || !cloudId) { - throw new Error('Domain, issueKey, and cloudId are required') - } - - const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue/${issueKey}` - return url - }, - method: 'PUT', - headers: (params) => ({ - 'Authorization': `Bearer ${params.accessToken}`, - 'Accept': 'application/json', - 'Content-Type': 'application/json' - }), - body: (params) => { - - // Map the summary from either summary or title field - const summaryValue = params.summary || params.title - const descriptionValue = params.description - - - - const fields: Record = {} - - if (summaryValue) { - fields.summary = summaryValue - } - - if (descriptionValue) { - fields.description = { - type: 'doc', - version: 1, - content: [ - { - type: 'paragraph', - content: [ - { - type: 'text', - text: descriptionValue - } - ] - } - ] - } - } - - if (params.status) { - fields.status = { - name: params.status - } - } - - if (params.priority) { - fields.priority = { - name: params.priority - } - } - - if (params.assignee) { - fields.assignee = { - id: params.assignee - } - } - - return { fields } - } - }, - - transformResponse: async (response: Response, params?: JiraUpdateParams) => { - // Log the response details for debugging - const responseText = await response.text() - - - if (!response.ok) { - try { - if (responseText) { - const data = JSON.parse(responseText) - throw new Error( - data.errorMessages?.[0] || - data.errors?.[Object.keys(data.errors)[0]] || - data.message || - 'Failed to update Jira issue' - 
) - } else { - throw new Error(`Request failed with status ${response.status}: ${response.statusText}`) - } - } catch (e) { - if (e instanceof SyntaxError) { - // If we can't parse the response as JSON, return the raw text - throw new Error(`Jira API error (${response.status}): ${responseText}`) - } - throw e - } - } - - // For successful responses - try { - if (!responseText) { - // Some successful PUT requests might return no content - return { - success: true, - output: { - ts: new Date().toISOString(), - issueKey: params?.issueKey || 'unknown', - summary: 'Issue updated successfully', - success: true - }, - } - } - - const data = JSON.parse(responseText) - return { - success: true, - output: { - ts: new Date().toISOString(), - issueKey: data.key || params?.issueKey || 'unknown', - summary: data.fields?.summary || 'Issue updated', - success: true - }, - } - } catch (e) { - // If we can't parse the response but it was successful, still return success - return { - success: true, - output: { - ts: new Date().toISOString(), - issueKey: params?.issueKey || 'unknown', - summary: 'Issue updated (response parsing failed)', - success: true - }, - } - } - }, - - transformError: (error: any) => { - return error.message || 'Failed to update Jira issue' - } -} \ No newline at end of file diff --git a/sim/tools/jira/utils.ts b/sim/tools/jira/utils.ts deleted file mode 100644 index 8ad304b71..000000000 --- a/sim/tools/jira/utils.ts +++ /dev/null @@ -1,33 +0,0 @@ -export async function getJiraCloudId(domain: string, accessToken: string): Promise { - try { - const response = await fetch('https://api.atlassian.com/oauth/token/accessible-resources', { - method: 'GET', - headers: { - 'Authorization': `Bearer ${accessToken}`, - 'Accept': 'application/json' - } - }) - - const resources = await response.json() - - // If we have resources, find the matching one - if (Array.isArray(resources) && resources.length > 0) { - const normalizedInput = `https://${domain}`.toLowerCase() - const 
matchedResource = resources.find(r => r.url.toLowerCase() === normalizedInput) - - if (matchedResource) { - return matchedResource.id - } - } - - // If we couldn't find a match, return the first resource's ID - // This is a fallback in case the URL matching fails - if (Array.isArray(resources) && resources.length > 0) { - return resources[0].id - } - - throw new Error('No Jira resources found') - } catch (error) { - throw error - } -} \ No newline at end of file diff --git a/sim/tools/jira/write.ts b/sim/tools/jira/write.ts deleted file mode 100644 index ba935ce12..000000000 --- a/sim/tools/jira/write.ts +++ /dev/null @@ -1,227 +0,0 @@ -import { ToolConfig } from '../types' -import { JiraWriteResponse, JiraWriteParams } from './types' -import { getJiraCloudId } from './utils' - -export const jiraWriteTool: ToolConfig = { - id: 'jira_write', - name: 'Jira Write', - description: 'Write a Jira issue', - version: '1.0.0', - - oauth: { - required: true, - provider: 'jira', - additionalScopes: [ - 'read:jira-user', - 'write:jira-work', - 'read:project:jira', - 'read:issue:jira', - 'write:issue:jira', - 'write:comment:jira', - 'write:comment.property:jira', - 'write:attachment:jira', - 'read:attachment:jira', - ], - }, - - params: { - accessToken: { - type: 'string', - required: true, - description: 'OAuth access token for Jira', - }, - domain: { - type: 'string', - required: true, - requiredForToolCall: true, - description: 'Your Jira domain (e.g., yourcompany.atlassian.net)', - }, - projectId: { - type: 'string', - required: true, - description: 'Project ID for the issue', - }, - summary: { - type: 'string', - required: true, - description: 'Summary for the issue', - }, - description: { - type: 'string', - required: false, - description: 'Description for the issue', - }, - priority: { - type: 'string', - required: false, - description: 'Priority for the issue', - }, - assignee: { - type: 'string', - required: false, - description: 'Assignee for the issue', - }, - 
cloudId: { - type: 'string', - required: false, - description: 'Jira Cloud ID for the instance. If not provided, it will be fetched using the domain.', - }, - issueType: { - type: 'string', - required: true, - description: 'Type of issue to create (e.g., Task, Story, Bug, Sub-task)', - }, - }, - - directExecution: async (params) => { - // Pre-fetch the cloudId if not provided - if (!params.cloudId) { - try { - params.cloudId = await getJiraCloudId(params.domain, params.accessToken) - } catch (error) { - throw error - } - } - return undefined // Let the regular request handling take over - }, - - request: { - url: (params) => { - const { domain, cloudId } = params - if (!domain || !cloudId) { - throw new Error('Domain and cloudId are required') - } - - const url = `https://api.atlassian.com/ex/jira/${cloudId}/rest/api/3/issue` - - return url - }, - method: 'POST', - headers: (params) => ({ - 'Authorization': `Bearer ${params.accessToken}`, - 'Accept': 'application/json', - 'Content-Type': 'application/json' - }), - body: (params) => { - - // Validate required fields - if (!params.projectId) { - throw new Error('Project ID is required') - } - if (!params.summary) { - throw new Error('Summary is required') - } - if (!params.issueType) { - throw new Error('Issue type is required') - } - - // Construct fields object with only the necessary fields - const fields: Record = { - project: { - id: params.projectId - }, - issuetype: { - name: params.issueType - }, - summary: params.summary // Use the summary field directly - } - - // Only add description if it exists - if (params.description) { - fields.description = { - type: 'doc', - version: 1, - content: [ - { - type: 'paragraph', - content: [ - { - type: 'text', - text: params.description - } - ] - } - ] - } - } - - // Only add parent if it exists - if (params.parent) { - fields.parent = params.parent - } - - const body = { fields } - return body - } - }, - - transformResponse: async (response: Response, params?: 
JiraWriteParams) => { - // Log the response details for debugging - const responseText = await response.text() - - if (!response.ok) { - try { - if (responseText) { - const data = JSON.parse(responseText) - throw new Error( - data.errorMessages?.[0] || - data.errors?.[Object.keys(data.errors)[0]] || - data.message || - 'Failed to create Jira issue' - ) - } else { - throw new Error(`Request failed with status ${response.status}: ${response.statusText}`) - } - } catch (e) { - if (e instanceof SyntaxError) { - // If we can't parse the response as JSON, return the raw text - throw new Error(`Jira API error (${response.status}): ${responseText}`) - } - throw e - } - } - - // For successful responses - try { - if (!responseText) { - return { - success: true, - output: { - ts: new Date().toISOString(), - issueKey: 'unknown', - summary: 'Issue created successfully', - success: true, - url: '' - }, - } - } - - const data = JSON.parse(responseText) - return { - success: true, - output: { - ts: new Date().toISOString(), - issueKey: data.key || 'unknown', - summary: data.fields?.summary || 'Issue created', - success: true, - url: `https://${params?.domain}/browse/${data.key}` - }, - } - } catch (e) { - return { - success: true, - output: { - ts: new Date().toISOString(), - issueKey: 'unknown', - summary: 'Issue created (response parsing failed)', - success: true, - url: '' - }, - } - } - }, - - transformError: (error: any) => { - return error.message || 'Failed to create Jira issue' - } -} \ No newline at end of file diff --git a/sim/tools/linkup/index.ts b/sim/tools/linkup/index.ts deleted file mode 100644 index e22278ea9..000000000 --- a/sim/tools/linkup/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import { searchTool } from './search' - -export const linkupSearchTool = searchTool \ No newline at end of file diff --git a/sim/tools/mem0/add_memories.ts b/sim/tools/mem0/add_memories.ts deleted file mode 100644 index ec1b63e18..000000000 --- a/sim/tools/mem0/add_memories.ts +++ 
/dev/null @@ -1,120 +0,0 @@ -import { ToolConfig } from "../types" - -// Add Memories Tool -export const mem0AddMemoriesTool: ToolConfig = { - id: 'mem0_add_memories', - name: 'Add Memories', - description: 'Add memories to Mem0 for persistent storage and retrieval', - version: '1.0.0', - params: { - apiKey: { - type: 'string', - required: true, - description: 'Your Mem0 API key', - }, - userId: { - type: 'string', - required: true, - description: 'User ID associated with the memory', - }, - messages: { - type: 'json', - required: true, - description: 'Array of message objects with role and content', - }, - }, - request: { - url: 'https://api.mem0.ai/v1/memories/', - method: 'POST', - headers: (params) => ({ - Authorization: `Token ${params.apiKey}`, - 'Content-Type': 'application/json', - }), - body: (params) => { - // First, ensure messages is an array - let messagesArray = params.messages - if (typeof messagesArray === 'string') { - try { - messagesArray = JSON.parse(messagesArray) - } catch (e) { - throw new Error('Messages must be a valid JSON array of objects with role and content') - } - } - - // Validate message format - if (!Array.isArray(messagesArray) || messagesArray.length === 0) { - throw new Error('Messages must be a non-empty array') - } - - for (const msg of messagesArray) { - if (!msg.role || !msg.content) { - throw new Error('Each message must have role and content properties') - } - } - - // Prepare request body - const body: Record = { - messages: messagesArray, - version: 'v2', - user_id: params.userId - } - - return body - }, - }, - transformResponse: async (response) => { - const data = await response.json() - - // If the API returns an empty array, this might be normal behavior on success - if (Array.isArray(data) && data.length === 0) { - return { - success: true, - output: { - memories: [], - }, - } - } - - // Handle array response with memory objects - if (Array.isArray(data) && data.length > 0) { - // Extract IDs for easy access - const 
memoryIds = data.map(memory => memory.id) - - return { - success: true, - output: { - ids: memoryIds, - memories: data, - }, - } - } - - // Handle non-array responses (single memory object) - if (data && !Array.isArray(data) && data.id) { - return { - success: true, - output: { - ids: [data.id], - memories: [data], - }, - } - } - - // Default response format if none of the above match - return { - success: true, - output: { - memories: Array.isArray(data) ? data : [data], - }, - } - }, - transformError: async (error) => { - return { - success: false, - output: { - ids: [], - memories: [], - } - } - }, - } \ No newline at end of file diff --git a/sim/tools/mem0/get_memories.ts b/sim/tools/mem0/get_memories.ts deleted file mode 100644 index ff5bea876..000000000 --- a/sim/tools/mem0/get_memories.ts +++ /dev/null @@ -1,138 +0,0 @@ -import { ToolConfig } from "../types" - -// Get Memories Tool -export const mem0GetMemoriesTool: ToolConfig = { - id: 'mem0_get_memories', - name: 'Get Memories', - description: 'Retrieve memories from Mem0 by ID or filter criteria', - version: '1.0.0', - params: { - apiKey: { - type: 'string', - required: true, - description: 'Your Mem0 API key', - }, - userId: { - type: 'string', - required: true, - description: 'User ID to retrieve memories for', - }, - memoryId: { - type: 'string', - required: false, - description: 'Specific memory ID to retrieve', - }, - startDate: { - type: 'string', - required: false, - description: 'Start date for filtering by created_at (format: YYYY-MM-DD)', - }, - endDate: { - type: 'string', - required: false, - description: 'End date for filtering by created_at (format: YYYY-MM-DD)', - }, - limit: { - type: 'number', - required: false, - default: 10, - description: 'Maximum number of results to return', - }, - }, - request: { - url: (params: Record) => { - // For a specific memory ID, use the get single memory endpoint - if (params.memoryId) { - // Dynamically set method to GET for memory ID requests - 
params.method = 'GET' - return `https://api.mem0.ai/v1/memories/${params.memoryId}/` - } - // Otherwise use v2 memories endpoint with filters - return 'https://api.mem0.ai/v2/memories/' - }, - method: 'POST', // Default to POST for filtering - headers: (params) => ({ - 'Content-Type': 'application/json', - Authorization: `Token ${params.apiKey}`, - }), - body: (params: Record) => { - // For specific memory ID, we'll use GET method instead and don't need a body - // But we still need to return an empty object to satisfy the type - if (params.memoryId) { - return {} - } - - // Build filters array for AND condition - const andConditions = [] - - // Add user filter - andConditions.push({ "user_id": params.userId }) - - // Add date range filter if provided - if (params.startDate || params.endDate) { - const dateFilter: Record = {} - - if (params.startDate) { - dateFilter.gte = params.startDate - } - - if (params.endDate) { - dateFilter.lte = params.endDate - } - - andConditions.push({ "created_at": dateFilter }) - } - - // Build final filters object - const body: Record = { - page_size: params.limit || 10 - } - - // Only add filters if we have any conditions - if (andConditions.length > 0) { - body.filters = { "AND": andConditions } - } - - return body - }, - }, - transformResponse: async (response, params) => { - try { - // Get raw response for debugging - const responseText = await response.clone().text() - - // Parse the response - const data = JSON.parse(responseText) - - // Format the memories for display - const memories = Array.isArray(data) ? 
data : [data] - - // Extract IDs if available - const ids = memories.map(memory => memory.id).filter(Boolean) - - return { - success: true, - output: { - memories, - ids, - }, - } - } catch (error: any) { - return { - success: false, - output: { - error: `Failed to process get memories response: ${error.message}`, - } - } - } - }, - transformError: async (error) => { - return { - success: false, - output: { - ids: [], - memories: [], - } - } - }, - } \ No newline at end of file diff --git a/sim/tools/mem0/search_memories.ts b/sim/tools/mem0/search_memories.ts deleted file mode 100644 index ff5a22063..000000000 --- a/sim/tools/mem0/search_memories.ts +++ /dev/null @@ -1,119 +0,0 @@ -import { ToolConfig } from "../types" - -// Search Memories Tool -export const mem0SearchMemoriesTool: ToolConfig = { - id: 'mem0_search_memories', - name: 'Search Memories', - description: 'Search for memories in Mem0 using semantic search', - version: '1.0.0', - params: { - apiKey: { - type: 'string', - required: true, - description: 'Your Mem0 API key', - }, - userId: { - type: 'string', - required: true, - description: 'User ID to search memories for', - }, - query: { - type: 'string', - required: true, - description: 'Search query to find relevant memories', - }, - limit: { - type: 'number', - required: false, - default: 10, - description: 'Maximum number of results to return', - }, - }, - request: { - url: 'https://api.mem0.ai/v2/memories/search/', - method: 'POST', - headers: (params) => ({ - 'Content-Type': 'application/json', - Authorization: `Token ${params.apiKey}`, - }), - body: (params) => { - try { - // Create the request body with the format that the curl test confirms works - const body: Record = { - query: params.query || "test", - filters: { - user_id: params.userId - }, - top_k: params.limit || 10 - } - - return body - } catch (error) { - throw error - } - }, - }, - transformResponse: async (response) => { - try { - // Get raw response for debugging - const 
responseText = await response.clone().text() - - // Parse the response - const data = JSON.parse(responseText) - - // Handle empty results - if (!data || (Array.isArray(data) && data.length === 0)) { - return { - success: true, - output: { - searchResults: [], - ids: [], - } - } - } - - // For array results (standard format) - if (Array.isArray(data)) { - const searchResults = data.map(item => ({ - id: item.id, - data: { memory: item.memory || "" }, - score: item.score || 0 - })) - - const ids = data.map(item => item.id).filter(Boolean) - - return { - success: true, - output: { - searchResults, - ids, - } - } - } - - // Fallback for unexpected response format - return { - success: true, - output: { - searchResults: [], - } - } - } catch (error: any) { - return { - success: false, - output: { - error: `Failed to process search response: ${error.message}`, - } - } - } - }, - transformError: async (error) => { - return { - success: false, - output: { - ids: [], - searchResults: [], - } - } - }, - } \ No newline at end of file diff --git a/sim/tools/mem0/types.ts b/sim/tools/mem0/types.ts deleted file mode 100644 index 7b284c011..000000000 --- a/sim/tools/mem0/types.ts +++ /dev/null @@ -1,9 +0,0 @@ -import { ToolResponse } from "../types" - -export interface Mem0Response extends ToolResponse { - output: { - ids?: string[] - memories?: any[] - searchResults?: any[] - } - } \ No newline at end of file diff --git a/sim/tools/openai/types.ts b/sim/tools/openai/types.ts deleted file mode 100644 index e9a02397f..000000000 --- a/sim/tools/openai/types.ts +++ /dev/null @@ -1,20 +0,0 @@ -import { ToolResponse } from "../types" - -export interface DalleResponse extends ToolResponse { - output: { - content: string // This will now be the image URL - image: string // This will be the base64 image data - metadata: { - model: string // Only contains model name now - } - } - } - - -export interface OpenAIEmbeddingsParams { - apiKey: string - input: string | string[] - model?: string 
- encoding_format?: 'float' | 'base64' - user?: string - } \ No newline at end of file diff --git a/sim/tools/serper/types.ts b/sim/tools/serper/types.ts deleted file mode 100644 index b5d2886e2..000000000 --- a/sim/tools/serper/types.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { ToolResponse } from "../types" - -export interface SearchParams { - query: string - apiKey: string - num?: number - gl?: string // country code - hl?: string // language code - type?: 'search' | 'news' | 'places' | 'images' - } - - export interface SearchResult { - title: string - link: string - snippet: string - position: number - imageUrl?: string - date?: string - rating?: string - reviews?: string - address?: string - } - - export interface SearchResponse extends ToolResponse { - output: { - searchResults: SearchResult[] - } - } \ No newline at end of file diff --git a/sim/tools/slack/types.ts b/sim/tools/slack/types.ts deleted file mode 100644 index 461868bec..000000000 --- a/sim/tools/slack/types.ts +++ /dev/null @@ -1,14 +0,0 @@ -import { ToolResponse } from "../types" - -export interface SlackMessageParams { - apiKey: string - channel: string - text: string - } - - export interface SlackMessageResponse extends ToolResponse { - output: { - ts: string - channel: string - } - } \ No newline at end of file diff --git a/sim/tools/telegram/types.ts b/sim/tools/telegram/types.ts deleted file mode 100644 index 0c0a3bdf6..000000000 --- a/sim/tools/telegram/types.ts +++ /dev/null @@ -1,23 +0,0 @@ -import { ToolResponse } from "../types" - -export interface TelegramMessageParams { - botToken: string - chatId: string - text: string - } - - export interface TelegramMessageResponse extends ToolResponse { - output: { - ok: boolean - result: { - message_id: number - chat: { - id: number - type: string - username: string - } - date: number - text: string - } - } - } \ No newline at end of file diff --git a/sim/tools/typeform/types.ts b/sim/tools/typeform/types.ts deleted file mode 100644 index 
8a6f9228a..000000000 --- a/sim/tools/typeform/types.ts +++ /dev/null @@ -1,105 +0,0 @@ -import { ToolResponse } from "../types" - -export interface TypeformFilesParams { - formId: string - responseId: string - fieldId: string - filename: string - inline?: boolean - apiKey: string - } - - export interface TypeformFilesResponse extends ToolResponse { - output: { - fileUrl: string - contentType: string - filename: string - } - } - - export interface TypeformInsightsParams { - formId: string - apiKey: string - } - - // This is the actual output data structure from the API - export interface TypeformInsightsData { - fields: Array<{ - dropoffs: number - id: string - label: string - ref: string - title: string - type: string - views: number - }> - form: { - platforms: Array<{ - average_time: number - completion_rate: number - platform: string - responses_count: number - total_visits: number - unique_visits: number - }> - summary: { - average_time: number - completion_rate: number - responses_count: number - total_visits: number - unique_visits: number - } - } - } - - // The ToolResponse uses a union type to allow either successful data or empty object in error case - export interface TypeformInsightsResponse extends ToolResponse { - output: TypeformInsightsData | Record - } - - export interface TypeformResponsesParams { - formId: string - apiKey: string - pageSize?: number - since?: string - until?: string - completed?: string - } - - export interface TypeformResponsesResponse extends ToolResponse { - output: { - total_items: number - page_count: number - items: Array<{ - landing_id: string - token: string - landed_at: string - submitted_at: string - metadata: { - user_agent: string - platform: string - referer: string - network_id: string - browser: string - } - answers: Array<{ - field: { - id: string - type: string - ref: string - } - type: string - [key: string]: any - }> - hidden: Record - calculated: { - score: number - } - variables: Array<{ - key: string - type: 
string - [key: string]: any - }> - }> - } - } \ No newline at end of file diff --git a/sim/tools/vision/types.ts b/sim/tools/vision/types.ts deleted file mode 100644 index 9068bd854..000000000 --- a/sim/tools/vision/types.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { ToolResponse } from "../types" - -export interface VisionParams { - apiKey: string - imageUrl: string - model?: string - prompt?: string - } - - export interface VisionResponse extends ToolResponse { - output: { - content: string - model?: string - tokens?: number - } - } \ No newline at end of file diff --git a/sim/tools/youtube/types.ts b/sim/tools/youtube/types.ts deleted file mode 100644 index b7a05b9f7..000000000 --- a/sim/tools/youtube/types.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { ToolResponse } from "../types" - -export interface YouTubeSearchParams { - apiKey: string - query: string - maxResults?: number - pageToken?: string - } - - export interface YouTubeSearchResponse extends ToolResponse { - output: { - items: Array<{ - videoId: string - title: string - description: string - thumbnail: string - }> - totalResults: number - nextPageToken?: string - } - } \ No newline at end of file diff --git a/start_simstudio_docker.sh b/start_simstudio_docker.sh index 5d368bbfb..6d46296cd 100755 --- a/start_simstudio_docker.sh +++ b/start_simstudio_docker.sh @@ -1,7 +1,7 @@ #!/bin/bash SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -SIM_DIR=$SCRIPT_DIR/sim +SIM_DIR=$SCRIPT_DIR/apps/sim # Function to display help show_help() { @@ -64,7 +64,7 @@ sleep 5 # Apply migrations automatically echo "Applying database migrations..." 
-docker compose exec simstudio npm run db:push +docker compose exec simstudio bash -c "cd apps/sim && npm run db:push" echo "Sim Studio is now running at http://localhost:3000" echo "To view logs, run: docker compose logs -f simstudio" \ No newline at end of file diff --git a/turbo.json b/turbo.json new file mode 100644 index 000000000..22e2ee83d --- /dev/null +++ b/turbo.json @@ -0,0 +1,31 @@ +{ + "$schema": "https://turbo.build/schema.json", + "envMode": "loose", + "tasks": { + "build": { + "dependsOn": ["^build"], + "inputs": ["$TURBO_DEFAULT$", ".env*"], + "outputs": [".next/**", "!.next/cache/**"] + }, + "dev": { + "persistent": true, + "cache": false + }, + "start": { + "cache": false + }, + "test": { + "dependsOn": ["^build"], + "outputs": [] + }, + "format": { + "outputs": [] + }, + "format:check": { + "outputs": [] + }, + "lint": { + "outputs": [] + } + } +}