Mirror of https://github.com/simstudioai/sim.git, synced 2026-01-05 05:04:10 -05:00
feat(bun): upgrade to bun, reduce docker image size by 95%, upgrade docs & ci (#371)
* migrate to bun
* added envvars to drizzle
* upgrade bun devcontainer feature to a valid one
* added bun, docker not working
* updated envvars, updated to bundler and esnext modules
* fixed build, reinstated otel
* feat: optimized multi-stage docker images
* add coerce for boolean envvar
* feat: add docker-compose configuration for local LLM services and remove legacy Dockerfile and entrypoint script
* feat: add docker-compose files for local and production environments, and implement GitHub Actions for Docker image build and publish
* refactor: remove unused generateStaticParams function from various API routes and maintain dynamic rendering
* cleanup
* upgraded bun
* updated ci
* fixed build

---------

Co-authored-by: Aditya Tripathi <aditya@climactic.co>
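The commit message mentions adding environment variables to Drizzle and coercing a boolean env var, and the route hunks below replace direct `process.env.*` reads with a shared `env` object imported from `@/lib/env`. A minimal sketch of what such a module could look like, assuming a zod-based schema; the actual apps/sim/lib/env.ts is not part of this diff and the variable names are only examples:

```ts
// lib/env.ts (hypothetical sketch, not the file from this commit)
import { z } from 'zod'

const schema = z.object({
  DATABASE_URL: z.string().url(),
  BETTER_AUTH_SECRET: z.string().min(1),
  NEXT_PUBLIC_APP_URL: z.string().url().optional(),
  // z.coerce.boolean() runs Boolean() over the raw string value,
  // so any non-empty value is treated as true and a missing value falls back to the default
  USE_S3_STORAGE: z.coerce.boolean().default(false),
})

// Validate once at startup so a misconfigured deployment fails fast
export const env = schema.parse(process.env)
```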
@@ -19,16 +19,22 @@ alias check-db="PGPASSWORD=postgres psql -h db -U postgres -c '\l'"

# Sim Studio specific aliases
alias logs="cd /workspace/apps/sim && tail -f logs/*.log 2>/dev/null || echo 'No log files found'"
alias sim-start="cd /workspace && npm run dev"
alias sim-migrate="cd /workspace/apps/sim && npx drizzle-kit push"
alias sim-generate="cd /workspace/apps/sim && npx drizzle-kit generate"
alias sim-rebuild="cd /workspace && npm run build && npm run dev"
alias docs-dev="cd /workspace/apps/docs && npm run dev"
alias sim-start="cd /workspace && bun run dev"
alias sim-migrate="cd /workspace/apps/sim && bunx drizzle-kit push"
alias sim-generate="cd /workspace/apps/sim && bunx drizzle-kit generate"
alias sim-rebuild="cd /workspace && bun run build && bun run start"
alias docs-dev="cd /workspace/apps/docs && bun run dev"

# Turbo related commands
alias turbo-build="cd /workspace && npx turbo run build"
alias turbo-dev="cd /workspace && npx turbo run dev"
alias turbo-test="cd /workspace && npx turbo run test"
alias turbo-build="cd /workspace && bunx turbo run build"
alias turbo-dev="cd /workspace && bunx turbo run dev"
alias turbo-test="cd /workspace && bunx turbo run test"

# Bun specific commands
alias bun-update="cd /workspace && bun update"
alias bun-add="cd /workspace && bun add"
alias bun-pm="cd /workspace && bun pm"
alias bun-canary="bun upgrade --canary"

# Default to workspace directory
cd /workspace 2>/dev/null || true

@@ -52,6 +58,12 @@ if [ -z "$SIM_WELCOME_SHOWN" ]; then
echo " turbo-build - Build all apps using Turborepo"
echo " turbo-dev - Start development mode for all apps"
echo " turbo-test - Run tests for all packages"
echo ""
echo "Bun commands:"
echo " bun-update - Update dependencies"
echo " bun-add - Add a new dependency"
echo " bun-pm - Manage dependencies"
echo " bun-canary - Upgrade to the latest canary version of Bun"
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
echo ""
fi
@@ -1,4 +1,5 @@
|
||||
FROM node:20-bullseye
|
||||
# Use the latest Bun canary image for development
|
||||
FROM oven/bun:canary
|
||||
|
||||
# Avoid warnings by switching to noninteractive
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
@@ -6,12 +7,13 @@ ENV DEBIAN_FRONTEND=noninteractive
|
||||
# Install necessary packages for development
|
||||
RUN apt-get update \
|
||||
&& apt-get -y install --no-install-recommends \
|
||||
git curl wget jq sudo postgresql-client \
|
||||
git curl wget jq sudo postgresql-client vim nano \
|
||||
bash-completion ca-certificates lsb-release gnupg \
|
||||
&& apt-get clean -y \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Create a non-root user
|
||||
ARG USERNAME=node
|
||||
ARG USERNAME=bun
|
||||
ARG USER_UID=1000
|
||||
ARG USER_GID=$USER_UID
|
||||
|
||||
@@ -19,11 +21,15 @@ ARG USER_GID=$USER_UID
|
||||
RUN echo "$USERNAME ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/$USERNAME \
|
||||
&& chmod 0440 /etc/sudoers.d/$USERNAME
|
||||
|
||||
# Make sure we have the latest npm
|
||||
RUN npm install -g npm@latest
|
||||
# Install global packages for development
|
||||
RUN bun install -g turbo drizzle-kit typescript @types/node
|
||||
|
||||
# Install global packages
|
||||
RUN npm install -g drizzle-kit turbo
|
||||
# Install bun completions
|
||||
RUN bun completions > /etc/bash_completion.d/bun
|
||||
|
||||
# Set up shell environment
|
||||
RUN echo "export PATH=$PATH:/home/$USERNAME/.bun/bin" >> /etc/profile
|
||||
RUN echo "source /etc/profile" >> /etc/bash.bashrc
|
||||
|
||||
# Switch back to dialog for any ad-hoc use of apt-get
|
||||
ENV DEBIAN_FRONTEND=dialog
|
||||
|
||||
@@ -33,7 +33,7 @@ This directory contains configuration files for Visual Studio Code Dev Container
- Run database migrations
- Configure helpful aliases

5. Start the application with `sim-start` (alias for `npm run dev`)
5. Start the application with `sim-start` (alias for `bun run dev`)

### Development Commands
@@ -38,7 +38,8 @@
"rvest.vs-code-prettier-eslint",
"mikestead.dotenv",
"dsznajder.es7-react-js-snippets",
"steoates.autoimport"
"steoates.autoimport",
"oven.bun-vscode"
]
}
},

@@ -49,13 +50,12 @@

"postStartCommand": "bash -c 'if [ ! -f ~/.bashrc ] || ! grep -q \"sim-start\" ~/.bashrc; then cp .devcontainer/.bashrc ~/.bashrc; fi'",

"remoteUser": "node",
"remoteUser": "bun",

"features": {
"ghcr.io/devcontainers/features/git:1": {},
"ghcr.io/devcontainers-contrib/features/npm-package:1": {
"package": "typescript",
"version": "latest"
}
"ghcr.io/prulloac/devcontainer-features/bun:1": {
"version": "latest"
}
}
}
@@ -7,6 +7,7 @@ services:
dockerfile: .devcontainer/Dockerfile
volumes:
- ..:/workspace:cached
- bun-cache:/home/bun/.bun/cache:delegated
command: sleep infinity
environment:
- NODE_ENV=development

@@ -14,6 +15,7 @@ services:
- POSTGRES_URL=postgresql://postgres:postgres@db:5432/simstudio
- BETTER_AUTH_URL=http://localhost:3000
- NEXT_PUBLIC_APP_URL=http://localhost:3000
- BUN_INSTALL_CACHE_DIR=/home/bun/.bun/cache
depends_on:
db:
condition: service_healthy

@@ -41,3 +43,4 @@ services:

volumes:
postgres-data:
bun-cache:
@@ -15,7 +15,7 @@ cp /workspace/.devcontainer/.bashrc ~/.bashrc
echo 'if [ -f ~/.bashrc ]; then . ~/.bashrc; fi' >> ~/.profile

# Clean and reinstall dependencies to ensure platform compatibility
echo "📦 Cleaning and reinstalling npm dependencies..."
echo "📦 Cleaning and reinstalling dependencies..."
if [ -d "node_modules" ]; then
echo "Removing existing node_modules to ensure platform compatibility..."
rm -rf node_modules

@@ -23,11 +23,26 @@ if [ -d "node_modules" ]; then
rm -rf apps/docs/node_modules
fi

# Ensure Bun cache directory exists and has correct permissions
mkdir -p ~/.bun/cache
chmod 700 ~/.bun ~/.bun/cache

# Install dependencies with platform-specific binaries
npm install || {
echo "⚠️ npm install had issues but continuing setup..."
echo "Installing dependencies with Bun..."
bun install || {
echo "⚠️ bun install had issues but continuing setup..."
}

# Check for native dependencies
echo "Checking for native dependencies compatibility..."
NATIVE_DEPS=$(grep '"trustedDependencies"' apps/sim/package.json || echo "")
if [ ! -z "$NATIVE_DEPS" ]; then
echo "⚠️ Native dependencies detected. Ensuring compatibility with Bun..."
for pkg in $(echo $NATIVE_DEPS | grep -oP '"[^"]*"' | tr -d '"' | grep -v "trustedDependencies"); do
echo "Checking compatibility for $pkg..."
done
fi

# Set up environment variables if .env doesn't exist for the sim app
if [ ! -f "apps/sim/.env" ]; then
echo "📄 Creating .env file from template..."

@@ -42,7 +57,7 @@ fi
echo "🗃️ Running database schema generation and migrations..."
echo "Generating schema..."
cd apps/sim
npx drizzle-kit generate
bunx drizzle-kit generate
cd ../..

echo "Waiting for database to be ready..."

@@ -53,7 +68,7 @@ echo "Waiting for database to be ready..."
if PGPASSWORD=postgres psql -h db -U postgres -c '\q' 2>/dev/null; then
echo "Database is ready!"
cd apps/sim
DATABASE_URL=postgresql://postgres:postgres@db:5432/simstudio npx drizzle-kit push
DATABASE_URL=postgresql://postgres:postgres@db:5432/simstudio bunx drizzle-kit push
cd ../..
break
fi

@@ -71,13 +86,14 @@ echo "Waiting for database to be ready..."
cat << EOF >> ~/.bashrc

# Additional Sim Studio Development Aliases
alias migrate="cd /workspace/apps/sim && DATABASE_URL=postgresql://postgres:postgres@db:5432/simstudio npx drizzle-kit push"
alias generate="cd /workspace/apps/sim && npx drizzle-kit generate"
alias dev="cd /workspace && npm run dev"
alias build="cd /workspace && npm run build"
alias start="cd /workspace && npm run dev"
alias lint="cd /workspace/apps/sim && npm run lint"
alias test="cd /workspace && npm run test"
alias migrate="cd /workspace/apps/sim && DATABASE_URL=postgresql://postgres:postgres@db:5432/simstudio bunx drizzle-kit push"
alias generate="cd /workspace/apps/sim && bunx drizzle-kit generate"
alias dev="cd /workspace && bun run dev"
alias build="cd /workspace && bun run build"
alias start="cd /workspace && bun run dev"
alias lint="cd /workspace/apps/sim && bun run lint"
alias test="cd /workspace && bun run test"
alias bun-update="cd /workspace && bun update"
EOF

# Source the .bashrc to make aliases available immediately
@@ -1,12 +1,11 @@
# Exclude files from Docker build
.git
.github
node_modules
.next
.vercel
.husky
.env
.env.*
npm-debug.log
LICENSE
NOTICE
.prettierrc
.prettierignore
README.md
.devcontainer
.gitignore
.husky
.github
.devcontainer
.env.example
node_modules
.github/CONTRIBUTING.md
@@ -214,9 +214,9 @@ If you prefer not to use Docker or Dev Containers:
```
2. **Install Dependencies:**

- Using NPM:
- Using Bun:
```bash
npm install
bun install
```

3. **Set Up Environment:**

@@ -229,14 +229,14 @@ If you prefer not to use Docker or Dev Containers:
- You need a PostgreSQL instance running
- Run migrations:
```bash
npm run db:push
bun run db:push
```

5. **Run the Development Server:**

- With NPM:
- With Bun:
```bash
npm run dev
bun run dev
```

6. **Make Your Changes and Test Locally.**

@@ -248,7 +248,7 @@ When working on email templates, you can preview them using a local email previe
1. **Run the Email Preview Server:**

```bash
npm run email:dev
bun run email:dev
```

2. **Access the Preview:**
.github/PULL_REQUEST_TEMPLATE.md
@@ -26,7 +26,7 @@ Please describe the tests that you ran to verify your changes. Provide instructi
- [ ] I have performed a self-review of my own code
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] I have added tests that prove my fix is effective or that my feature works
- [ ] All tests pass locally and in CI (`npm test`)
- [ ] All tests pass locally and in CI (`bun run test`)
- [ ] My changes generate no new warnings
- [ ] Any dependent changes have been merged and published in downstream modules
- [ ] I have updated version numbers as needed (if needed)
.github/dependabot.yml
@@ -1,7 +1,7 @@
version: 2
updates:
- package-ecosystem: 'npm'
directory: 'apps/sim'
- package-ecosystem: 'bun'
directory: '/apps/sim'
schedule:
interval: 'weekly'
day: 'monday'

@@ -21,9 +21,9 @@ updates:
patterns:
- '*'

# Documentation site dependencies (/docs)
- package-ecosystem: 'npm'
directory: 'apps/docs'
# Documentation site dependencies (/apps/docs)
- package-ecosystem: 'bun'
directory: '/apps/docs'
schedule:
interval: 'weekly'
day: 'wednesday'

@@ -42,7 +42,7 @@ updates:
- '*'

# Root-level dependencies (if any)
- package-ecosystem: 'npm'
- package-ecosystem: 'bun'
directory: '/'
schedule:
interval: 'weekly'
.github/workflows/build.yml (new file)
@@ -0,0 +1,64 @@
name: Build and Publish Docker Image

on:
push:
branches: [main]
tags: ['v*']

jobs:
build-and-push:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
include:
- dockerfile: ./docker/app.Dockerfile
image: ghcr.io/simstudioai/simstudio
- dockerfile: ./docker/db.Dockerfile
image: ghcr.io/simstudioai/migrations
permissions:
contents: read
packages: write

steps:
- name: Checkout repository
uses: actions/checkout@v4

- name: Set up QEMU
uses: docker/setup-qemu-action@v3

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

- name: Log in to the Container registry
if: github.event_name != 'pull_request'
uses: docker/login-action@v3
with:
registry: simstudioai
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ matrix.image }}
tags: |
type=raw,value=latest,enable=${{ github.ref == 'refs/heads/main' }}
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}.{{minor}}.{{patch}}
type=sha,format=long

- name: Build and push Docker image
uses: docker/build-push-action@v5
with:
context: .
file: ${{ matrix.dockerfile }}
platforms: linux/amd64,linux/arm64
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max
.github/workflows/ci.yml
@@ -15,23 +15,25 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4

- name: Setup Node.js
- name: Setup Bun
uses: oven-sh/setup-bun@v1
with:
bun-version: latest

- name: Setup Node
uses: actions/setup-node@v4
with:
node-version: '20'
cache: 'npm'
cache-dependency-path: './package-lock.json'
node-version: latest

- name: Install dependencies
run: npm ci

- name: Install Turbo globally
run: npm install -g turbo
run: bun install

- name: Run tests with coverage
env:
NODE_OPTIONS: '--no-warnings'
run: npx turbo run test
NEXT_PUBLIC_APP_URL: 'https://www.simstudio.ai'
ENCRYPTION_KEY: '7cf672e460e430c1fba707575c2b0e2ad5a99dddf9b7b7e3b5646e630861db1c' # dummy key for CI only
run: bun run test

- name: Build application
env:

@@ -41,7 +43,8 @@ jobs:
STRIPE_WEBHOOK_SECRET: 'dummy_secret_for_ci_only'
RESEND_API_KEY: 'dummy_key_for_ci_only'
AWS_REGION: 'us-west-2'
run: npx turbo run build
ENCRYPTION_KEY: '7cf672e460e430c1fba707575c2b0e2ad5a99dddf9b7b7e3b5646e630861db1c' # dummy key for CI only
run: bun run build

- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3

@@ -59,18 +62,16 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4

- name: Setup Node.js
uses: actions/setup-node@v4
- name: Setup Bun
uses: oven-sh/setup-bun@main
with:
node-version: '20'
cache: 'npm'
cache-dependency-path: './package-lock.json'
bun-version: latest

- name: Install dependencies
run: npm ci
run: bun install

- name: Apply migrations
working-directory: ./apps/sim
env:
DATABASE_URL: ${{ secrets.DATABASE_URL }}
run: npx drizzle-kit push
run: bunx drizzle-kit push
.gitignore
@@ -2,28 +2,24 @@

# dependencies
/node_modules
docs/node_modules
/apps/**/node_modules
/packages/**/node_modules
scripts/node_modules
/.pnp
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/versions
/scripts/node_modules

# bun specific
.bun
bun.lock
bun-debug.log*

# testing
/coverage
/apps/**/coverage

# next.js
/.next/
sim/.next/
sim/out/
sim/build
docs/.next/
docs/out/
docs/build
/apps/**/out/
/apps/**/.next/
/apps/**/build

# production
/build

@@ -36,12 +32,6 @@ sim-standalone.tar.gz
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*

# env files
.env
*.env

@@ -61,9 +51,9 @@ next-env.d.ts
.cursorrules

# docs
docs/.source
docs/.contentlayer
docs/.content-collections
/apps/docs/.source
/apps/docs/.contentlayer
/apps/docs/.content-collections

# database instantiation
**/postgres_data/
@@ -6,19 +6,17 @@ build

# Dependencies
node_modules
.bun

# Cache
.cache
.npm

# Misc
.DS_Store
*.pem

# Debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
bun-debug.log*

# Local env files
.env*.local
Dockerfile (deleted)
@@ -1,24 +0,0 @@
FROM node:20-alpine

# Set working directory
WORKDIR /app

# Copy the entire monorepo
COPY . ./

# Create the .env file if it doesn't exist
RUN touch apps/sim/.env

# Install dependencies for the monorepo
RUN npm install

# Install Turbo globally
RUN npm install -g turbo

# Generate database schema for sim app
RUN cd apps/sim && npx drizzle-kit generate

EXPOSE 3000

# Run migrations and start the app
CMD cd apps/sim && npx drizzle-kit push && cd ../.. && npm run dev
README.md
@@ -30,6 +30,7 @@ There are several ways to self-host Sim Studio:
git clone https://github.com/simstudioai/sim.git

# Create environment file and update with required environment variables (BETTER_AUTH_SECRET)
cd apps/sim
cp .env.example .env

# Start Sim Studio using the provided script

@@ -122,7 +123,7 @@ services:
3. Open the project in your editor
4. Click "Reopen in Container" when prompted
5. The environment will automatically be set up
6. Run `npm run dev` in the terminal or use the `sim-start` alias
6. Run `bun run dev` in the terminal or use the `sim-start` alias

### Option 3: Manual Setup

@@ -130,21 +131,22 @@ services:

```bash
# Clone the repository
git clone https://github.com/YOUR_USERNAME/sim.git
git clone https://github.com/simstudioai/sim.git
cd sim

# Install dependencies
npm install
bun install
```

2. **Set Up Environment**

```bash
cd apps/sim
cp .env.example .env # or create a new .env file

# Configure your .env file with the required environment variables:
# - Database connection (PostgreSQL)
# - Authentication settings (Better-Auth Secret)
# - Authentication settings (Better-Auth Secret, Better-Auth URL)
```

⚠️ **Important Notes:**

@@ -158,7 +160,7 @@ cp .env.example .env # or create a new .env file
```bash
# Push the database schema
cd apps/sim
npx drizzle-kit push
bunx drizzle-kit push
```

4. **Start Development Server**

@@ -166,7 +168,7 @@ npx drizzle-kit push

```bash
# Start the development server
cd ../..
npm run dev
bun run dev
```

5. **Open [http://localhost:3000](http://localhost:3000) in your browser**

@@ -174,6 +176,7 @@ npm run dev
## Tech Stack

- **Framework**: [Next.js](https://nextjs.org/) (App Router)
- **Runtime**: [Bun](https://bun.sh/)
- **Database**: PostgreSQL with [Drizzle ORM](https://orm.drizzle.team)
- **Authentication**: [Better Auth](https://better-auth.com)
- **UI**: [Shadcn](https://ui.shadcn.com/), [Tailwind CSS](https://tailwindcss.com)
apps/docs/.gitignore
@@ -2,13 +2,11 @@

# dependencies
/node_modules
/.pnp
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/versions

# bun specific
.bun
bun.lockb
bun-debug.log*

# testing
/coverage

@@ -22,12 +20,6 @@
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*

# env files
.env
*.env
@@ -6,11 +6,7 @@ This is a Next.js application generated with
Run development server:

```bash
npm run dev
# or
pnpm dev
# or
yarn dev
bun run dev
```

Open http://localhost:3000 with your browser to see the result.

@@ -24,3 +20,4 @@ resources:
features and API.
- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial.
- [Fumadocs](https://fumadocs.vercel.app) - learn about Fumadocs
- [Bun Documentation](https://bun.sh/docs) - learn about Bun features and API
@@ -3,7 +3,7 @@ import { DocsBody, DocsDescription, DocsPage, DocsTitle } from 'fumadocs-ui/page
import mdxComponents from '@/components/mdx-components'
import { source } from '@/lib/source'

export const dynamic = 'force-static'
export const dynamic = 'force-dynamic'

export default async function Page(props: { params: Promise<{ slug?: string[] }> }) {
const params = await props.params
apps/docs/package-lock.json (generated; diff too large to display)
@@ -4,9 +4,9 @@
"private": true,
"license": "Apache-2.0",
"scripts": {
"dev": "dotenv -- next dev --port 3001",
"build": "dotenv -- next build",
"start": "dotenv -- next start",
"dev": "next dev --port 3001",
"build": "next build",
"start": "next start",
"postinstall": "fumadocs-mdx"
},
"dependencies": {
apps/sim/.gitignore
@@ -3,13 +3,11 @@
# dependencies
/node_modules
/packages/**/node_modules
/.pnp
.pnp.*
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/versions

# bun specific
.bun
bun.lockb
bun-debug.log*

# testing
/coverage

@@ -29,12 +27,6 @@ sim-standalone.tar.gz
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*

# env files
.env
*.env
@@ -1,20 +1,21 @@
'use server'

import { env } from '@/lib/env'
import { isProd } from '@/lib/environment'

export async function getOAuthProviderStatus() {
const githubAvailable = !!(
process.env.GITHUB_CLIENT_ID &&
process.env.GITHUB_CLIENT_SECRET &&
process.env.GITHUB_CLIENT_ID !== 'placeholder' &&
process.env.GITHUB_CLIENT_SECRET !== 'placeholder'
env.GITHUB_CLIENT_ID &&
env.GITHUB_CLIENT_SECRET &&
env.GITHUB_CLIENT_ID !== 'placeholder' &&
env.GITHUB_CLIENT_SECRET !== 'placeholder'
)

const googleAvailable = !!(
process.env.GOOGLE_CLIENT_ID &&
process.env.GOOGLE_CLIENT_SECRET &&
process.env.GOOGLE_CLIENT_ID !== 'placeholder' &&
process.env.GOOGLE_CLIENT_SECRET !== 'placeholder'
env.GOOGLE_CLIENT_ID &&
env.GOOGLE_CLIENT_SECRET &&
env.GOOGLE_CLIENT_ID !== 'placeholder' &&
env.GOOGLE_CLIENT_SECRET !== 'placeholder'
)

return { githubAvailable, googleAvailable, isProduction: isProd }
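Many of the API route hunks that follow repeat the same two changes: import the validated `env` object and mark the route as dynamically rendered (the commit message notes that unused `generateStaticParams` helpers were removed in favor of dynamic rendering). A condensed, hypothetical example of the resulting shape of such a route handler, assuming the `@/lib/env` module sketched earlier:

```ts
// app/api/example/route.ts (illustrative only; not a file from this diff)
import { NextResponse } from 'next/server'
import { env } from '@/lib/env'

// Opt out of static rendering so the handler always runs at request time
export const dynamic = 'force-dynamic'

export async function GET() {
  // Configuration comes from the validated env object rather than process.env
  const configured = Boolean(env.GITHUB_CLIENT_ID && env.GITHUB_CLIENT_ID !== 'placeholder')
  return NextResponse.json({ configured })
}
```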
@@ -1,3 +1,4 @@
import { env } from '@/lib/env'
import { isProd } from '@/lib/environment'
import { getBaseUrl } from '@/lib/urls/utils'
import { VerifyContent } from './verify-content'

@@ -8,9 +9,7 @@ export const dynamic = 'force-dynamic'
export default function VerifyPage() {
const baseUrl = getBaseUrl()

const hasResendKey = Boolean(
process.env.RESEND_API_KEY && process.env.RESEND_API_KEY !== 'placeholder'
)
const hasResendKey = Boolean(env.RESEND_API_KEY && env.RESEND_API_KEY !== 'placeholder')

return (
<main className="flex min-h-screen flex-col items-center justify-center bg-gray-50">

@@ -1,4 +1,5 @@
import { GithubIcon } from '@/components/icons'
import { env } from '@/lib/env'

/**
* Format a number to a human-readable format (e.g., 1000 -> 1k, 1100 -> 1.1k)

@@ -16,7 +17,7 @@ function formatNumber(num: number): string {
}

async function getGitHubStars() {
const token = process.env.GITHUB_TOKEN
const token = env.GITHUB_TOKEN

try {
const response = await fetch('https://api.github.com/repos/simstudioai/sim', {

@@ -1,4 +1,6 @@
import { toNextJsHandler } from 'better-auth/next-js'
import { auth } from '@/lib/auth'

export const dynamic = 'force-dynamic'

export const { GET, POST } = toNextJsHandler(auth.handler)
@@ -2,6 +2,8 @@ import { NextRequest, NextResponse } from 'next/server'
import { auth } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console-logger'

export const dynamic = 'force-dynamic'

const logger = createLogger('ForgetPasswordAPI')

export async function POST(request: NextRequest) {

@@ -1,6 +1,8 @@
import { NextResponse } from 'next/server'
import { getConfluenceCloudId } from '@/tools/confluence/utils'

export const dynamic = 'force-dynamic'

export async function POST(request: Request) {
try {
const { domain, accessToken, pageId, cloudId: providedCloudId } = await request.json()

@@ -1,6 +1,8 @@
import { NextResponse } from 'next/server'
import { getConfluenceCloudId } from '@/tools/confluence/utils'

export const dynamic = 'force-dynamic'

export async function POST(request: Request) {
try {
const {

@@ -6,6 +6,8 @@ import { createLogger } from '@/lib/logs/console-logger'
import { db } from '@/db'
import { account, user } from '@/db/schema'

export const dynamic = 'force-dynamic'

const logger = createLogger('OAuthConnectionsAPI')

interface GoogleIdToken {

@@ -40,7 +42,7 @@ export async function GET(request: NextRequest) {
.where(eq(user.id, session.user.id))
.limit(1)

const userEmail = userRecord.length > 0 ? userRecord[0].email : null
const userEmail = userRecord.length > 0 ? userRecord[0]?.email : null

// Process accounts to determine connections
const connections: any[] = []

@@ -8,6 +8,8 @@ import { OAuthService } from '@/lib/oauth'
import { db } from '@/db'
import { account, user } from '@/db/schema'

export const dynamic = 'force-dynamic'

const logger = createLogger('OAuthCredentialsAPI')

interface GoogleIdToken {

@@ -5,6 +5,8 @@ import { createLogger } from '@/lib/logs/console-logger'
import { db } from '@/db'
import { account } from '@/db/schema'

export const dynamic = 'force-dynamic'

const logger = createLogger('OAuthDisconnectAPI')

/**

@@ -8,6 +8,8 @@ interface DiscordChannel {
guild_id?: string
}

export const dynamic = 'force-dynamic'

const logger = createLogger('DiscordChannelsAPI')

export async function POST(request: Request) {

@@ -7,6 +7,8 @@ interface DiscordServer {
icon: string | null
}

export const dynamic = 'force-dynamic'

const logger = createLogger('DiscordServersAPI')

export async function POST(request: Request) {

@@ -6,6 +6,8 @@ import { db } from '@/db'
import { account } from '@/db/schema'
import { refreshAccessTokenIfNeeded } from '../../utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('GoogleDriveFileAPI')

/**

@@ -6,6 +6,8 @@ import { db } from '@/db'
import { account } from '@/db/schema'
import { refreshAccessTokenIfNeeded } from '../../utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('GoogleDriveFilesAPI')

/**

@@ -6,6 +6,8 @@ import { db } from '@/db'
import { account } from '@/db/schema'
import { refreshAccessTokenIfNeeded } from '../../utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('GmailLabelAPI')

export async function GET(request: NextRequest) {

@@ -6,6 +6,8 @@ import { db } from '@/db'
import { account } from '@/db/schema'
import { refreshAccessTokenIfNeeded } from '../../utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('GmailLabelsAPI')

interface GmailLabel {
@@ -2,6 +2,8 @@ import { NextResponse } from 'next/server'
import { Logger } from '@/lib/logs/console-logger'
import { getJiraCloudId } from '@/tools/jira/utils'

export const dynamic = 'force-dynamic'

const logger = new Logger('jira_issue')

export async function POST(request: Request) {

@@ -2,6 +2,8 @@ import { NextResponse } from 'next/server'
import { Logger } from '@/lib/logs/console-logger'
import { getJiraCloudId } from '@/tools/jira/utils'

export const dynamic = 'force-dynamic'

const logger = new Logger('jira_issues')

export async function POST(request: Request) {

@@ -2,6 +2,8 @@ import { NextResponse } from 'next/server'
import { Logger } from '@/lib/logs/console-logger'
import { getJiraCloudId } from '@/tools/jira/utils'

export const dynamic = 'force-dynamic'

const logger = new Logger('jira_projects')

export async function GET(request: Request) {

@@ -2,6 +2,8 @@ import { NextRequest, NextResponse } from 'next/server'
import { createLogger } from '@/lib/logs/console-logger'
import { getCredential, getUserId, refreshTokenIfNeeded } from '../utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('OAuthTokenAPI')

/**

@@ -2,6 +2,8 @@ import { NextRequest, NextResponse } from 'next/server'
import { auth } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console-logger'

export const dynamic = 'force-dynamic'

const logger = createLogger('PasswordReset')

export async function POST(request: NextRequest) {

@@ -3,6 +3,8 @@ import { Logger } from '@/lib/logs/console-logger'
import { markWaitlistUserAsSignedUp } from '@/lib/waitlist/service'
import { verifyToken } from '@/lib/waitlist/token'

export const dynamic = 'force-dynamic'

const logger = new Logger('VerifyWaitlistToken')

export async function POST(request: NextRequest) {

@@ -2,6 +2,7 @@ import { NextRequest } from 'next/server'
import { and, eq } from 'drizzle-orm'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'
import { getBaseDomain } from '@/lib/urls/utils'
import { encryptSecret } from '@/lib/utils'

@@ -70,7 +71,7 @@ export async function GET(_request: NextRequest, { params }: { params: Promise<{
// Create a new result object without the password
const { password, ...safeData } = chatInstance[0]

const isDevelopment = process.env.NODE_ENV === 'development'
const isDevelopment = env.NODE_ENV === 'development'

const chatUrl = isDevelopment
? `http://${chatInstance[0].subdomain}.${getBaseDomain()}`

@@ -220,7 +221,7 @@ export async function PATCH(request: NextRequest, { params }: { params: Promise<

const updatedSubdomain = subdomain || existingChat[0].subdomain

const isDevelopment = process.env.NODE_ENV === 'development'
const isDevelopment = env.NODE_ENV === 'development'

const chatUrl = isDevelopment
? `http://${updatedSubdomain}.${getBaseDomain()}`
@@ -5,6 +5,7 @@
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { NextRequest } from 'next/server'
import { env } from '@/lib/env'

describe('Chat API Route', () => {
const mockSelect = vi.fn()

@@ -273,7 +274,7 @@ describe('Chat API Route', () => {
vi.stubGlobal('process', {
...process,
env: {
...process.env,
...env,
NODE_ENV: 'development',
},
})

@@ -3,6 +3,7 @@ import { and, eq } from 'drizzle-orm'
import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'
import { encryptSecret } from '@/lib/utils'
import { createErrorResponse, createSuccessResponse } from '@/app/api/workflows/utils'

@@ -168,7 +169,7 @@ export async function POST(request: NextRequest) {

// Return successful response with chat URL
// Check if we're in development or production
const isDevelopment = process.env.NODE_ENV === 'development'
const isDevelopment = env.NODE_ENV === 'development'
const chatUrl = isDevelopment
? `http://${subdomain}.localhost:3000`
: `https://${subdomain}.simstudio.ai`

@@ -5,6 +5,7 @@
*/
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { NextResponse } from 'next/server'
import { env } from '@/lib/env'

describe('Chat API Utils', () => {
beforeEach(() => {

@@ -22,7 +23,7 @@ describe('Chat API Utils', () => {
vi.stubGlobal('process', {
...process,
env: {
...process.env,
...env,
NODE_ENV: 'development',
},
})

@@ -1,6 +1,7 @@
import { NextRequest, NextResponse } from 'next/server'
import { eq, sql } from 'drizzle-orm'
import { v4 as uuidv4 } from 'uuid'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'
import { persistExecutionLogs } from '@/lib/logs/execution-logger'
import { buildTraceSpans } from '@/lib/logs/trace-spans'

@@ -18,7 +19,7 @@ declare global {
}

const logger = createLogger('ChatAuthUtils')
const isDevelopment = process.env.NODE_ENV === 'development'
const isDevelopment = env.NODE_ENV === 'development'

// Simple encryption for the auth token
export const encryptAuthToken = (subdomainId: string, type: string): string => {
@@ -1,6 +1,7 @@
import { unstable_noStore as noStore } from 'next/cache'
import { NextRequest, NextResponse } from 'next/server'
import OpenAI from 'openai'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'

export const dynamic = 'force-dynamic'

@@ -9,13 +10,13 @@ export const maxDuration = 60

const logger = createLogger('GenerateCodeAPI')

const openai = process.env.OPENAI_API_KEY
const openai = env.OPENAI_API_KEY
? new OpenAI({
apiKey: process.env.OPENAI_API_KEY,
apiKey: env.OPENAI_API_KEY,
})
: null

if (!process.env.OPENAI_API_KEY) {
if (!env.OPENAI_API_KEY) {
logger.warn('OPENAI_API_KEY not found. Code generation API will not function.')
}

@@ -222,7 +223,7 @@ Example 3 (Array Input):
Generate ONLY the raw body of a JavaScript function based on the user's request.
The code should be executable within an 'async function(params, environmentVariables) {...}' context.
- 'params' (object): Contains input parameters derived from the JSON schema. Access these directly using the parameter name wrapped in angle brackets, e.g., '<paramName>'. Do NOT use 'params.paramName'.
- 'environmentVariables' (object): Contains environment variables. Reference these using the double curly brace syntax: '{{ENV_VAR_NAME}}'. Do NOT use 'environmentVariables.VAR_NAME' or process.env.
- 'environmentVariables' (object): Contains environment variables. Reference these using the double curly brace syntax: '{{ENV_VAR_NAME}}'. Do NOT use 'environmentVariables.VAR_NAME' or env.

IMPORTANT FORMATTING RULES:
1. Reference Environment Variables: Use the exact syntax {{VARIABLE_NAME}}. Do NOT wrap it in quotes (e.g., use 'apiKey = {{SERVICE_API_KEY}}' not 'apiKey = "{{SERVICE_API_KEY}}"'). Our system replaces these placeholders before execution.

@@ -17,6 +17,8 @@ import {
isS3Path,
} from '../utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('FilesDeleteAPI')

/**

@@ -11,6 +11,8 @@ import { downloadFromS3 } from '@/lib/uploads/s3-client'
import { UPLOAD_DIR, USE_S3_STORAGE } from '@/lib/uploads/setup'
import '@/lib/uploads/setup.server'

export const dynamic = 'force-dynamic'

const logger = createLogger('FilesParseAPI')

// Constants for URL downloads

@@ -3,7 +3,7 @@ import { PutObjectCommand } from '@aws-sdk/client-s3'
import { getSignedUrl } from '@aws-sdk/s3-request-presigner'
import { v4 as uuidv4 } from 'uuid'
import { createLogger } from '@/lib/logs/console-logger'
import { s3Client } from '@/lib/uploads/s3-client'
import { getS3Client } from '@/lib/uploads/s3-client'
import { S3_CONFIG, USE_S3_STORAGE } from '@/lib/uploads/setup'
import { createErrorResponse, createOptionsResponse } from '../utils'

@@ -52,7 +52,7 @@ export async function POST(request: NextRequest) {
})

// Generate the presigned URL
const presignedUrl = await getSignedUrl(s3Client, command, { expiresIn: 3600 })
const presignedUrl = await getSignedUrl(getS3Client(), command, { expiresIn: 3600 })

// Create a path for API to serve the file
const servePath = `/api/files/serve/s3/${encodeURIComponent(uniqueKey)}`
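Several hunks above and below also switch call sites from a shared `s3Client` instance to a `getS3Client()` accessor. The implementation of that accessor is not included in this diff; a plausible lazy-initialization sketch, with the client options treated as placeholders:

```ts
// lib/uploads/s3-client.ts (hypothetical sketch of a lazy accessor, not the actual implementation)
import { S3Client } from '@aws-sdk/client-s3'
import { env } from '@/lib/env'

let client: S3Client | undefined

// Construct the client on first use so importing this module never requires
// AWS configuration to be present (e.g. during build or in tests).
export function getS3Client(): S3Client {
  if (!client) {
    client = new S3Client({ region: env.AWS_REGION })
  }
  return client
}
```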
@@ -12,6 +12,8 @@ import {
getContentType,
} from '../../utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('FilesServeAPI')

/**

@@ -9,6 +9,8 @@ import { UPLOAD_DIR, USE_S3_STORAGE } from '@/lib/uploads/setup'
import '@/lib/uploads/setup.server'
import { createErrorResponse, createOptionsResponse, InvalidRequestError } from '../utils'

export const dynamic = 'force-dynamic'

const logger = createLogger('FilesUploadAPI')

export async function POST(request: NextRequest) {

@@ -1,6 +1,7 @@
import { NextRequest, NextResponse } from 'next/server'
import { FreestyleSandboxes } from 'freestyle-sandboxes'
import { createContext, Script } from 'vm'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'

// Explicitly export allowed methods

@@ -27,8 +28,8 @@ function resolveCodeVariables(
const envVarMatches = resolvedCode.match(/\{\{([^}]+)\}\}/g) || []
for (const match of envVarMatches) {
const varName = match.slice(2, -2).trim()
// Priority: 1. Environment variables from workflow, 2. Params, 3. process.env
const varValue = envVars[varName] || params[varName] || process.env[varName] || ''
// Priority: 1. Environment variables from workflow, 2. Params
const varValue = envVars[varName] || params[varName] || ''
// Wrap the value in quotes to ensure it's treated as a string literal
resolvedCode = resolvedCode.replace(match, JSON.stringify(varValue))
}

@@ -72,7 +73,7 @@ export async function POST(req: NextRequest) {
let executionMethod = 'vm' // Default execution method

// Try to use Freestyle if the API key is available
if (process.env.FREESTYLE_API_KEY) {
if (env.FREESTYLE_API_KEY) {
try {
logger.info(`[${requestId}] Using Freestyle for code execution`)
executionMethod = 'freestyle'

@@ -99,7 +100,7 @@ export async function POST(req: NextRequest) {
}

const freestyle = new FreestyleSandboxes({
apiKey: process.env.FREESTYLE_API_KEY,
apiKey: env.FREESTYLE_API_KEY,
})

// Wrap code in export default to match Freestyle's expectations
@@ -1,9 +1,10 @@
import { NextRequest, NextResponse } from 'next/server'
import { Resend } from 'resend'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'

const resend = process.env.RESEND_API_KEY ? new Resend(process.env.RESEND_API_KEY) : null
const resend = env.RESEND_API_KEY ? new Resend(env.RESEND_API_KEY) : null
const logger = createLogger('HelpAPI')

// Define schema for validation

@@ -1,8 +1,9 @@
import { NextResponse } from 'next/server'
import { PutObjectCommand } from '@aws-sdk/client-s3'
import { and, eq, inArray, lt, sql } from 'drizzle-orm'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'
import { s3Client } from '@/lib/uploads/s3-client'
import { getS3Client } from '@/lib/uploads/s3-client'
import { db } from '@/db'
import { subscription, user, workflow, workflowLogs } from '@/db/schema'

@@ -12,19 +13,19 @@ const logger = createLogger('LogsCleanup')

const BATCH_SIZE = 2000
const S3_CONFIG = {
bucket: process.env.S3_LOGS_BUCKET_NAME || '',
region: process.env.AWS_REGION || '',
bucket: env.S3_LOGS_BUCKET_NAME || '',
region: env.AWS_REGION || '',
}

export async function GET(request: Request) {
try {
const authHeader = request.headers.get('authorization')

if (!process.env.CRON_SECRET) {
if (!env.CRON_SECRET) {
return new NextResponse('Configuration error: Cron secret is not set', { status: 500 })
}

if (!authHeader || authHeader !== `Bearer ${process.env.CRON_SECRET}`) {
if (!authHeader || authHeader !== `Bearer ${env.CRON_SECRET}`) {
logger.warn(`Unauthorized access attempt to logs cleanup endpoint`)
return new NextResponse('Unauthorized', { status: 401 })
}

@@ -34,9 +35,7 @@ export async function GET(request: Request) {
}

const retentionDate = new Date()
retentionDate.setDate(
retentionDate.getDate() - Number(process.env.FREE_PLAN_LOG_RETENTION_DAYS || '7')
)
retentionDate.setDate(retentionDate.getDate() - Number(env.FREE_PLAN_LOG_RETENTION_DAYS || '7'))

const freeUsers = await db
.select({ userId: user.id })

@@ -111,7 +110,7 @@ export async function GET(request: Request) {
const logData = JSON.stringify(log)

try {
await s3Client.send(
await getS3Client().send(
new PutObjectCommand({
Bucket: S3_CONFIG.bucket,
Key: logKey,
@@ -1,4 +1,5 @@
import { NextRequest, NextResponse } from 'next/server'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'

const logger = createLogger('TelemetryAPI')

@@ -84,8 +85,8 @@ async function forwardToCollector(data: any): Promise<boolean> {
return false
}

const endpoint = process.env.TELEMETRY_ENDPOINT || 'https://telemetry.simstudio.ai/v1/traces'
const timeout = parseInt(process.env.TELEMETRY_TIMEOUT || '') || DEFAULT_TIMEOUT
const endpoint = env.TELEMETRY_ENDPOINT || 'https://telemetry.simstudio.ai/v1/traces'
const timeout = DEFAULT_TIMEOUT

try {
const timestamp = Date.now() * 1000000

@@ -96,11 +97,11 @@ async function forwardToCollector(data: any): Promise<boolean> {
{ key: 'service.name', value: { stringValue: 'sim-studio' } },
{
key: 'service.version',
value: { stringValue: process.env.NEXT_PUBLIC_APP_VERSION || '0.1.0' },
value: { stringValue: '0.1.0' },
},
{
key: 'deployment.environment',
value: { stringValue: process.env.NODE_ENV || 'production' },
value: { stringValue: env.NODE_ENV || 'production' },
},
]

@@ -1,13 +1,15 @@
import { NextRequest, NextResponse } from 'next/server'
import { Stagehand } from '@browserbasehq/stagehand'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'
import { ensureZodObject, normalizeUrl } from '../utils'

const logger = createLogger('StagehandAgentAPI')

const BROWSERBASE_API_KEY = process.env.BROWSERBASE_API_KEY
const BROWSERBASE_PROJECT_ID = process.env.BROWSERBASE_PROJECT_ID
// Environment variables for Browserbase
const BROWSERBASE_API_KEY = env.BROWSERBASE_API_KEY
const BROWSERBASE_PROJECT_ID = env.BROWSERBASE_PROJECT_ID

const requestSchema = z.object({
task: z.string().min(1),

@@ -1,13 +1,15 @@
import { NextRequest, NextResponse } from 'next/server'
import { Stagehand } from '@browserbasehq/stagehand'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'
import { ensureZodObject, normalizeUrl } from '../utils'

const logger = createLogger('StagehandExtractAPI')

const BROWSERBASE_API_KEY = process.env.BROWSERBASE_API_KEY
const BROWSERBASE_PROJECT_ID = process.env.BROWSERBASE_PROJECT_ID
// Environment variables for Browserbase
const BROWSERBASE_API_KEY = env.BROWSERBASE_API_KEY
const BROWSERBASE_PROJECT_ID = env.BROWSERBASE_PROJECT_ID

const requestSchema = z.object({
instruction: z.string(),
@@ -1,5 +1,6 @@
import { NextRequest, NextResponse } from 'next/server'
import { nanoid } from 'nanoid'
import { env } from '@/lib/env'
import { Logger } from '@/lib/logs/console-logger'
import { acquireLock, releaseLock } from '@/lib/redis'
import { pollGmailWebhooks } from '@/lib/webhooks/gmail-polling-service'

@@ -20,7 +21,7 @@ export async function GET(request: NextRequest) {

try {
const authHeader = request.headers.get('authorization')
const webhookSecret = process.env.CRON_SECRET || process.env.WEBHOOK_POLLING_SECRET
const webhookSecret = env.CRON_SECRET

if (!webhookSecret) {
return new NextResponse('Configuration error: Webhook secret is not set', { status: 500 })

@@ -2,6 +2,7 @@ import { NextRequest, NextResponse } from 'next/server'
import { and, eq } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { getSession } from '@/lib/auth'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'
import { db } from '@/db'
import { webhook, workflow } from '@/db/schema'

@@ -254,7 +255,7 @@ async function createAirtableWebhookSubscription(
const requestOrigin = new URL(request.url).origin
// Ensure origin does not point to localhost for external API calls
const effectiveOrigin = requestOrigin.includes('localhost')
? process.env.NEXT_PUBLIC_APP_URL || requestOrigin // Use env var if available, fallback to original
? env.NEXT_PUBLIC_APP_URL || requestOrigin // Use env var if available, fallback to original
: requestOrigin

const notificationUrl = `${effectiveOrigin}/api/webhooks/trigger/${path}`

@@ -366,7 +367,7 @@ async function createTelegramWebhookSubscription(
const requestOrigin = new URL(request.url).origin
// Ensure origin does not point to localhost for external API calls
const effectiveOrigin = requestOrigin.includes('localhost')
? process.env.NEXT_PUBLIC_APP_URL || requestOrigin // Use env var if available, fallback to original
? env.NEXT_PUBLIC_APP_URL || requestOrigin // Use env var if available, fallback to original
: requestOrigin

const notificationUrl = `${effectiveOrigin}/api/webhooks/trigger/${path}`

@@ -113,7 +113,7 @@ export default function ChatClient({ subdomain }: { subdomain: string }) {
// Fetch chat config function
const fetchChatConfig = async () => {
try {
// Use relative URL instead of absolute URL with process.env.NEXT_PUBLIC_APP_URL
// Use relative URL instead of absolute URL with env.NEXT_PUBLIC_APP_URL
const response = await fetch(`/api/chat/${subdomain}`, {
credentials: 'same-origin',
headers: {
@@ -1,6 +1,7 @@
import type { Metadata, Viewport } from 'next'
import { Analytics } from '@vercel/analytics/next'
import { SpeedInsights } from '@vercel/speed-insights/next'
import { PublicEnvScript } from 'next-runtime-env'
import { createLogger } from '@/lib/logs/console-logger'
import { TelemetryConsentDialog } from '@/app/telemetry-consent-dialog'
import './globals.css'

@@ -8,7 +9,6 @@ import { ZoomPrevention } from './zoom-prevention'

const logger = createLogger('RootLayout')

// Add browser extension attributes that we want to ignore
const BROWSER_EXTENSION_ATTRIBUTES = [
'data-new-gr-c-s-check-loaded',
'data-gr-ext-installed',

@@ -16,7 +16,6 @@ const BROWSER_EXTENSION_ATTRIBUTES = [
'data-grammarly',
'data-fgm',
'data-lt-installed',
// Add other known extension attributes here
]

if (typeof window !== 'undefined') {

@@ -156,6 +155,8 @@ export default function RootLayout({ children }: { children: React.ReactNode })
<meta property="og:image" content="https://simstudio.ai/social/instagram.png" />
<meta property="og:image:width" content="1080" />
<meta property="og:image:height" content="1080" />

<PublicEnvScript />
</head>
<body suppressHydrationWarning>
<ZoomPrevention />

@@ -11,6 +11,7 @@ import {
AlertDialogTitle,
} from '@/components/ui/alert-dialog'
import { Button } from '@/components/ui/button'
import { getNodeEnv } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console-logger'
import { useGeneralStore } from '@/stores/settings/general/store'

@@ -49,7 +50,7 @@ export function TelemetryConsentDialog() {
const loadSettings = useGeneralStore((state) => state.loadSettings)

const hasShownDialogThisSession = useRef(false)
const isDevelopment = process.env.NODE_ENV === 'development'
const isDevelopment = getNodeEnv() === 'development'

// Check localStorage for saved preferences
useEffect(() => {

@@ -101,7 +102,7 @@ export function TelemetryConsentDialog() {
telemetryNotifiedUser,
telemetryEnabled,
hasShownInSession: hasShownDialogThisSession.current,
environment: process.env.NODE_ENV,
environment: getNodeEnv(),
})

const localStorageNotified =

@@ -30,6 +30,8 @@ import { Input } from '@/components/ui/input'
import { Label } from '@/components/ui/label'
import { Skeleton } from '@/components/ui/skeleton'
import { Textarea } from '@/components/ui/textarea'
import { env } from '@/lib/env'
import { getNodeEnv } from '@/lib/environment'
import { createLogger } from '@/lib/logs/console-logger'
import { getBaseDomain } from '@/lib/urls/utils'
import { cn } from '@/lib/utils'

@@ -53,7 +55,7 @@ interface ChatDeployProps {

type AuthType = 'public' | 'password' | 'email'

const isDevelopment = process.env.NODE_ENV === 'development'
const isDevelopment = getNodeEnv() === 'development'

const getDomainSuffix = (() => {
const suffix = isDevelopment ? `.${getBaseDomain()}` : '.simstudio.ai'
@@ -18,6 +18,7 @@ import { Card, CardContent } from '@/components/ui/card'
import { CopyButton } from '@/components/ui/copy-button'
import { Dialog, DialogContent, DialogHeader, DialogTitle } from '@/components/ui/dialog'
import { TabsContent } from '@/components/ui/tabs'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'
import { cn } from '@/lib/utils'
import { useNotificationStore } from '@/stores/notifications/store'

@@ -217,7 +218,7 @@ export function DeployModal({
}

const data = await response.json()
const endpoint = `${process.env.NEXT_PUBLIC_APP_URL}/api/workflows/${workflowId}/execute`
const endpoint = `${env.NEXT_PUBLIC_APP_URL}/api/workflows/${workflowId}/execute`
const inputFormatExample = getInputFormatExample()

setDeploymentInfo({

@@ -278,7 +279,7 @@ export function DeployModal({
setNeedsRedeployment(false)

// Update the local deployment info
const endpoint = `${process.env.NEXT_PUBLIC_APP_URL}/api/workflows/${workflowId}/execute`
const endpoint = `${env.NEXT_PUBLIC_APP_URL}/api/workflows/${workflowId}/execute`
const inputFormatExample = getInputFormatExample()

const newDeploymentInfo = {

@@ -603,7 +604,7 @@ export function DeployModal({
<DeployForm
apiKeys={apiKeys}
keysLoaded={keysLoaded}
endpointUrl={`${process.env.NEXT_PUBLIC_APP_URL}/api/workflows/${workflowId}/execute`}
endpointUrl={`${env.NEXT_PUBLIC_APP_URL}/api/workflows/${workflowId}/execute`}
workflowId={workflowId || ''}
onSubmit={onDeploy}
getInputFormatExample={getInputFormatExample}

@@ -286,18 +286,6 @@ export function ControlBar() {
async function checkStatus() {
if (!activeWorkflowId) return

// Skip API call in localStorage mode
if (
typeof window !== 'undefined' &&
(localStorage.getItem('USE_LOCAL_STORAGE') === 'true' ||
process.env.NEXT_PUBLIC_USE_LOCAL_STORAGE === 'true' ||
process.env.NEXT_PUBLIC_DISABLE_DB_SYNC === 'true')
) {
// For localStorage mode, we already have the status in the workflow store
// Nothing more to do as the useWorkflowStore already has this information
return
}

try {
const response = await fetch(`/api/workflows/${activeWorkflowId}/status`)
if (response.ok) {

@@ -13,6 +13,7 @@ import {
CommandList,
} from '@/components/ui/command'
import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'
import {
Credential,

@@ -248,7 +249,7 @@ export function GoogleDrivePicker({
showUploadFolders: true,
supportDrives: true,
multiselect: false,
appId: process.env.NEXT_PUBLIC_GOOGLE_PROJECT_NUMBER,
appId: env.NEXT_PUBLIC_GOOGLE_PROJECT_NUMBER,
// Enable folder selection when mimeType is folder
setSelectFolderEnabled: mimeTypeFilter?.includes('folder') ? true : false,
callbackFunction: (data) => {

@@ -2,6 +2,7 @@

import { useEffect, useState } from 'react'
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from '@/components/ui/tooltip'
import { env } from '@/lib/env'
import { useSubBlockStore } from '@/stores/workflows/subblock/store'
import { SubBlockConfig } from '@/blocks/types'
import { ConfluenceFileInfo, ConfluenceFileSelector } from './components/confluence-file-selector'

@@ -80,8 +81,8 @@ export function FileSelectorInput({ blockId, subBlock, disabled = false }: FileS
}

// For Google Drive
const clientId = process.env.NEXT_PUBLIC_GOOGLE_CLIENT_ID || ''
const apiKey = process.env.NEXT_PUBLIC_GOOGLE_API_KEY || ''
|
||||
const clientId = env.NEXT_PUBLIC_GOOGLE_CLIENT_ID || ''
|
||||
const apiKey = env.NEXT_PUBLIC_GOOGLE_API_KEY || ''
|
||||
|
||||
// Render Discord channel selector
|
||||
if (isDiscord) {
|
||||
|
||||
@@ -6,7 +6,12 @@ import { BlockConfig, SubBlockConfig, SubBlockLayout, SubBlockType } from '../ty
|
||||
|
||||
const logger = createLogger('FileBlock')
|
||||
|
||||
const isS3Enabled = process.env.USE_S3 === 'true'
|
||||
// Create a safe client-only env subset to avoid server-side env access errors
|
||||
const clientEnv = {
|
||||
USE_S3: process.env.USE_S3,
|
||||
}
|
||||
|
||||
const isS3Enabled = clientEnv.USE_S3
|
||||
const shouldEnableURLInput = isProd || isS3Enabled
|
||||
|
||||
// Define sub-blocks conditionally
|
||||
|
||||
@@ -3,7 +3,12 @@ import { isProd } from '@/lib/environment'
|
||||
import { MistralParserOutput } from '@/tools/mistral/types'
|
||||
import { BlockConfig, SubBlockConfig, SubBlockLayout, SubBlockType } from '../types'
|
||||
|
||||
const isS3Enabled = process.env.USE_S3 === 'true'
|
||||
// Create a safe client-only env subset to avoid server-side env access errors
|
||||
const clientEnv = {
|
||||
USE_S3: process.env.USE_S3,
|
||||
}
|
||||
|
||||
const isS3Enabled = clientEnv.USE_S3
|
||||
const shouldEnableFileUpload = isProd || isS3Enabled
|
||||
|
||||
// Define the input method selector block when needed
|
||||
|
||||
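Note on the two block definitions above: they switch from a strict `process.env.USE_S3 === 'true'` check to reading the raw string off a client-safe subset, where any non-empty value is truthy. If strict parity with the old comparison is wanted, an explicit parse can be layered on top. A minimal sketch follows; the `parseBooleanEnv` helper is a hypothetical name and is not part of this commit.

// Hypothetical helper (not in this commit): parse a string flag explicitly
// so that USE_S3="false" is not treated as enabled by plain truthiness.
const parseBooleanEnv = (value: string | undefined): boolean =>
  value === 'true' || value === '1'

const clientEnv = {
  USE_S3: process.env.USE_S3,
}

const isS3Enabled = parseBooleanEnv(clientEnv.USE_S3)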
@@ -1,12 +1,13 @@
|
||||
import * as React from 'react'
|
||||
import { Container, Img, Link, Section, Text } from '@react-email/components'
|
||||
import { env } from '@/lib/env'
|
||||
|
||||
interface EmailFooterProps {
|
||||
baseUrl?: string
|
||||
}
|
||||
|
||||
export const EmailFooter = ({
|
||||
baseUrl = process.env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai',
|
||||
baseUrl = env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai',
|
||||
}: EmailFooterProps) => {
|
||||
return (
|
||||
<Container>
|
||||
|
||||
@@ -13,6 +13,7 @@ import {
|
||||
Text,
|
||||
} from '@react-email/components'
|
||||
import { format } from 'date-fns'
|
||||
import { env } from '@/lib/env'
|
||||
import { baseStyles } from './base-styles'
|
||||
import EmailFooter from './footer'
|
||||
|
||||
@@ -24,7 +25,7 @@ interface InvitationEmailProps {
|
||||
updatedDate?: Date
|
||||
}
|
||||
|
||||
const baseUrl = process.env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai'
|
||||
const baseUrl = env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai'
|
||||
|
||||
export const InvitationEmail = ({
|
||||
inviterName = 'A team member',
|
||||
|
||||
@@ -11,6 +11,7 @@ import {
|
||||
Section,
|
||||
Text,
|
||||
} from '@react-email/components'
|
||||
import { env } from '@/lib/env'
|
||||
import { baseStyles } from './base-styles'
|
||||
import EmailFooter from './footer'
|
||||
|
||||
@@ -21,7 +22,7 @@ interface OTPVerificationEmailProps {
|
||||
chatTitle?: string
|
||||
}
|
||||
|
||||
const baseUrl = process.env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai'
|
||||
const baseUrl = env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai'
|
||||
|
||||
const getSubjectByType = (type: string, chatTitle?: string) => {
|
||||
switch (type) {
|
||||
|
||||
@@ -13,6 +13,7 @@ import {
|
||||
Text,
|
||||
} from '@react-email/components'
|
||||
import { format } from 'date-fns'
|
||||
import { env } from '@/lib/env'
|
||||
import { baseStyles } from './base-styles'
|
||||
import EmailFooter from './footer'
|
||||
|
||||
@@ -22,7 +23,7 @@ interface ResetPasswordEmailProps {
|
||||
updatedDate?: Date
|
||||
}
|
||||
|
||||
const baseUrl = process.env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai'
|
||||
const baseUrl = env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai'
|
||||
|
||||
export const ResetPasswordEmail = ({
|
||||
username = '',
|
||||
|
||||
@@ -12,6 +12,7 @@ import {
|
||||
Section,
|
||||
Text,
|
||||
} from '@react-email/components'
|
||||
import { env } from '@/lib/env'
|
||||
import { baseStyles } from './base-styles'
|
||||
import EmailFooter from './footer'
|
||||
|
||||
@@ -20,7 +21,7 @@ interface WaitlistApprovalEmailProps {
|
||||
signupLink?: string
|
||||
}
|
||||
|
||||
const baseUrl = process.env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai'
|
||||
const baseUrl = env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai'
|
||||
|
||||
export const WaitlistApprovalEmail = ({
|
||||
email = '',
|
||||
|
||||
@@ -12,6 +12,7 @@ import {
|
||||
Section,
|
||||
Text,
|
||||
} from '@react-email/components'
|
||||
import { env } from '@/lib/env'
|
||||
import { baseStyles } from './base-styles'
|
||||
import EmailFooter from './footer'
|
||||
|
||||
@@ -19,7 +20,7 @@ interface WaitlistConfirmationEmailProps {
|
||||
email?: string
|
||||
}
|
||||
|
||||
const baseUrl = process.env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai'
|
||||
const baseUrl = env.NEXT_PUBLIC_APP_URL || 'https://simstudio.ai'
|
||||
const typeformLink = 'https://form.typeform.com/to/jqCO12pF'
|
||||
|
||||
export const WaitlistConfirmationEmail = ({ email = '' }: WaitlistConfirmationEmailProps) => {
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import { drizzle } from 'drizzle-orm/postgres-js'
|
||||
import postgres from 'postgres'
|
||||
import { env } from '@/lib/env'
|
||||
|
||||
// In production, use the Vercel-generated POSTGRES_URL
|
||||
// In development, use the direct DATABASE_URL
|
||||
const connectionString = process.env.POSTGRES_URL || process.env.DATABASE_URL!
|
||||
const connectionString = env.POSTGRES_URL ?? env.DATABASE_URL
|
||||
|
||||
// Disable prefetch as it is not supported for "Transaction" pool mode
|
||||
const client = postgres(connectionString, {
|
||||
|
||||
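The db client hunk above is cut off after the `postgres(connectionString, {` call. For context, this is a minimal sketch of how such a client is typically wired up with drizzle-orm's postgres-js driver; the `prepare: false` option is what the "disable prefetch" comment refers to, but the exact options used by the repo are not shown in this diff, so treat the body as an assumption.

import { drizzle } from 'drizzle-orm/postgres-js'
import postgres from 'postgres'
import { env } from '@/lib/env'

// Prefer the Vercel-generated POSTGRES_URL, fall back to DATABASE_URL
const connectionString = env.POSTGRES_URL ?? env.DATABASE_URL

// prepare: false disables prepared statements, which "Transaction" pool mode does not support
const client = postgres(connectionString, { prepare: false })

export const db = drizzle(client)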
@@ -205,13 +205,7 @@
|
||||
"tag": "0028_absent_triton",
|
||||
"breakpoints": true
|
||||
},
|
||||
{
|
||||
"idx": 29,
|
||||
"version": "7",
|
||||
"when": 1745211620858,
|
||||
"tag": "0029_grey_barracuda",
|
||||
"breakpoints": true
|
||||
},
|
||||
|
||||
{
|
||||
"idx": 30,
|
||||
"version": "7",
|
||||
|
||||
@@ -1,13 +1,11 @@
import * as dotenv from 'dotenv'
import type { Config } from 'drizzle-kit'

dotenv.config({ path: '../../.env' })
import { env } from './lib/env'

export default {
  schema: './db/schema.ts',
  out: './db/migrations',
  dialect: 'postgresql',
  dbCredentials: {
    url: process.env.DATABASE_URL!,
    url: env.DATABASE_URL,
  },
} satisfies Config

@@ -1,3 +1,4 @@
|
||||
import { env } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { getAllBlocks } from '@/blocks'
|
||||
import { BlockOutput } from '@/blocks/types'
|
||||
@@ -218,7 +219,7 @@ export class AgentBlockHandler implements BlockHandler {
|
||||
hasOutgoingConnections,
|
||||
})
|
||||
|
||||
const baseUrl = process.env.NEXT_PUBLIC_APP_URL || ''
|
||||
const baseUrl = env.NEXT_PUBLIC_APP_URL || ''
|
||||
const url = new URL('/api/providers', baseUrl)
|
||||
|
||||
try {
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { env } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { BlockOutput } from '@/blocks/types'
|
||||
import { getProviderFromModel } from '@/providers/utils'
|
||||
@@ -102,7 +103,7 @@ export class EvaluatorBlockHandler implements BlockHandler {
|
||||
}
|
||||
|
||||
try {
|
||||
const baseUrl = process.env.NEXT_PUBLIC_APP_URL || ''
|
||||
const baseUrl = env.NEXT_PUBLIC_APP_URL || ''
|
||||
const url = new URL('/api/providers', baseUrl)
|
||||
|
||||
// Make sure we force JSON output in the request
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { env } from '@/lib/env'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { generateRouterPrompt } from '@/blocks/blocks/router'
|
||||
import { BlockOutput } from '@/blocks/types'
|
||||
@@ -38,7 +39,7 @@ export class RouterBlockHandler implements BlockHandler {
|
||||
const providerId = getProviderFromModel(routerConfig.model)
|
||||
|
||||
try {
|
||||
const baseUrl = process.env.NEXT_PUBLIC_APP_URL || ''
|
||||
const baseUrl = env.NEXT_PUBLIC_APP_URL || ''
|
||||
const url = new URL('/api/providers', baseUrl)
|
||||
|
||||
// Create the provider request with proper message formatting
|
||||
|
||||
@@ -19,11 +19,17 @@ import {
|
||||
makeFetchTransport,
|
||||
} from '@sentry/nextjs'
|
||||
|
||||
const clientEnv = {
|
||||
NODE_ENV: process.env.NODE_ENV,
|
||||
NEXT_PUBLIC_SENTRY_DSN: process.env.NEXT_PUBLIC_SENTRY_DSN,
|
||||
NEXT_TELEMETRY_DISABLED: process.env.NEXT_TELEMETRY_DISABLED,
|
||||
}
|
||||
|
||||
// Only in production
|
||||
if (typeof window !== 'undefined' && process.env.NODE_ENV === 'production') {
|
||||
if (typeof window !== 'undefined' && clientEnv.NODE_ENV === 'production') {
|
||||
const client = new BrowserClient({
|
||||
dsn: process.env.NEXT_PUBLIC_SENTRY_DSN || undefined,
|
||||
environment: process.env.NODE_ENV || 'development',
|
||||
dsn: clientEnv.NEXT_PUBLIC_SENTRY_DSN || undefined,
|
||||
environment: clientEnv.NODE_ENV || 'development',
|
||||
transport: makeFetchTransport,
|
||||
stackParser: defaultStackParser,
|
||||
integrations: [breadcrumbsIntegration(), dedupeIntegration(), linkedErrorsIntegration()],
|
||||
@@ -40,14 +46,14 @@ if (typeof window !== 'undefined' && process.env.NODE_ENV === 'production') {
|
||||
}
|
||||
|
||||
export const onRouterTransitionStart =
|
||||
process.env.NODE_ENV === 'production' ? captureRouterTransitionStart : () => {}
|
||||
clientEnv.NODE_ENV === 'production' ? captureRouterTransitionStart : () => {}
|
||||
|
||||
if (typeof window !== 'undefined') {
|
||||
const TELEMETRY_STATUS_KEY = 'simstudio-telemetry-status'
|
||||
let telemetryEnabled = true
|
||||
|
||||
try {
|
||||
if (process.env.NEXT_TELEMETRY_DISABLED === '1') {
|
||||
if (clientEnv.NEXT_TELEMETRY_DISABLED === '1') {
|
||||
telemetryEnabled = false
|
||||
} else {
|
||||
const storedPreference = localStorage.getItem(TELEMETRY_STATUS_KEY)
|
||||
|
||||
apps/sim/instrumentation-server.ts (new file, 143 lines)
@@ -0,0 +1,143 @@
|
||||
/**
|
||||
* Sim Studio Telemetry - Server-side Instrumentation
|
||||
*
|
||||
* This file contains all server-side instrumentation logic.
|
||||
*/
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { env } from './lib/env.ts'
|
||||
|
||||
const Sentry =
|
||||
process.env.NODE_ENV === 'production'
|
||||
? require('@sentry/nextjs')
|
||||
: { captureRequestError: () => {} }
|
||||
|
||||
const logger = createLogger('OtelInstrumentation')
|
||||
|
||||
const DEFAULT_TELEMETRY_CONFIG = {
|
||||
endpoint: env.TELEMETRY_ENDPOINT || 'https://telemetry.simstudio.ai/v1/traces',
|
||||
serviceName: 'sim-studio',
|
||||
serviceVersion: '0.1.0',
|
||||
serverSide: { enabled: true },
|
||||
batchSettings: {
|
||||
maxQueueSize: 100,
|
||||
maxExportBatchSize: 10,
|
||||
scheduledDelayMillis: 5000,
|
||||
exportTimeoutMillis: 30000,
|
||||
},
|
||||
}
|
||||
|
||||
// Initialize OpenTelemetry
|
||||
async function initializeOpenTelemetry() {
|
||||
try {
|
||||
if (env.NEXT_TELEMETRY_DISABLED === '1') {
|
||||
logger.info('OpenTelemetry telemetry disabled via environment variable')
|
||||
return
|
||||
}
|
||||
|
||||
let telemetryConfig
|
||||
try {
|
||||
// Use dynamic import for ES modules
|
||||
telemetryConfig = (await import('./telemetry.config.ts')).default
|
||||
} catch (e) {
|
||||
telemetryConfig = DEFAULT_TELEMETRY_CONFIG
|
||||
}
|
||||
|
||||
if (telemetryConfig.serverSide?.enabled === false) {
|
||||
logger.info('Server-side OpenTelemetry instrumentation is disabled in config')
|
||||
return
|
||||
}
|
||||
|
||||
// Dynamic imports for server-side libraries
|
||||
const { NodeSDK } = await import('@opentelemetry/sdk-node')
|
||||
const { resourceFromAttributes } = await import('@opentelemetry/resources')
|
||||
const { SemanticResourceAttributes } = await import('@opentelemetry/semantic-conventions')
|
||||
const { BatchSpanProcessor } = await import('@opentelemetry/sdk-trace-node')
|
||||
const { OTLPTraceExporter } = await import('@opentelemetry/exporter-trace-otlp-http')
|
||||
|
||||
const exporter = new OTLPTraceExporter({
|
||||
url: telemetryConfig.endpoint,
|
||||
})
|
||||
|
||||
const spanProcessor = new BatchSpanProcessor(exporter, {
|
||||
maxQueueSize:
|
||||
telemetryConfig.batchSettings?.maxQueueSize ||
|
||||
DEFAULT_TELEMETRY_CONFIG.batchSettings.maxQueueSize,
|
||||
maxExportBatchSize:
|
||||
telemetryConfig.batchSettings?.maxExportBatchSize ||
|
||||
DEFAULT_TELEMETRY_CONFIG.batchSettings.maxExportBatchSize,
|
||||
scheduledDelayMillis:
|
||||
telemetryConfig.batchSettings?.scheduledDelayMillis ||
|
||||
DEFAULT_TELEMETRY_CONFIG.batchSettings.scheduledDelayMillis,
|
||||
exportTimeoutMillis:
|
||||
telemetryConfig.batchSettings?.exportTimeoutMillis ||
|
||||
DEFAULT_TELEMETRY_CONFIG.batchSettings.exportTimeoutMillis,
|
||||
})
|
||||
|
||||
const configResource = resourceFromAttributes({
|
||||
[SemanticResourceAttributes.SERVICE_NAME]: telemetryConfig.serviceName,
|
||||
[SemanticResourceAttributes.SERVICE_VERSION]: telemetryConfig.serviceVersion,
|
||||
[SemanticResourceAttributes.DEPLOYMENT_ENVIRONMENT]: env.NODE_ENV,
|
||||
})
|
||||
|
||||
const sdk = new NodeSDK({
|
||||
resource: configResource,
|
||||
spanProcessors: [spanProcessor],
|
||||
})
|
||||
|
||||
sdk.start()
|
||||
|
||||
const shutdownHandler = async () => {
|
||||
await sdk
|
||||
.shutdown()
|
||||
.then(() => logger.info('OpenTelemetry SDK shut down successfully'))
|
||||
.catch((err) => logger.error('Error shutting down OpenTelemetry SDK', err))
|
||||
}
|
||||
|
||||
process.on('SIGTERM', shutdownHandler)
|
||||
process.on('SIGINT', shutdownHandler)
|
||||
|
||||
logger.info('OpenTelemetry instrumentation initialized for server-side telemetry')
|
||||
} catch (error) {
|
||||
logger.error('Failed to initialize OpenTelemetry instrumentation', error)
|
||||
}
|
||||
}
|
||||
|
||||
async function initializeSentry() {
|
||||
if (env.NODE_ENV !== 'production') return
|
||||
|
||||
try {
|
||||
const Sentry = await import('@sentry/nextjs')
|
||||
|
||||
// Skip initialization if Sentry appears to be already configured
|
||||
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
|
||||
// @ts-ignore accessing internal API
|
||||
if ((Sentry as any).Hub?.current?.getClient()) {
|
||||
logger.debug('Sentry already initialized, skipping duplicate init')
|
||||
return
|
||||
}
|
||||
|
||||
Sentry.init({
|
||||
dsn: env.NEXT_PUBLIC_SENTRY_DSN || undefined,
|
||||
enabled: true,
|
||||
environment: env.NODE_ENV || 'development',
|
||||
tracesSampleRate: 0.2,
|
||||
beforeSend(event) {
|
||||
if (event.request && typeof event.request === 'object') {
|
||||
;(event.request as any).ip = null
|
||||
}
|
||||
return event
|
||||
},
|
||||
})
|
||||
|
||||
logger.info('Sentry initialized (server-side)')
|
||||
} catch (error) {
|
||||
logger.error('Failed to initialize Sentry', error as Error)
|
||||
}
|
||||
}
|
||||
|
||||
export async function register() {
|
||||
await initializeSentry()
|
||||
await initializeOpenTelemetry()
|
||||
}
|
||||
|
||||
export const onRequestError = Sentry.captureRequestError
|
||||
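instrumentation-server.ts above dynamically imports an optional `./telemetry.config.ts` and falls back to `DEFAULT_TELEMETRY_CONFIG` when that module is absent. A config module that points a fork at its own collector might look like the following sketch; the field names mirror the defaults shown above, but the actual shape of the repo's telemetry.config.ts is not part of this diff, so the values are assumptions.

// Hypothetical telemetry.config.ts for a fork using its own collector
export default {
  endpoint: 'https://otel-collector.example.com/v1/traces', // assumed URL
  serviceName: 'sim-studio',
  serviceVersion: '0.1.0',
  serverSide: { enabled: true },
  batchSettings: {
    maxQueueSize: 100,
    maxExportBatchSize: 10,
    scheduledDelayMillis: 5000,
    exportTimeoutMillis: 30000,
  },
}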
@@ -1,122 +1,9 @@
|
||||
/**
|
||||
* Sim Studio Telemetry - Server-side Instrumentation
|
||||
*
|
||||
* This file can be customized in forked repositories:
|
||||
* - Set TELEMETRY_ENDPOINT env var to your collector
|
||||
* - Modify exporter configuration as needed
|
||||
*
|
||||
* Please maintain ethical telemetry practices if modified.
|
||||
*/
|
||||
// This file enables OpenTelemetry instrumentation for Next.js
|
||||
// See: https://nextjs.org/docs/app/building-your-application/optimizing/open-telemetry
|
||||
// Set experimental.instrumentationHook = true in next.config.ts to enable this
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
|
||||
const Sentry =
|
||||
process.env.NODE_ENV === 'production'
|
||||
? require('@sentry/nextjs')
|
||||
: { captureRequestError: () => {} }
|
||||
|
||||
const logger = createLogger('OtelInstrumentation')
|
||||
|
||||
const DEFAULT_TELEMETRY_CONFIG = {
|
||||
endpoint: process.env.TELEMETRY_ENDPOINT || 'https://telemetry.simstudio.ai/v1/traces',
|
||||
serviceName: 'sim-studio',
|
||||
serviceVersion: process.env.NEXT_PUBLIC_APP_VERSION || '0.1.0',
|
||||
serverSide: { enabled: true },
|
||||
batchSettings: {
|
||||
maxQueueSize: 100,
|
||||
maxExportBatchSize: 10,
|
||||
scheduledDelayMillis: 5000,
|
||||
exportTimeoutMillis: 30000,
|
||||
},
|
||||
}
|
||||
|
||||
export async function register() {
|
||||
if (process.env.NODE_ENV === 'production') {
|
||||
if (process.env.NEXT_RUNTIME === 'nodejs') {
|
||||
await import('./sentry.server.config')
|
||||
}
|
||||
|
||||
if (process.env.NEXT_RUNTIME === 'edge') {
|
||||
await import('./sentry.edge.config')
|
||||
}
|
||||
if (process.env.NEXT_RUNTIME === 'nodejs') {
|
||||
await import('./instrumentation-server')
|
||||
}
|
||||
|
||||
// OpenTelemetry instrumentation
|
||||
if (process.env.NEXT_RUNTIME === 'nodejs') {
|
||||
try {
|
||||
if (process.env.NEXT_TELEMETRY_DISABLED === '1') {
|
||||
logger.info('OpenTelemetry telemetry disabled via environment variable')
|
||||
return
|
||||
}
|
||||
|
||||
let telemetryConfig
|
||||
try {
|
||||
// Use dynamic import instead of require for ES modules
|
||||
telemetryConfig = (await import('./telemetry.config.ts')).default
|
||||
} catch (e) {
|
||||
telemetryConfig = DEFAULT_TELEMETRY_CONFIG
|
||||
}
|
||||
|
||||
if (telemetryConfig.serverSide?.enabled === false) {
|
||||
logger.info('Server-side OpenTelemetry instrumentation is disabled in config')
|
||||
return
|
||||
}
|
||||
|
||||
const { NodeSDK } = await import('@opentelemetry/sdk-node')
|
||||
const { resourceFromAttributes } = await import('@opentelemetry/resources')
|
||||
const { SemanticResourceAttributes } = await import('@opentelemetry/semantic-conventions')
|
||||
const { BatchSpanProcessor } = await import('@opentelemetry/sdk-trace-node')
|
||||
const { OTLPTraceExporter } = await import('@opentelemetry/exporter-trace-otlp-http')
|
||||
|
||||
const exporter = new OTLPTraceExporter({
|
||||
url: telemetryConfig.endpoint,
|
||||
})
|
||||
|
||||
const spanProcessor = new BatchSpanProcessor(exporter, {
|
||||
maxQueueSize:
|
||||
telemetryConfig.batchSettings?.maxQueueSize ||
|
||||
DEFAULT_TELEMETRY_CONFIG.batchSettings.maxQueueSize,
|
||||
maxExportBatchSize:
|
||||
telemetryConfig.batchSettings?.maxExportBatchSize ||
|
||||
DEFAULT_TELEMETRY_CONFIG.batchSettings.maxExportBatchSize,
|
||||
scheduledDelayMillis:
|
||||
telemetryConfig.batchSettings?.scheduledDelayMillis ||
|
||||
DEFAULT_TELEMETRY_CONFIG.batchSettings.scheduledDelayMillis,
|
||||
exportTimeoutMillis:
|
||||
telemetryConfig.batchSettings?.exportTimeoutMillis ||
|
||||
DEFAULT_TELEMETRY_CONFIG.batchSettings.exportTimeoutMillis,
|
||||
})
|
||||
|
||||
const configResource = resourceFromAttributes({
|
||||
[SemanticResourceAttributes.SERVICE_NAME]: telemetryConfig.serviceName,
|
||||
[SemanticResourceAttributes.SERVICE_VERSION]: telemetryConfig.serviceVersion,
|
||||
[SemanticResourceAttributes.DEPLOYMENT_ENVIRONMENT]: process.env.NODE_ENV,
|
||||
})
|
||||
|
||||
const sdk = new NodeSDK({
|
||||
resource: configResource,
|
||||
spanProcessors: [spanProcessor],
|
||||
})
|
||||
|
||||
sdk.start()
|
||||
|
||||
const shutdownHandler = async () => {
|
||||
await sdk
|
||||
.shutdown()
|
||||
.then(() => logger.info('OpenTelemetry SDK shut down successfully'))
|
||||
.catch((err) => logger.error('Error shutting down OpenTelemetry SDK', err))
|
||||
}
|
||||
|
||||
process.on('SIGTERM', shutdownHandler)
|
||||
process.on('SIGINT', shutdownHandler)
|
||||
|
||||
logger.info('OpenTelemetry instrumentation initialized for server-side telemetry')
|
||||
} catch (error) {
|
||||
logger.error('Failed to initialize OpenTelemetry instrumentation', error)
|
||||
}
|
||||
if (typeof window !== 'undefined') {
|
||||
await import('./instrumentation-client')
|
||||
}
|
||||
}
|
||||
|
||||
export const onRequestError = Sentry.captureRequestError
|
||||
|
||||
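The hunk above strips the inline OpenTelemetry setup out of instrumentation.ts (122 lines down to 9) now that it lives in instrumentation-server.ts. Because the rendered diff carries no +/- markers, the before/after is hard to follow; judging from the hunk header, the trimmed file appears to reduce to roughly the shape below. This is an inference, not a verbatim copy of the committed file.

export async function register() {
  if (process.env.NEXT_RUNTIME === 'nodejs') {
    await import('./instrumentation-server')
  }

  if (typeof window !== 'undefined') {
    await import('./instrumentation-client')
  }
}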
@@ -4,17 +4,24 @@ import { organizationClient } from 'better-auth/client/plugins'
|
||||
import { createAuthClient } from 'better-auth/react'
|
||||
import { isProd } from '@/lib/environment'
|
||||
|
||||
const clientEnv = {
|
||||
NEXT_PUBLIC_VERCEL_URL: process.env.NEXT_PUBLIC_VERCEL_URL,
|
||||
NODE_ENV: process.env.NODE_ENV,
|
||||
VERCEL_ENV: process.env.VERCEL_ENV || '',
|
||||
BETTER_AUTH_URL: process.env.BETTER_AUTH_URL || 'http://localhost:3000',
|
||||
}
|
||||
|
||||
export function getBaseURL() {
|
||||
let baseURL
|
||||
|
||||
if (process.env.VERCEL_ENV === 'preview') {
|
||||
baseURL = `https://${process.env.NEXT_PUBLIC_VERCEL_URL}`
|
||||
} else if (process.env.VERCEL_ENV === 'development') {
|
||||
baseURL = `https://${process.env.NEXT_PUBLIC_VERCEL_URL}`
|
||||
} else if (process.env.VERCEL_ENV === 'production') {
|
||||
baseURL = process.env.BETTER_AUTH_URL
|
||||
} else if (process.env.NODE_ENV === 'development') {
|
||||
baseURL = process.env.BETTER_AUTH_URL
|
||||
if (clientEnv.VERCEL_ENV === 'preview') {
|
||||
baseURL = `https://${clientEnv.NEXT_PUBLIC_VERCEL_URL}`
|
||||
} else if (clientEnv.VERCEL_ENV === 'development') {
|
||||
baseURL = `https://${clientEnv.NEXT_PUBLIC_VERCEL_URL}`
|
||||
} else if (clientEnv.VERCEL_ENV === 'production') {
|
||||
baseURL = clientEnv.BETTER_AUTH_URL
|
||||
} else if (clientEnv.NODE_ENV === 'development') {
|
||||
baseURL = clientEnv.BETTER_AUTH_URL
|
||||
}
|
||||
|
||||
return baseURL
|
||||
|
||||
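getBaseURL() above resolves the better-auth base URL from the client-safe env subset (Vercel preview, development, and production, then local development). A typical consumer later in the same module passes it straight to createAuthClient; the sketch below assumes the plugins visible in this file's imports and is not a verbatim copy of the repo's call.

export const client = createAuthClient({
  // When baseURL is undefined, better-auth falls back to the current origin
  baseURL: getBaseURL(),
  plugins: [organizationClient()],
})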
@@ -16,21 +16,22 @@ import {
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { db } from '@/db'
|
||||
import * as schema from '@/db/schema'
|
||||
import { env } from './env'
|
||||
|
||||
const logger = createLogger('Auth')
|
||||
|
||||
const isProd = process.env.NODE_ENV === 'production'
|
||||
const isProd = env.NODE_ENV === 'production'
|
||||
|
||||
// Only initialize Stripe if the key is provided
|
||||
// This allows local development without a Stripe account
|
||||
const validStripeKey =
|
||||
process.env.STRIPE_SECRET_KEY &&
|
||||
process.env.STRIPE_SECRET_KEY.trim() !== '' &&
|
||||
process.env.STRIPE_SECRET_KEY !== 'placeholder'
|
||||
env.STRIPE_SECRET_KEY &&
|
||||
env.STRIPE_SECRET_KEY.trim() !== '' &&
|
||||
env.STRIPE_SECRET_KEY !== 'placeholder'
|
||||
|
||||
let stripeClient = null
|
||||
if (validStripeKey) {
|
||||
stripeClient = new Stripe(process.env.STRIPE_SECRET_KEY || '', {
|
||||
stripeClient = new Stripe(env.STRIPE_SECRET_KEY || '', {
|
||||
apiVersion: '2025-02-24.acacia',
|
||||
})
|
||||
}
|
||||
@@ -39,12 +40,10 @@ if (validStripeKey) {
|
||||
// In that case, we don't want to send emails and just log them
|
||||
|
||||
const validResendAPIKEY =
|
||||
process.env.RESEND_API_KEY &&
|
||||
process.env.RESEND_API_KEY.trim() !== '' &&
|
||||
process.env.RESEND_API_KEY !== 'placeholder'
|
||||
env.RESEND_API_KEY && env.RESEND_API_KEY.trim() !== '' && env.RESEND_API_KEY !== 'placeholder'
|
||||
|
||||
const resend = validResendAPIKEY
|
||||
? new Resend(process.env.RESEND_API_KEY)
|
||||
? new Resend(env.RESEND_API_KEY)
|
||||
: {
|
||||
emails: {
|
||||
send: async (...args: any[]) => {
|
||||
@@ -121,13 +120,13 @@ export const auth = betterAuth({
|
||||
},
|
||||
socialProviders: {
|
||||
github: {
|
||||
clientId: process.env.GITHUB_CLIENT_ID as string,
|
||||
clientSecret: process.env.GITHUB_CLIENT_SECRET as string,
|
||||
clientId: env.GITHUB_CLIENT_ID as string,
|
||||
clientSecret: env.GITHUB_CLIENT_SECRET as string,
|
||||
scopes: ['user:email', 'repo'],
|
||||
},
|
||||
google: {
|
||||
clientId: process.env.GOOGLE_CLIENT_ID as string,
|
||||
clientSecret: process.env.GOOGLE_CLIENT_SECRET as string,
|
||||
clientId: env.GOOGLE_CLIENT_ID as string,
|
||||
clientSecret: env.GOOGLE_CLIENT_SECRET as string,
|
||||
scopes: [
|
||||
'https://www.googleapis.com/auth/userinfo.email',
|
||||
'https://www.googleapis.com/auth/userinfo.profile',
|
||||
@@ -209,15 +208,15 @@ export const auth = betterAuth({
|
||||
config: [
|
||||
{
|
||||
providerId: 'github-repo',
|
||||
clientId: process.env.GITHUB_REPO_CLIENT_ID as string,
|
||||
clientSecret: process.env.GITHUB_REPO_CLIENT_SECRET as string,
|
||||
clientId: env.GITHUB_REPO_CLIENT_ID as string,
|
||||
clientSecret: env.GITHUB_REPO_CLIENT_SECRET as string,
|
||||
authorizationUrl: 'https://github.com/login/oauth/authorize',
|
||||
accessType: 'offline',
|
||||
prompt: 'consent',
|
||||
tokenUrl: 'https://github.com/login/oauth/access_token',
|
||||
userInfoUrl: 'https://api.github.com/user',
|
||||
scopes: ['user:email', 'repo', 'read:user', 'workflow'],
|
||||
redirectURI: `${process.env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/github-repo`,
|
||||
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/github-repo`,
|
||||
getUserInfo: async (tokens) => {
|
||||
try {
|
||||
// Fetch user profile
|
||||
@@ -290,8 +289,8 @@ export const auth = betterAuth({
|
||||
// Google providers for different purposes
|
||||
{
|
||||
providerId: 'google-email',
|
||||
clientId: process.env.GOOGLE_CLIENT_ID as string,
|
||||
clientSecret: process.env.GOOGLE_CLIENT_SECRET as string,
|
||||
clientId: env.GOOGLE_CLIENT_ID as string,
|
||||
clientSecret: env.GOOGLE_CLIENT_SECRET as string,
|
||||
discoveryUrl: 'https://accounts.google.com/.well-known/openid-configuration',
|
||||
accessType: 'offline',
|
||||
scopes: [
|
||||
@@ -303,12 +302,12 @@ export const auth = betterAuth({
|
||||
'https://www.googleapis.com/auth/gmail.labels',
|
||||
],
|
||||
prompt: 'consent',
|
||||
redirectURI: `${process.env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/google-email`,
|
||||
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/google-email`,
|
||||
},
|
||||
{
|
||||
providerId: 'google-calendar',
|
||||
clientId: process.env.GOOGLE_CLIENT_ID as string,
|
||||
clientSecret: process.env.GOOGLE_CLIENT_SECRET as string,
|
||||
clientId: env.GOOGLE_CLIENT_ID as string,
|
||||
clientSecret: env.GOOGLE_CLIENT_SECRET as string,
|
||||
discoveryUrl: 'https://accounts.google.com/.well-known/openid-configuration',
|
||||
accessType: 'offline',
|
||||
scopes: [
|
||||
@@ -317,12 +316,12 @@ export const auth = betterAuth({
|
||||
'https://www.googleapis.com/auth/calendar',
|
||||
],
|
||||
prompt: 'consent',
|
||||
redirectURI: `${process.env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/google-calendar`,
|
||||
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/google-calendar`,
|
||||
},
|
||||
{
|
||||
providerId: 'google-drive',
|
||||
clientId: process.env.GOOGLE_CLIENT_ID as string,
|
||||
clientSecret: process.env.GOOGLE_CLIENT_SECRET as string,
|
||||
clientId: env.GOOGLE_CLIENT_ID as string,
|
||||
clientSecret: env.GOOGLE_CLIENT_SECRET as string,
|
||||
discoveryUrl: 'https://accounts.google.com/.well-known/openid-configuration',
|
||||
accessType: 'offline',
|
||||
scopes: [
|
||||
@@ -331,12 +330,12 @@ export const auth = betterAuth({
|
||||
'https://www.googleapis.com/auth/drive.file',
|
||||
],
|
||||
prompt: 'consent',
|
||||
redirectURI: `${process.env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/google-drive`,
|
||||
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/google-drive`,
|
||||
},
|
||||
{
|
||||
providerId: 'google-docs',
|
||||
clientId: process.env.GOOGLE_CLIENT_ID as string,
|
||||
clientSecret: process.env.GOOGLE_CLIENT_SECRET as string,
|
||||
clientId: env.GOOGLE_CLIENT_ID as string,
|
||||
clientSecret: env.GOOGLE_CLIENT_SECRET as string,
|
||||
discoveryUrl: 'https://accounts.google.com/.well-known/openid-configuration',
|
||||
accessType: 'offline',
|
||||
scopes: [
|
||||
@@ -345,12 +344,12 @@ export const auth = betterAuth({
|
||||
'https://www.googleapis.com/auth/drive.file',
|
||||
],
|
||||
prompt: 'consent',
|
||||
redirectURI: `${process.env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/google-docs`,
|
||||
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/google-docs`,
|
||||
},
|
||||
{
|
||||
providerId: 'google-sheets',
|
||||
clientId: process.env.GOOGLE_CLIENT_ID as string,
|
||||
clientSecret: process.env.GOOGLE_CLIENT_SECRET as string,
|
||||
clientId: env.GOOGLE_CLIENT_ID as string,
|
||||
clientSecret: env.GOOGLE_CLIENT_SECRET as string,
|
||||
discoveryUrl: 'https://accounts.google.com/.well-known/openid-configuration',
|
||||
accessType: 'offline',
|
||||
scopes: [
|
||||
@@ -360,14 +359,14 @@ export const auth = betterAuth({
|
||||
'https://www.googleapis.com/auth/drive.file',
|
||||
],
|
||||
prompt: 'consent',
|
||||
redirectURI: `${process.env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/google-sheets`,
|
||||
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/google-sheets`,
|
||||
},
|
||||
|
||||
// Supabase provider
|
||||
{
|
||||
providerId: 'supabase',
|
||||
clientId: process.env.SUPABASE_CLIENT_ID as string,
|
||||
clientSecret: process.env.SUPABASE_CLIENT_SECRET as string,
|
||||
clientId: env.SUPABASE_CLIENT_ID as string,
|
||||
clientSecret: env.SUPABASE_CLIENT_SECRET as string,
|
||||
authorizationUrl: 'https://api.supabase.com/v1/oauth/authorize',
|
||||
tokenUrl: 'https://api.supabase.com/v1/oauth/token',
|
||||
// Supabase doesn't have a standard userInfo endpoint that works with our flow,
|
||||
@@ -376,7 +375,7 @@ export const auth = betterAuth({
|
||||
scopes: ['database.read', 'database.write', 'projects.read'],
|
||||
responseType: 'code',
|
||||
pkce: true,
|
||||
redirectURI: `${process.env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/supabase`,
|
||||
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/supabase`,
|
||||
getUserInfo: async (tokens) => {
|
||||
try {
|
||||
logger.info('Creating Supabase user profile from token data')
|
||||
@@ -422,8 +421,8 @@ export const auth = betterAuth({
|
||||
// X provider
|
||||
{
|
||||
providerId: 'x',
|
||||
clientId: process.env.X_CLIENT_ID as string,
|
||||
clientSecret: process.env.X_CLIENT_SECRET as string,
|
||||
clientId: env.X_CLIENT_ID as string,
|
||||
clientSecret: env.X_CLIENT_SECRET as string,
|
||||
authorizationUrl: 'https://x.com/i/oauth2/authorize',
|
||||
tokenUrl: 'https://api.x.com/2/oauth2/token',
|
||||
userInfoUrl: 'https://api.x.com/2/users/me',
|
||||
@@ -433,7 +432,7 @@ export const auth = betterAuth({
|
||||
responseType: 'code',
|
||||
prompt: 'consent',
|
||||
authentication: 'basic',
|
||||
redirectURI: `${process.env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/x`,
|
||||
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/x`,
|
||||
getUserInfo: async (tokens) => {
|
||||
try {
|
||||
const response = await fetch(
|
||||
@@ -481,8 +480,8 @@ export const auth = betterAuth({
|
||||
// Confluence provider
|
||||
{
|
||||
providerId: 'confluence',
|
||||
clientId: process.env.CONFLUENCE_CLIENT_ID as string,
|
||||
clientSecret: process.env.CONFLUENCE_CLIENT_SECRET as string,
|
||||
clientId: env.CONFLUENCE_CLIENT_ID as string,
|
||||
clientSecret: env.CONFLUENCE_CLIENT_SECRET as string,
|
||||
authorizationUrl: 'https://auth.atlassian.com/authorize',
|
||||
tokenUrl: 'https://auth.atlassian.com/oauth/token',
|
||||
userInfoUrl: 'https://api.atlassian.com/me',
|
||||
@@ -492,7 +491,7 @@ export const auth = betterAuth({
|
||||
accessType: 'offline',
|
||||
authentication: 'basic',
|
||||
prompt: 'consent',
|
||||
redirectURI: `${process.env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/confluence`,
|
||||
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/confluence`,
|
||||
getUserInfo: async (tokens) => {
|
||||
try {
|
||||
const response = await fetch('https://api.atlassian.com/me', {
|
||||
@@ -532,8 +531,8 @@ export const auth = betterAuth({
|
||||
// Discord provider
|
||||
{
|
||||
providerId: 'discord',
|
||||
clientId: process.env.DISCORD_CLIENT_ID as string,
|
||||
clientSecret: process.env.DISCORD_CLIENT_SECRET as string,
|
||||
clientId: env.DISCORD_CLIENT_ID as string,
|
||||
clientSecret: env.DISCORD_CLIENT_SECRET as string,
|
||||
authorizationUrl: 'https://discord.com/api/oauth2/authorize',
|
||||
tokenUrl: 'https://discord.com/api/oauth2/token',
|
||||
userInfoUrl: 'https://discord.com/api/users/@me',
|
||||
@@ -542,7 +541,7 @@ export const auth = betterAuth({
|
||||
accessType: 'offline',
|
||||
authentication: 'basic',
|
||||
prompt: 'consent',
|
||||
redirectURI: `${process.env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/discord`,
|
||||
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/discord`,
|
||||
getUserInfo: async (tokens) => {
|
||||
try {
|
||||
const response = await fetch('https://discord.com/api/users/@me', {
|
||||
@@ -583,8 +582,8 @@ export const auth = betterAuth({
|
||||
// Jira provider
|
||||
{
|
||||
providerId: 'jira',
|
||||
clientId: process.env.JIRA_CLIENT_ID as string,
|
||||
clientSecret: process.env.JIRA_CLIENT_SECRET as string,
|
||||
clientId: env.JIRA_CLIENT_ID as string,
|
||||
clientSecret: env.JIRA_CLIENT_SECRET as string,
|
||||
authorizationUrl: 'https://auth.atlassian.com/authorize',
|
||||
tokenUrl: 'https://auth.atlassian.com/oauth/token',
|
||||
userInfoUrl: 'https://api.atlassian.com/me',
|
||||
@@ -613,7 +612,7 @@ export const auth = betterAuth({
|
||||
accessType: 'offline',
|
||||
authentication: 'basic',
|
||||
prompt: 'consent',
|
||||
redirectURI: `${process.env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/jira`,
|
||||
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/jira`,
|
||||
getUserInfo: async (tokens) => {
|
||||
try {
|
||||
const response = await fetch('https://api.atlassian.com/me', {
|
||||
@@ -653,8 +652,8 @@ export const auth = betterAuth({
|
||||
// Airtable provider
|
||||
{
|
||||
providerId: 'airtable',
|
||||
clientId: process.env.AIRTABLE_CLIENT_ID as string,
|
||||
clientSecret: process.env.AIRTABLE_CLIENT_SECRET as string,
|
||||
clientId: env.AIRTABLE_CLIENT_ID as string,
|
||||
clientSecret: env.AIRTABLE_CLIENT_SECRET as string,
|
||||
authorizationUrl: 'https://airtable.com/oauth2/v1/authorize',
|
||||
tokenUrl: 'https://airtable.com/oauth2/v1/token',
|
||||
userInfoUrl: 'https://api.airtable.com/v0/meta/whoami',
|
||||
@@ -664,14 +663,14 @@ export const auth = betterAuth({
|
||||
accessType: 'offline',
|
||||
authentication: 'basic',
|
||||
prompt: 'consent',
|
||||
redirectURI: `${process.env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/airtable`,
|
||||
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/airtable`,
|
||||
},
|
||||
|
||||
// Notion provider
|
||||
{
|
||||
providerId: 'notion',
|
||||
clientId: process.env.NOTION_CLIENT_ID as string,
|
||||
clientSecret: process.env.NOTION_CLIENT_SECRET as string,
|
||||
clientId: env.NOTION_CLIENT_ID as string,
|
||||
clientSecret: env.NOTION_CLIENT_SECRET as string,
|
||||
authorizationUrl: 'https://api.notion.com/v1/oauth/authorize',
|
||||
tokenUrl: 'https://api.notion.com/v1/oauth/token',
|
||||
userInfoUrl: 'https://api.notion.com/v1/users/me',
|
||||
@@ -681,7 +680,7 @@ export const auth = betterAuth({
|
||||
accessType: 'offline',
|
||||
authentication: 'basic',
|
||||
prompt: 'consent',
|
||||
redirectURI: `${process.env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/notion`,
|
||||
redirectURI: `${env.NEXT_PUBLIC_APP_URL}/api/auth/oauth2/callback/notion`,
|
||||
getUserInfo: async (tokens) => {
|
||||
try {
|
||||
const response = await fetch('https://api.notion.com/v1/users/me', {
|
||||
@@ -724,7 +723,7 @@ export const auth = betterAuth({
|
||||
? [
|
||||
stripe({
|
||||
stripeClient,
|
||||
stripeWebhookSecret: process.env.STRIPE_WEBHOOK_SECRET || '',
|
||||
stripeWebhookSecret: env.STRIPE_WEBHOOK_SECRET || '',
|
||||
createCustomerOnSignUp: true,
|
||||
onCustomerCreate: async ({ customer, stripeCustomer, user }, request) => {
|
||||
logger.info('Stripe customer created', {
|
||||
@@ -737,11 +736,9 @@ export const auth = betterAuth({
|
||||
plans: [
|
||||
{
|
||||
name: 'free',
|
||||
priceId: process.env.STRIPE_FREE_PRICE_ID || '',
|
||||
priceId: env.STRIPE_FREE_PRICE_ID || '',
|
||||
limits: {
|
||||
cost: process.env.FREE_TIER_COST_LIMIT
|
||||
? parseInt(process.env.FREE_TIER_COST_LIMIT)
|
||||
: 5,
|
||||
cost: env.FREE_TIER_COST_LIMIT ? parseInt(env.FREE_TIER_COST_LIMIT) : 5,
|
||||
sharingEnabled: 0,
|
||||
multiplayerEnabled: 0,
|
||||
workspaceCollaborationEnabled: 0,
|
||||
@@ -749,11 +746,9 @@ export const auth = betterAuth({
|
||||
},
|
||||
{
|
||||
name: 'pro',
|
||||
priceId: process.env.STRIPE_PRO_PRICE_ID || '',
|
||||
priceId: env.STRIPE_PRO_PRICE_ID || '',
|
||||
limits: {
|
||||
cost: process.env.PRO_TIER_COST_LIMIT
|
||||
? parseInt(process.env.PRO_TIER_COST_LIMIT)
|
||||
: 20,
|
||||
cost: env.PRO_TIER_COST_LIMIT ? parseInt(env.PRO_TIER_COST_LIMIT) : 20,
|
||||
sharingEnabled: 1,
|
||||
multiplayerEnabled: 0,
|
||||
workspaceCollaborationEnabled: 0,
|
||||
@@ -761,11 +756,9 @@ export const auth = betterAuth({
|
||||
},
|
||||
{
|
||||
name: 'team',
|
||||
priceId: process.env.STRIPE_TEAM_PRICE_ID || '',
|
||||
priceId: env.STRIPE_TEAM_PRICE_ID || '',
|
||||
limits: {
|
||||
cost: process.env.TEAM_TIER_COST_LIMIT
|
||||
? parseInt(process.env.TEAM_TIER_COST_LIMIT)
|
||||
: 40, // $40 per seat
|
||||
cost: env.TEAM_TIER_COST_LIMIT ? parseInt(env.TEAM_TIER_COST_LIMIT) : 40, // $40 per seat
|
||||
sharingEnabled: 1,
|
||||
multiplayerEnabled: 1,
|
||||
workspaceCollaborationEnabled: 1,
|
||||
@@ -926,7 +919,7 @@ export const auth = betterAuth({
|
||||
try {
|
||||
const { invitation, organization, inviter } = data
|
||||
|
||||
const inviteUrl = `${process.env.NEXT_PUBLIC_APP_URL}/invite/${invitation.id}`
|
||||
const inviteUrl = `${env.NEXT_PUBLIC_APP_URL}/invite/${invitation.id}`
|
||||
const inviterName = inviter.user?.name || 'A team member'
|
||||
|
||||
const html = await renderInvitationEmail(
|
||||
|
||||
apps/sim/lib/env.ts (new file, 119 lines)
@@ -0,0 +1,119 @@
|
||||
import { createEnv } from '@t3-oss/env-nextjs'
|
||||
import { env as runtimeEnv } from 'next-runtime-env'
|
||||
import { z } from 'zod'
|
||||
|
||||
const getEnv = (variable: string) => runtimeEnv(variable) ?? process.env[variable]
|
||||
|
||||
export const env = createEnv({
|
||||
skipValidation: true,
|
||||
|
||||
server: {
|
||||
DATABASE_URL: z.string().url(),
|
||||
BETTER_AUTH_URL: z.string().url(),
|
||||
BETTER_AUTH_SECRET: z.string().min(32),
|
||||
ENCRYPTION_KEY: z.string().min(32),
|
||||
|
||||
POSTGRES_URL: z.string().url().optional(),
|
||||
STRIPE_SECRET_KEY: z.string().min(1).optional(),
|
||||
STRIPE_WEBHOOK_SECRET: z.string().min(1).optional(),
|
||||
STRIPE_FREE_PRICE_ID: z.string().min(1).optional(),
|
||||
FREE_TIER_COST_LIMIT: z
|
||||
.string()
|
||||
.regex(/^\d+(\.\d+)?$/)
|
||||
.optional(),
|
||||
STRIPE_PRO_PRICE_ID: z.string().min(1).optional(),
|
||||
PRO_TIER_COST_LIMIT: z
|
||||
.string()
|
||||
.regex(/^\d+(\.\d+)?$/)
|
||||
.optional(),
|
||||
STRIPE_TEAM_PRICE_ID: z.string().min(1).optional(),
|
||||
TEAM_TIER_COST_LIMIT: z
|
||||
.string()
|
||||
.regex(/^\d+(\.\d+)?$/)
|
||||
.optional(),
|
||||
STRIPE_ENTERPRISE_PRICE_ID: z.string().min(1).optional(),
|
||||
ENTERPRISE_TIER_COST_LIMIT: z
|
||||
.string()
|
||||
.regex(/^\d+(\.\d+)?$/)
|
||||
.optional(),
|
||||
RESEND_API_KEY: z.string().min(1).optional(),
|
||||
OPENAI_API_KEY: z.string().min(1).optional(),
|
||||
OPENAI_API_KEY_1: z.string().min(1).optional(),
|
||||
OPENAI_API_KEY_2: z.string().min(1).optional(),
|
||||
OPENAI_API_KEY_3: z.string().min(1).optional(),
|
||||
ANTHROPIC_API_KEY_1: z.string().min(1).optional(),
|
||||
ANTHROPIC_API_KEY_2: z.string().min(1).optional(),
|
||||
ANTHROPIC_API_KEY_3: z.string().min(1).optional(),
|
||||
FREESTYLE_API_KEY: z.string().min(1).optional(),
|
||||
TELEMETRY_ENDPOINT: z.string().url().optional(),
|
||||
COST_MULTIPLIER: z
|
||||
.string()
|
||||
.regex(/^\d+(\.\d+)?$/)
|
||||
.optional(),
|
||||
JWT_SECRET: z.string().min(1).optional(),
|
||||
BROWSERBASE_API_KEY: z.string().min(1).optional(),
|
||||
BROWSERBASE_PROJECT_ID: z.string().min(1).optional(),
|
||||
OLLAMA_HOST: z.string().url().optional(),
|
||||
SENTRY_ORG: z.string().optional(),
|
||||
SENTRY_PROJECT: z.string().optional(),
|
||||
SENTRY_AUTH_TOKEN: z.string().optional(),
|
||||
REDIS_URL: z.string().url().optional(),
|
||||
NEXT_TELEMETRY_DISABLED: z.string().optional(),
|
||||
NEXT_RUNTIME: z.string().optional(),
|
||||
VERCEL_ENV: z.string().optional(),
|
||||
AWS_REGION: z.string().optional(),
|
||||
AWS_ACCESS_KEY_ID: z.string().optional(),
|
||||
AWS_SECRET_ACCESS_KEY: z.string().optional(),
|
||||
S3_BUCKET_NAME: z.string().optional(),
|
||||
S3_LOGS_BUCKET_NAME: z.string().optional(),
|
||||
USE_S3: z.coerce.boolean().optional(),
|
||||
CRON_SECRET: z.string().optional(),
|
||||
FREE_PLAN_LOG_RETENTION_DAYS: z.string().optional(),
|
||||
NODE_ENV: z.string().optional(),
|
||||
GITHUB_TOKEN: z.string().optional(),
|
||||
|
||||
// OAuth blocks (all optional)
|
||||
GOOGLE_CLIENT_ID: z.string().optional(),
|
||||
GOOGLE_CLIENT_SECRET: z.string().optional(),
|
||||
GITHUB_CLIENT_ID: z.string().optional(),
|
||||
GITHUB_CLIENT_SECRET: z.string().optional(),
|
||||
GITHUB_REPO_CLIENT_ID: z.string().optional(),
|
||||
GITHUB_REPO_CLIENT_SECRET: z.string().optional(),
|
||||
X_CLIENT_ID: z.string().optional(),
|
||||
X_CLIENT_SECRET: z.string().optional(),
|
||||
CONFLUENCE_CLIENT_ID: z.string().optional(),
|
||||
CONFLUENCE_CLIENT_SECRET: z.string().optional(),
|
||||
JIRA_CLIENT_ID: z.string().optional(),
|
||||
JIRA_CLIENT_SECRET: z.string().optional(),
|
||||
AIRTABLE_CLIENT_ID: z.string().optional(),
|
||||
AIRTABLE_CLIENT_SECRET: z.string().optional(),
|
||||
SUPABASE_CLIENT_ID: z.string().optional(),
|
||||
SUPABASE_CLIENT_SECRET: z.string().optional(),
|
||||
NOTION_CLIENT_ID: z.string().optional(),
|
||||
NOTION_CLIENT_SECRET: z.string().optional(),
|
||||
DISCORD_CLIENT_ID: z.string().optional(),
|
||||
DISCORD_CLIENT_SECRET: z.string().optional(),
|
||||
HUBSPOT_CLIENT_ID: z.string().optional(),
|
||||
HUBSPOT_CLIENT_SECRET: z.string().optional(),
|
||||
DOCKER_BUILD: z.boolean().optional(),
|
||||
},
|
||||
|
||||
client: {
|
||||
NEXT_PUBLIC_APP_URL: z.string().url(),
|
||||
NEXT_PUBLIC_VERCEL_URL: z.string().optional(),
|
||||
NEXT_PUBLIC_SENTRY_DSN: z.string().url().optional(),
|
||||
NEXT_PUBLIC_GOOGLE_CLIENT_ID: z.string().optional(),
|
||||
NEXT_PUBLIC_GOOGLE_API_KEY: z.string().optional(),
|
||||
NEXT_PUBLIC_GOOGLE_PROJECT_NUMBER: z.string().optional(),
|
||||
},
|
||||
|
||||
// Only need to define client variables, server variables are automatically handled
|
||||
experimental__runtimeEnv: {
|
||||
NEXT_PUBLIC_APP_URL: getEnv('NEXT_PUBLIC_APP_URL'),
|
||||
NEXT_PUBLIC_VERCEL_URL: getEnv('NEXT_PUBLIC_VERCEL_URL'),
|
||||
NEXT_PUBLIC_SENTRY_DSN: getEnv('NEXT_PUBLIC_SENTRY_DSN'),
|
||||
NEXT_PUBLIC_GOOGLE_CLIENT_ID: getEnv('NEXT_PUBLIC_GOOGLE_CLIENT_ID'),
|
||||
NEXT_PUBLIC_GOOGLE_API_KEY: getEnv('NEXT_PUBLIC_GOOGLE_API_KEY'),
|
||||
NEXT_PUBLIC_GOOGLE_PROJECT_NUMBER: getEnv('NEXT_PUBLIC_GOOGLE_PROJECT_NUMBER'),
|
||||
},
|
||||
})
|
||||
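With lib/env.ts in place, call sites read configuration through this schema-validated `env` object instead of touching process.env directly, which is the substitution applied throughout the rest of this diff. A minimal usage sketch follows; the workflow id is illustrative, not taken from the repo.

import { env } from '@/lib/env'

// Server-side: typed access, optional keys stay string | undefined
const webhookSecret = env.STRIPE_WEBHOOK_SECRET || ''

// Client-side: only NEXT_PUBLIC_* keys are exposed to the browser bundle
const executeUrl = `${env.NEXT_PUBLIC_APP_URL}/api/workflows/example-id/execute`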
@@ -1,30 +1,39 @@
|
||||
/**
|
||||
* Environment utility functions for consistent environment detection across the application
|
||||
*/
|
||||
import { env } from './env'
|
||||
|
||||
export const getNodeEnv = () => {
|
||||
try {
|
||||
return env.NODE_ENV
|
||||
} catch {
|
||||
return process.env.NODE_ENV
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Is the application running in production mode
|
||||
*/
|
||||
export const isProd = process.env.NODE_ENV === 'production'
|
||||
export const isProd = getNodeEnv() === 'production'
|
||||
|
||||
/**
|
||||
* Is the application running in development mode
|
||||
*/
|
||||
export const isDev = process.env.NODE_ENV === 'development'
|
||||
export const isDev = getNodeEnv() === 'development'
|
||||
|
||||
/**
|
||||
* Is the application running in test mode
|
||||
*/
|
||||
export const isTest = process.env.NODE_ENV === 'test'
|
||||
export const isTest = getNodeEnv() === 'test'
|
||||
|
||||
/**
|
||||
* Is this the hosted version of the application
|
||||
*/
|
||||
export const isHosted = process.env.NEXT_PUBLIC_APP_URL === 'https://www.simstudio.ai'
|
||||
export const isHosted = env.NEXT_PUBLIC_APP_URL === 'https://www.simstudio.ai'
|
||||
|
||||
/**
|
||||
* Get cost multiplier based on environment
|
||||
*/
|
||||
export function getCostMultiplier(): number {
|
||||
return isProd ? parseFloat(process.env.COST_MULTIPLIER!) || 1 : 1
|
||||
return isProd ? parseFloat(env.COST_MULTIPLIER!) || 1 : 1
|
||||
}
|
||||
|
||||
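The helpers above replace scattered `process.env.NODE_ENV` checks; getNodeEnv() falls back to process.env when the validated env object cannot be read. Call sites then reduce to these flags, as the telemetry consent dialog and chat-deploy hunks earlier in this diff show. A condensed sketch:

import { getCostMultiplier, getNodeEnv } from '@/lib/environment'

const isDevelopment = getNodeEnv() === 'development' // as in the telemetry consent dialog hunk
const multiplier = getCostMultiplier()               // returns 1 outside production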
@@ -1,5 +1,6 @@
|
||||
import { FreestyleSandboxes } from 'freestyle-sandboxes'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { env } from './env'
|
||||
|
||||
const logger = createLogger('Freestyle')
|
||||
|
||||
@@ -16,7 +17,7 @@ export async function getFreestyleClient(): Promise<FreestyleSandboxes> {
|
||||
|
||||
try {
|
||||
freestyleInstance = new FreestyleSandboxes({
|
||||
apiKey: process.env.FREESTYLE_API_KEY!, // make sure to set this
|
||||
apiKey: env.FREESTYLE_API_KEY!, // make sure to set this
|
||||
})
|
||||
|
||||
return freestyleInstance
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import OpenAI from 'openai'
|
||||
import { env } from './env'
|
||||
|
||||
/**
|
||||
* Generates a short title for a chat based on the first message
|
||||
@@ -6,7 +7,7 @@ import OpenAI from 'openai'
|
||||
* @returns A short title or null if API key is not available
|
||||
*/
|
||||
export async function generateChatTitle(message: string): Promise<string | null> {
|
||||
const apiKey = process.env.OPENAI_API_KEY
|
||||
const apiKey = env.OPENAI_API_KEY
|
||||
|
||||
if (!apiKey) {
|
||||
return null
|
||||
|
||||
@@ -5,6 +5,7 @@
|
||||
* It is separate from the user-facing logging system in logging.ts.
|
||||
*/
|
||||
import chalk from 'chalk'
|
||||
import { env } from '../env'
|
||||
|
||||
/**
|
||||
* LogLevel enum defines the severity levels for logging
|
||||
@@ -55,7 +56,7 @@ const LOG_CONFIG = {
|
||||
}
|
||||
|
||||
// Get current environment
|
||||
const ENV = process.env.NODE_ENV || 'development'
|
||||
const ENV = (process.env.NODE_ENV || 'development') as keyof typeof LOG_CONFIG
|
||||
const config = LOG_CONFIG[ENV] || LOG_CONFIG.development
|
||||
|
||||
// Format objects for logging
|
||||
|
||||
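The cast on ENV above is what lets TypeScript index LOG_CONFIG with a runtime string; without narrowing to `keyof typeof LOG_CONFIG`, indexing with a plain string triggers an implicit-any index error under strict settings. A self-contained illustration of the same pattern (this LOG_CONFIG is a stand-in, not the repo's actual table):

const LOG_CONFIG = {
  development: { level: 'debug' },
  production: { level: 'info' },
  test: { level: 'silent' },
} as const

// Narrow the runtime string to the table's keys, then fall back safely
const ENV = (process.env.NODE_ENV || 'development') as keyof typeof LOG_CONFIG
const config = LOG_CONFIG[ENV] || LOG_CONFIG.development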
@@ -1,5 +1,6 @@
|
||||
import { Resend } from 'resend'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { env } from './env'
|
||||
|
||||
interface EmailOptions {
|
||||
to: string
|
||||
@@ -27,7 +28,7 @@ interface BatchSendEmailResult {
|
||||
|
||||
const logger = createLogger('Mailer')
|
||||
|
||||
const resendApiKey = process.env.RESEND_API_KEY
|
||||
const resendApiKey = env.RESEND_API_KEY
|
||||
const resend =
|
||||
resendApiKey && resendApiKey !== 'placeholder' && resendApiKey.trim() !== ''
|
||||
? new Resend(resendApiKey)
|
||||
|
||||
@@ -16,6 +16,7 @@ import {
|
||||
xIcon,
|
||||
} from '@/components/icons'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { env } from './env'
|
||||
|
||||
const logger = createLogger('OAuth')
|
||||
|
||||
@@ -410,52 +411,52 @@ export async function refreshOAuthToken(
|
||||
switch (provider) {
|
||||
case 'google':
|
||||
tokenEndpoint = 'https://oauth2.googleapis.com/token'
|
||||
clientId = process.env.GOOGLE_CLIENT_ID
|
||||
clientSecret = process.env.GOOGLE_CLIENT_SECRET
|
||||
clientId = env.GOOGLE_CLIENT_ID
|
||||
clientSecret = env.GOOGLE_CLIENT_SECRET
|
||||
break
|
||||
case 'github':
|
||||
tokenEndpoint = 'https://github.com/login/oauth/access_token'
|
||||
clientId = process.env.GITHUB_CLIENT_ID
|
||||
clientSecret = process.env.GITHUB_CLIENT_SECRET
|
||||
clientId = env.GITHUB_CLIENT_ID
|
||||
clientSecret = env.GITHUB_CLIENT_SECRET
|
||||
break
|
||||
case 'x':
|
||||
tokenEndpoint = 'https://api.x.com/2/oauth2/token'
|
||||
clientId = process.env.X_CLIENT_ID
|
||||
clientSecret = process.env.X_CLIENT_SECRET
|
||||
clientId = env.X_CLIENT_ID
|
||||
clientSecret = env.X_CLIENT_SECRET
|
||||
useBasicAuth = true
|
||||
break
|
||||
case 'confluence':
|
||||
tokenEndpoint = 'https://auth.atlassian.com/oauth/token'
|
||||
clientId = process.env.CONFLUENCE_CLIENT_ID
|
||||
clientSecret = process.env.CONFLUENCE_CLIENT_SECRET
|
||||
clientId = env.CONFLUENCE_CLIENT_ID
|
||||
clientSecret = env.CONFLUENCE_CLIENT_SECRET
|
||||
useBasicAuth = true
|
||||
break
|
||||
case 'jira':
|
||||
tokenEndpoint = 'https://auth.atlassian.com/oauth/token'
|
||||
clientId = process.env.JIRA_CLIENT_ID
|
||||
clientSecret = process.env.JIRA_CLIENT_SECRET
|
||||
clientId = env.JIRA_CLIENT_ID
|
||||
clientSecret = env.JIRA_CLIENT_SECRET
|
||||
useBasicAuth = true
|
||||
break
|
||||
case 'airtable':
|
||||
tokenEndpoint = 'https://airtable.com/oauth2/v1/token'
|
||||
clientId = process.env.AIRTABLE_CLIENT_ID
|
||||
clientSecret = process.env.AIRTABLE_CLIENT_SECRET
|
||||
clientId = env.AIRTABLE_CLIENT_ID
|
||||
clientSecret = env.AIRTABLE_CLIENT_SECRET
|
||||
useBasicAuth = true
|
||||
break
|
||||
case 'supabase':
|
||||
tokenEndpoint = 'https://api.supabase.com/v1/oauth/token'
|
||||
clientId = process.env.SUPABASE_CLIENT_ID
|
||||
clientSecret = process.env.SUPABASE_CLIENT_SECRET
|
||||
clientId = env.SUPABASE_CLIENT_ID
|
||||
clientSecret = env.SUPABASE_CLIENT_SECRET
|
||||
break
|
||||
case 'notion':
|
||||
tokenEndpoint = 'https://api.notion.com/v1/oauth/token'
|
||||
clientId = process.env.NOTION_CLIENT_ID
|
||||
clientSecret = process.env.NOTION_CLIENT_SECRET
|
||||
clientId = env.NOTION_CLIENT_ID
|
||||
clientSecret = env.NOTION_CLIENT_SECRET
|
||||
break
|
||||
case 'discord':
|
||||
tokenEndpoint = 'https://discord.com/api/v10/oauth2/token'
|
||||
clientId = process.env.DISCORD_CLIENT_ID
|
||||
clientSecret = process.env.DISCORD_CLIENT_SECRET
|
||||
clientId = env.DISCORD_CLIENT_ID
|
||||
clientSecret = env.DISCORD_CLIENT_SECRET
|
||||
useBasicAuth = true
|
||||
break
|
||||
default:
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import Redis from 'ioredis'
|
||||
import { createLogger } from '@/lib/logs/console-logger'
|
||||
import { env } from './env'
|
||||
|
||||
const logger = createLogger('Redis')
|
||||
|
||||
// Default to localhost if REDIS_URL is not provided
|
||||
const redisUrl = process.env.REDIS_URL || 'redis://localhost:6379'
|
||||
const redisUrl = env.REDIS_URL || 'redis://localhost:6379'
|
||||
|
||||
// Global Redis client for connection pooling
|
||||
// This is important for serverless environments like Vercel
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.