feat(bun): upgrade to bun, reduce docker image size by 95%, upgrade docs & ci (#371)

* migrate to bun

* added envvars to drizzle

* upgrade bun devcontainer feature to a valid one

* added bun, docker not working

* updated envvars, updated to bundler and esnext modules

* fixed build, reinstated otel

* feat: optimized multi-stage docker images

* add coerce for boolean envvar

* feat: add docker-compose configuration for local LLM services and remove legacy Dockerfile and entrypoint script

* feat: add docker-compose files for local and production environments, and implement GitHub Actions for Docker image build and publish

* refactor: remove unused generateStaticParams function from various API routes and maintain dynamic rendering

* cleanup

* upgraded bun

* updated ci

* fixed build

---------

Co-authored-by: Aditya Tripathi <aditya@climactic.co>
This commit is contained in:
Waleed Latif
2025-05-18 01:01:32 -07:00
committed by GitHub
parent e57d3f79a1
commit 717e17d02a
144 changed files with 5173 additions and 53421 deletions

View File

@@ -16,7 +16,7 @@ import {
downloadFromS3,
FileInfo,
getPresignedUrl,
s3Client,
getS3Client,
uploadToS3,
} from './s3-client'
@@ -57,6 +57,8 @@ vi.mock('@/lib/logs/console-logger', () => ({
}),
}))
const s3Client = getS3Client()
describe('S3 Client', () => {
let mockDate: Date
let originalDateNow: typeof Date.now

View File

@@ -5,16 +5,33 @@ import {
S3Client,
} from '@aws-sdk/client-s3'
import { getSignedUrl } from '@aws-sdk/s3-request-presigner'
import { env } from '../env'
import { S3_CONFIG } from './setup'
// Create an S3 client
export const s3Client = new S3Client({
region: S3_CONFIG.region || '',
credentials: {
accessKeyId: process.env.AWS_ACCESS_KEY_ID || '',
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY || '',
},
})
// Lazily create a single S3 client instance so the AWS SDK is only
// initialized the first time S3 access is actually needed.
let _s3Client: S3Client | null = null

/**
 * Returns the shared S3 client, creating it on first use.
 *
 * Credentials are read from the validated env module; an empty string is
 * passed through when a key is unset (the SDK will then fall back to its
 * own credential chain or fail on the first request).
 *
 * @returns the memoized `S3Client` instance
 * @throws Error if `S3_CONFIG.region` is not configured (set `AWS_REGION`)
 */
export function getS3Client(): S3Client {
  if (_s3Client) return _s3Client

  const { region } = S3_CONFIG
  if (!region) {
    // Fail fast with an actionable message instead of letting the SDK
    // surface a vaguer region-resolution error on the first request.
    throw new Error(
      'AWS region is missing; set AWS_REGION in your environment or disable S3 uploads.'
    )
  }

  _s3Client = new S3Client({
    region,
    credentials: {
      accessKeyId: env.AWS_ACCESS_KEY_ID || '',
      secretAccessKey: env.AWS_SECRET_ACCESS_KEY || '',
    },
  })

  return _s3Client
}
/**
* File information structure
@@ -46,6 +63,8 @@ export async function uploadToS3(
const safeFileName = fileName.replace(/\s+/g, '-') // Replace spaces with hyphens
const uniqueKey = `${Date.now()}-${safeFileName}`
const s3Client = getS3Client()
// Upload the file to S3
await s3Client.send(
new PutObjectCommand({
@@ -85,7 +104,7 @@ export async function getPresignedUrl(key: string, expiresIn = 3600) {
Key: key,
})
return getSignedUrl(s3Client, command, { expiresIn })
return getSignedUrl(getS3Client(), command, { expiresIn })
}
/**
@@ -99,7 +118,7 @@ export async function downloadFromS3(key: string) {
Key: key,
})
const response = await s3Client.send(command)
const response = await getS3Client().send(command)
const stream = response.Body as any
// Convert stream to buffer
@@ -116,7 +135,7 @@ export async function downloadFromS3(key: string) {
* @param key S3 object key
*/
export async function deleteFromS3(key: string) {
await s3Client.send(
await getS3Client().send(
new DeleteObjectCommand({
Bucket: S3_CONFIG.bucket,
Key: key,

View File

@@ -1,4 +1,5 @@
import { createLogger } from '@/lib/logs/console-logger'
import { env } from '../env'
import { ensureUploadsDirectory, USE_S3_STORAGE } from './setup'
const logger = createLogger('UploadsSetup')
@@ -10,7 +11,7 @@ if (typeof process !== 'undefined') {
if (USE_S3_STORAGE) {
// Verify AWS credentials
if (!process.env.AWS_ACCESS_KEY_ID || !process.env.AWS_SECRET_ACCESS_KEY) {
if (!env.AWS_ACCESS_KEY_ID || !env.AWS_SECRET_ACCESS_KEY) {
logger.warn('AWS credentials are not set in environment variables.')
logger.warn('Set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY for S3 storage.')
} else {

View File

@@ -3,6 +3,7 @@ import { mkdir } from 'fs/promises'
import { join } from 'path'
import path from 'path'
import { createLogger } from '@/lib/logs/console-logger'
import { env } from '../env'
const logger = createLogger('UploadsSetup')
@@ -12,11 +13,11 @@ const PROJECT_ROOT = path.resolve(process.cwd())
// Define the upload directory path using project root
export const UPLOAD_DIR = join(PROJECT_ROOT, 'uploads')
export const USE_S3_STORAGE = process.env.NODE_ENV === 'production' || process.env.USE_S3 === 'true'
export const USE_S3_STORAGE = env.NODE_ENV === 'production' || env.USE_S3
export const S3_CONFIG = {
bucket: process.env.S3_BUCKET_NAME || '',
region: process.env.AWS_REGION || '',
bucket: env.S3_BUCKET_NAME || '',
region: env.AWS_REGION || '',
}
/**