Mirror of https://github.com/simstudioai/sim.git, synced 2026-01-08 22:48:14 -05:00
* migrate to bun
* added envvars to drizzle
* upgrade bun devcontainer feature to a valid one
* added bun, docker not working
* updated envvars, updated to bundler and esnext modules
* fixed build, reinstated otel
* feat: optimized multi-stage docker images
* add coerce for boolean envvar
* feat: add docker-compose configuration for local LLM services and remove legacy Dockerfile and entrypoint script
* feat: add docker-compose files for local and production environments, and implement GitHub Actions for Docker image build and publish
* refactor: remove unused generateStaticParams function from various API routes and maintain dynamic rendering
* cleanup
* upgraded bun
* updated ci
* fixed build

---------

Co-authored-by: Aditya Tripathi <aditya@climactic.co>
49 lines · 1.2 KiB · YAML
# Docker Compose definitions for running a local Ollama LLM server.
# Exactly one of the two mutually exclusive profiles should be activated:
#   --profile local-gpu  -> GPU-backed service (requires the NVIDIA container toolkit)
#   --profile local-cpu  -> CPU-only service
# Both services publish the same host port (11434), so they must never run together.
services:
  local-llm-gpu:
    profiles:
      - local-gpu # This profile requires both 'local' and 'gpu'
    image: ollama/ollama:latest
    pull_policy: always
    volumes:
      # Persist downloaded models on the host between container restarts.
      - ${HOME}/.ollama:/root/.ollama
    ports:
      # Quoted to avoid YAML's sexagesimal-number trap; Ollama's default API port.
      - '11434:11434'
    environment:
      # Expose all NVIDIA driver capabilities (compute, utility, video, ...) to the container.
      - NVIDIA_DRIVER_CAPABILITIES=all
      # -1 disables the model-load timeout (large models can take a long time to load).
      - OLLAMA_LOAD_TIMEOUT=-1
      # -1 keeps loaded models resident indefinitely instead of unloading after idle.
      - OLLAMA_KEEP_ALIVE=-1
      - OLLAMA_DEBUG=1
    command: 'serve'
    deploy:
      resources:
        reservations:
          devices:
            # Reserve every available NVIDIA GPU for this container.
            - driver: nvidia
              count: all
              capabilities: [gpu]
    healthcheck:
      # Ollama answers plain HTTP on its API root when healthy.
      # NOTE(review): assumes curl is present in the ollama/ollama image — verify.
      test: ['CMD', 'curl', '-f', 'http://localhost:11434/']
      interval: 10s
      timeout: 5s
      retries: 5

  local-llm-cpu:
    profiles:
      - local-cpu # This profile requires both 'local' and 'cpu'
    image: ollama/ollama:latest
    pull_policy: always
    volumes:
      # Same host model cache as the GPU variant, so models are shared.
      - ${HOME}/.ollama:/root/.ollama
    ports:
      - '11434:11434'
    environment:
      # -1 disables the model-load timeout (large models can take a long time to load).
      - OLLAMA_LOAD_TIMEOUT=-1
      # -1 keeps loaded models resident indefinitely instead of unloading after idle.
      - OLLAMA_KEEP_ALIVE=-1
      - OLLAMA_DEBUG=1
    command: 'serve'
    healthcheck:
      # NOTE(review): assumes curl is present in the ollama/ollama image — verify.
      test: ['CMD', 'curl', '-f', 'http://localhost:11434/']
      interval: 10s
      timeout: 5s
      retries: 5