sim/start_simstudio_docker.sh
Arunabh Sharma fe2c7d8d98 feat(ollama): add Ollama to enable local model agents (#153)
* feat(ollama): add the ollama package dependency, add two separate deployment docker compose files, and add a shell script to toggle between them

add base ollama.ts implementation

add initial support for fetching Ollama models dynamically

fix Ollama dynamic model fetching; models are now rendered in the GUI

fix package.json and package-lock.json to remove the ollama dependency, and add types.ts for Ollama

switch MODEL_PROVIDERS to getModelProviders

make the dynamic Ollama model dropdown update via the Zustand store

apply the dynamic Ollama model changes to the router and evaluator TypeScript files too

* feat(ollama): fix evaluated options by de-duplicating them

* feat(ollama): update README.md to reflect the local model workflow

* feat(ollama): add a base non-Ollama docker compose file, and add a --local flag to start_simstudio_docker.sh for the Ollama service (usage sketched after this list)

* feat(ollama): fix README.md local model instructions

* feat(ollama): remove the de-duplication logic and split getModelProviders into two functions

* fix non-local init and translate.ts

* create a combined docker-compose file and fix the start_simstudio_docker script too

* update package-lock.json

* feat(ollama): fix README.md instructions and docker compose
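
For reference, a minimal command-line sketch of the toggle described above; the profile names (`local-gpu`, `local-cpu`) are taken from the script shown below:

```bash
./start_simstudio_docker.sh            # default stack, no local LLM
./start_simstudio_docker.sh --local    # adds the Ollama service; GPU is auto-detected

# Under the hood, --local resolves to one of these compose profiles:
docker compose --profile local-gpu up --build -d   # when nvidia-smi succeeds
docker compose --profile local-cpu up --build -d   # CPU fallback
```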

---------

Co-authored-by: Arunabh Sharma <arunabh.sharma@supernal.aero>
2025-03-29 13:34:44 -07:00

70 lines
1.9 KiB
Bash
Executable File

#!/bin/bash

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
SIM_DIR="$SCRIPT_DIR/sim"

# Function to display help
show_help() {
    echo "Usage: $0 [OPTIONS]"
    echo
    echo "Start Sim Studio with Docker containers"
    echo
    echo "Options:"
    echo "  -h, --help    Show this help message"
    echo "  --local       Use local LLM configuration with Ollama service"
    echo
    echo "Examples:"
    echo "  $0            # Start without local LLM"
    echo "  $0 --local    # Start with local LLM (requires GPU)"
    echo
    echo "Note: When using the --local flag, GPU availability is automatically detected"
    echo "      and the appropriate configuration is used."
    exit 0
}
# Parse command line arguments
LOCAL=false
while [[ "$#" -gt 0 ]]; do
    case $1 in
        -h|--help) show_help ;;
        --local) LOCAL=true ;;
        *) echo "Unknown parameter: $1"; echo "Use -h or --help for usage information"; exit 1 ;;
    esac
    shift
done
# Check if the .env file exists; if not, create it from the example
if [ ! -f "$SIM_DIR/.env" ]; then
    echo "Creating .env file from .env.example..."
    cp "$SIM_DIR/.env.example" "$SIM_DIR/.env"
    echo "Please update the .env file with your configuration."
else
    echo ".env file found."
fi
# Stop any running containers
docker compose down

# Build and start containers
if [ "$LOCAL" = true ]; then
    if nvidia-smi &> /dev/null; then
        # GPU detected: run the local LLM with the GPU profile
        docker compose --profile local-gpu up --build -d
    else
        # No GPU detected: fall back to the CPU profile for the local LLM
        docker compose --profile local-cpu up --build -d
    fi
else
    docker compose up --build -d
fi
# Wait for database to be ready
echo "Waiting for database to be ready..."
sleep 5
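# NOTE: the fixed sleep is a best-effort wait; a readiness probe would be more
# robust, e.g. (assuming a Postgres service named "db", which is not shown here):
#   until docker compose exec db pg_isready &> /dev/null; do sleep 1; done
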
# Apply migrations automatically
echo "Applying database migrations..."
docker compose exec simstudio npm run db:push

echo "Sim Studio is now running at http://localhost:3000"
echo "To view logs, run: docker compose logs -f simstudio"