feat(ollama): Adding ollama for enabling local model agents (#153)

* feat(ollama): add ollama package dependency, add two separate deployment docker compose files and add a shell script to toggle between the deployment docker compose files

add base ollama.ts implementation

add latest attempt to fetch Ollama models dynamically

fix ollama dynamic model fetching, models now being rendered on GUI

fix package and package-lock.json to remove ollama dependency and add types.ts for ollama

switch MODEL_PROVIDERS to getModelProviders

make dynamic ollama model dropdown change using zustand store

make dynamic ollama model changes to router and evaluator ts too

* feat(ollama): fix evaluated options by de-duplicating it

* feat(ollama): make README.md change to reflect local model workflow

* feat(ollama): add base non-ollama docker compose file, add --local flag to start_simstudio_docker.sh with ollama service

* feat(ollama): fix README.md local model instructions

* feat(ollama): remove de-duplication logic and separate getModelProviders into two

* fix non-local init and translate.ts

* create combined docker-compose file and fix start_simstudio_docker script too

* update package-lock.json

* feat(ollama): fix README.md instructions and docker compose

---------

Co-authored-by: Arunabh Sharma <arunabh.sharma@supernal.aero>
This commit is contained in:
Arunabh Sharma
2025-03-29 13:34:44 -07:00
committed by GitHub
parent 272a486bcc
commit fe2c7d8d98
20 changed files with 691 additions and 148 deletions

3
.gitignore vendored
View File

@@ -63,3 +63,6 @@ next-env.d.ts
docs/.source
docs/.contentlayer
docs/.content-collections
# database instantiation
**/postgres_data/

View File

@@ -39,8 +39,12 @@ cd sim
# Create environment file and update with required environment variables (BETTER_AUTH_SECRET)
cp sim/.env.example sim/.env
# Start the Docker environment
docker compose up -d
# Start Sim Studio using the provided script
docker compose up -d --build
# or
./start_simstudio_docker.sh
```
After running these commands:
@@ -66,6 +70,36 @@ After running these commands:
docker compose up -d --build
```
#### Working with Local Models
To use local models with Sim Studio, follow these steps:
1. **Pull Local Models**
```bash
# Run the ollama_docker.sh script to pull the required models
./sim/scripts/ollama_docker.sh pull <model_name>
```
2. **Start Sim Studio with Local Models**
```bash
# Start Sim Studio with local model support
./start_simstudio_docker.sh --local
# or
# Start Sim Studio with local model support if you have nvidia GPU
docker compose up --profile local-gpu -d --build
# or
# Start Sim Studio with local model support if you don't have nvidia GPU
docker compose up --profile local-cpu -d --build
```
The application will now be configured to use your local models. You can access it at [http://localhost:3000/w/](http://localhost:3000/w/).
### Option 2: Dev Containers
1. Open VS Code or your favorite VS Code fork (Cursor, Windsurf, etc.)

View File

@@ -46,5 +46,54 @@ services:
timeout: 5s
retries: 5
local-llm-gpu:
profiles:
- local-gpu # Enabled via "docker compose --profile local-gpu" (NVIDIA GPU variant)
image: ollama/ollama:latest
pull_policy: always
volumes:
- ${HOME}/.ollama:/root/.ollama
ports:
- "11434:11434"
environment:
- NVIDIA_DRIVER_CAPABILITIES=all
- OLLAMA_LOAD_TIMEOUT=-1
- OLLAMA_KEEP_ALIVE=-1
- OLLAMA_DEBUG=1
command: "serve"
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: all
capabilities: [gpu]
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:11434/"]
interval: 10s
timeout: 5s
retries: 5
local-llm-cpu:
profiles:
- local-cpu # Enabled via "docker compose --profile local-cpu" (CPU-only variant)
image: ollama/ollama:latest
pull_policy: always
volumes:
- ${HOME}/.ollama:/root/.ollama
ports:
- "11434:11434"
environment:
- OLLAMA_LOAD_TIMEOUT=-1
- OLLAMA_KEEP_ALIVE=-1
- OLLAMA_DEBUG=1
command: "serve"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:11434/"]
interval: 10s
timeout: 5s
retries: 5
volumes:
postgres_data:

View File

@@ -1,4 +1,4 @@
import { useEffect } from 'react'
import { useEffect, useMemo } from 'react'
import {
Select,
SelectContent,
@@ -9,7 +9,7 @@ import {
import { useSubBlockValue } from '../hooks/use-sub-block-value'
interface DropdownProps {
options: Array<string | { label: string; id: string }>
options: Array<string | { label: string; id: string }> | (() => Array<string | { label: string; id: string }>)
defaultValue?: string
blockId: string
subBlockId: string
@@ -18,14 +18,19 @@ interface DropdownProps {
export function Dropdown({ options, defaultValue, blockId, subBlockId }: DropdownProps) {
const [value, setValue] = useSubBlockValue(blockId, subBlockId, true)
// Evaluate options if it's a function
const evaluatedOptions = useMemo(() => {
return typeof options === 'function' ? options() : options
}, [options])
// Set the value to the first option if it's not set
useEffect(() => {
if (!value && options.length > 0) {
const firstOption = options[0]
if (!value && evaluatedOptions.length > 0) {
const firstOption = evaluatedOptions[0]
const firstValue = typeof firstOption === 'string' ? firstOption : firstOption.id
setValue(firstValue)
}
}, [value, options, defaultValue, setValue])
}, [value, evaluatedOptions, defaultValue, setValue])
const getOptionValue = (option: string | { label: string; id: string }) => {
return typeof option === 'string' ? option : option.id
@@ -38,14 +43,14 @@ export function Dropdown({ options, defaultValue, blockId, subBlockId }: Dropdow
return (
<Select
value={value as string | undefined}
defaultValue={defaultValue ?? getOptionValue(options[0])}
defaultValue={defaultValue ?? getOptionValue(evaluatedOptions[0])}
onValueChange={(value) => setValue(value)}
>
<SelectTrigger className="text-left">
<SelectValue placeholder="Select an option" />
</SelectTrigger>
<SelectContent className="max-h-48">
{options.map((option) => (
{evaluatedOptions.map((option) => (
<SelectItem key={getOptionValue(option)} value={getOptionValue(option)}>
{getOptionLabel(option)}
</SelectItem>

View File

@@ -1,8 +1,9 @@
import { AgentIcon } from '@/components/icons'
import { MODELS_TEMP_RANGE_0_1, MODELS_TEMP_RANGE_0_2 } from '@/providers/model-capabilities'
import { MODEL_PROVIDERS } from '@/providers/utils'
import { getAllModelProviders, getBaseModelProviders } from '@/providers/utils'
import { ToolResponse } from '@/tools/types'
import { BlockConfig } from '../types'
import { useOllamaStore } from '@/stores/ollama/store'
interface AgentResponse extends ToolResponse {
output: {
@@ -52,7 +53,11 @@ export const AgentBlock: BlockConfig<AgentResponse> = {
title: 'Model',
type: 'dropdown',
layout: 'half',
options: Object.keys(MODEL_PROVIDERS),
options: () => {
const ollamaModels = useOllamaStore.getState().models
const baseModels = Object.keys(getBaseModelProviders())
return [...baseModels, ...ollamaModels]
},
},
{
id: 'temperature',
@@ -116,7 +121,7 @@ export const AgentBlock: BlockConfig<AgentResponse> = {
if (!model) {
throw new Error('No model selected')
}
const tool = MODEL_PROVIDERS[model]
const tool = getAllModelProviders()[model]
if (!tool) {
throw new Error(`Invalid model selected: ${model}`)
}

View File

@@ -1,9 +1,10 @@
import { ChartBarIcon } from '@/components/icons'
import { createLogger } from '@/lib/logs/console-logger'
import { ProviderId } from '@/providers/types'
import { MODEL_PROVIDERS } from '@/providers/utils'
import { getBaseModelProviders, getAllModelProviders } from '@/providers/utils'
import { ToolResponse } from '@/tools/types'
import { BlockConfig, ParamType } from '../types'
import { useOllamaStore } from '@/stores/ollama/store'
const logger = createLogger('EvaluatorBlock')
@@ -146,7 +147,11 @@ export const EvaluatorBlock: BlockConfig<EvaluatorResponse> = {
title: 'Model',
type: 'dropdown',
layout: 'half',
options: Object.keys(MODEL_PROVIDERS),
options: () => {
const ollamaModels = useOllamaStore.getState().models
const baseModels = Object.keys(getBaseModelProviders())
return [...baseModels, ...ollamaModels]
},
},
{
id: 'apiKey',
@@ -218,7 +223,7 @@ export const EvaluatorBlock: BlockConfig<EvaluatorResponse> = {
if (!model) {
throw new Error('No model selected')
}
const tool = MODEL_PROVIDERS[model as ProviderId]
const tool = getAllModelProviders()[model as ProviderId]
if (!tool) {
throw new Error(`Invalid model selected: ${model}`)
}

View File

@@ -1,8 +1,9 @@
import { ConnectIcon } from '@/components/icons'
import { ProviderId } from '@/providers/types'
import { MODEL_PROVIDERS } from '@/providers/utils'
import { getBaseModelProviders, getAllModelProviders } from '@/providers/utils'
import { ToolResponse } from '@/tools/types'
import { BlockConfig } from '../types'
import { useOllamaStore } from '@/stores/ollama/store'
interface RouterResponse extends ToolResponse {
output: {
@@ -108,7 +109,11 @@ export const RouterBlock: BlockConfig<RouterResponse> = {
title: 'Model',
type: 'dropdown',
layout: 'half',
options: Object.keys(MODEL_PROVIDERS),
options: () => {
const ollamaModels = useOllamaStore.getState().models
const baseModels = Object.keys(getBaseModelProviders())
return [...baseModels, ...ollamaModels]
},
},
{
id: 'apiKey',
@@ -145,7 +150,7 @@ export const RouterBlock: BlockConfig<RouterResponse> = {
if (!model) {
throw new Error('No model selected')
}
const tool = MODEL_PROVIDERS[model as ProviderId]
const tool = getAllModelProviders()[model as ProviderId]
if (!tool) {
throw new Error(`Invalid model selected: ${model}`)
}

View File

@@ -1,6 +1,6 @@
import { TranslateIcon } from '@/components/icons'
import { ProviderId } from '@/providers/types'
import { MODEL_PROVIDERS } from '@/providers/utils'
import { getBaseModelProviders } from '@/providers/utils'
import { BlockConfig } from '../types'
const getTranslationPrompt = (
@@ -43,7 +43,7 @@ export const TranslateBlock: BlockConfig = {
title: 'Model',
type: 'dropdown',
layout: 'half',
options: Object.keys(MODEL_PROVIDERS),
options: Object.keys(getBaseModelProviders()),
},
{
id: 'apiKey',
@@ -75,7 +75,7 @@ export const TranslateBlock: BlockConfig = {
throw new Error('No model selected')
}
const tool = MODEL_PROVIDERS[model as ProviderId]
const tool = getBaseModelProviders()[model as ProviderId]
if (!tool) {
throw new Error(`Invalid model selected: ${model}`)

View File

@@ -84,7 +84,7 @@ export interface SubBlockConfig {
title?: string
type: SubBlockType
layout?: SubBlockLayout
options?: string[] | { label: string; id: string }[]
options?: string[] | { label: string; id: string }[] | (() => string[] | { label: string; id: string }[])
min?: number
max?: number
columns?: string[]

160
sim/package-lock.json generated
View File

@@ -52,7 +52,7 @@
"lucide-react": "^0.469.0",
"next": "^15.2.4",
"next-themes": "^0.4.6",
"openai": "^4.85.4",
"openai": "^4.89.0",
"postgres": "^3.4.5",
"prismjs": "^1.30.0",
"react": "^18.2.0",
@@ -143,9 +143,9 @@
}
},
"node_modules/@anthropic-ai/sdk/node_modules/@types/node": {
"version": "18.19.83",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.83.tgz",
"integrity": "sha512-D69JeR5SfFS5H6FLbUaS0vE4r1dGhmMBbG4Ed6BNS4wkDK8GZjsdCShT5LCN59vOHEUHnFCY9J4aclXlIphMkA==",
"version": "18.19.84",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.84.tgz",
"integrity": "sha512-ACYy2HGcZPHxEeWTqowTF7dhXN+JU1o7Gr4b41klnn6pj2LD6rsiGqSZojMdk1Jh2ys3m76ap+ae1vvE4+5+vg==",
"license": "MIT",
"dependencies": {
"undici-types": "~5.26.4"
@@ -381,16 +381,16 @@
}
},
"node_modules/@aws-sdk/client-s3": {
"version": "3.775.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.775.0.tgz",
"integrity": "sha512-Z/BeVmYc3nj4FNE46MtvBYeCVvBZwlujMEvr5UOChP14899QWkBfOvf74RwQY9qy5/DvhVFkHlA8en1L6+0NrA==",
"version": "3.777.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/client-s3/-/client-s3-3.777.0.tgz",
"integrity": "sha512-KVX2QD6lLczZxtzIRCpmztgNnGq+spiMIDYqkum/rCBjCX1YJoDHwMYXaMf2EtAH8tFkJmBiA/CiT/J36iN7Xg==",
"license": "Apache-2.0",
"dependencies": {
"@aws-crypto/sha1-browser": "5.2.0",
"@aws-crypto/sha256-browser": "5.2.0",
"@aws-crypto/sha256-js": "5.2.0",
"@aws-sdk/core": "3.775.0",
"@aws-sdk/credential-provider-node": "3.775.0",
"@aws-sdk/credential-provider-node": "3.777.0",
"@aws-sdk/middleware-bucket-endpoint": "3.775.0",
"@aws-sdk/middleware-expect-continue": "3.775.0",
"@aws-sdk/middleware-flexible-checksums": "3.775.0",
@@ -448,9 +448,9 @@
}
},
"node_modules/@aws-sdk/client-sso": {
"version": "3.775.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.775.0.tgz",
"integrity": "sha512-vqG1S2ap77WP4D5qt4bEPE0duQ4myN+cDr1NeP8QpSTajetbkDGVo7h1VViYMcUoFUVWBj6Qf1X1VfOq+uaxbA==",
"version": "3.777.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/client-sso/-/client-sso-3.777.0.tgz",
"integrity": "sha512-0+z6CiAYIQa7s6FJ+dpBYPi9zr9yY5jBg/4/FGcwYbmqWPXwL9Thdtr0FearYRZgKl7bhL3m3dILCCfWqr3teQ==",
"license": "Apache-2.0",
"dependencies": {
"@aws-crypto/sha256-browser": "5.2.0",
@@ -556,18 +556,18 @@
}
},
"node_modules/@aws-sdk/credential-provider-ini": {
"version": "3.775.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.775.0.tgz",
"integrity": "sha512-0gJc6cALsgrjeC5U3qDjbz4myIC/j49+gPz9nkvY+C0OYWt1KH1tyfiZUuCRGfuFHhQ+3KMMDSL229TkBP3E7g==",
"version": "3.777.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.777.0.tgz",
"integrity": "sha512-1X9mCuM9JSQPmQ+D2TODt4THy6aJWCNiURkmKmTIPRdno7EIKgAqrr/LLN++K5mBf54DZVKpqcJutXU2jwo01A==",
"license": "Apache-2.0",
"dependencies": {
"@aws-sdk/core": "3.775.0",
"@aws-sdk/credential-provider-env": "3.775.0",
"@aws-sdk/credential-provider-http": "3.775.0",
"@aws-sdk/credential-provider-process": "3.775.0",
"@aws-sdk/credential-provider-sso": "3.775.0",
"@aws-sdk/credential-provider-web-identity": "3.775.0",
"@aws-sdk/nested-clients": "3.775.0",
"@aws-sdk/credential-provider-sso": "3.777.0",
"@aws-sdk/credential-provider-web-identity": "3.777.0",
"@aws-sdk/nested-clients": "3.777.0",
"@aws-sdk/types": "3.775.0",
"@smithy/credential-provider-imds": "^4.0.2",
"@smithy/property-provider": "^4.0.2",
@@ -580,17 +580,17 @@
}
},
"node_modules/@aws-sdk/credential-provider-node": {
"version": "3.775.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.775.0.tgz",
"integrity": "sha512-D8Zre5W2sXC/ANPqCWPqwYpU1cKY9DF6ckFZyDrqlcBC0gANgpY6fLrBtYo2fwJsbj+1A24iIpBINV7erdprgA==",
"version": "3.777.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-node/-/credential-provider-node-3.777.0.tgz",
"integrity": "sha512-ZD66ywx1Q0KyUSuBXZIQzBe3Q7MzX8lNwsrCU43H3Fww+Y+HB3Ncws9grhSdNhKQNeGmZ+MgKybuZYaaeLwJEQ==",
"license": "Apache-2.0",
"dependencies": {
"@aws-sdk/credential-provider-env": "3.775.0",
"@aws-sdk/credential-provider-http": "3.775.0",
"@aws-sdk/credential-provider-ini": "3.775.0",
"@aws-sdk/credential-provider-ini": "3.777.0",
"@aws-sdk/credential-provider-process": "3.775.0",
"@aws-sdk/credential-provider-sso": "3.775.0",
"@aws-sdk/credential-provider-web-identity": "3.775.0",
"@aws-sdk/credential-provider-sso": "3.777.0",
"@aws-sdk/credential-provider-web-identity": "3.777.0",
"@aws-sdk/types": "3.775.0",
"@smithy/credential-provider-imds": "^4.0.2",
"@smithy/property-provider": "^4.0.2",
@@ -620,14 +620,14 @@
}
},
"node_modules/@aws-sdk/credential-provider-sso": {
"version": "3.775.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.775.0.tgz",
"integrity": "sha512-du06V7u9HDmRuwZnRjf85shO3dffeKOkQplV5/2vf3LgTPNEI9caNomi/cCGyxKGOeSUHAKrQ1HvpPfOaI6t5Q==",
"version": "3.777.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.777.0.tgz",
"integrity": "sha512-9mPz7vk9uE4PBVprfINv4tlTkyq1OonNevx2DiXC1LY4mCUCNN3RdBwAY0BTLzj0uyc3k5KxFFNbn3/8ZDQP7w==",
"license": "Apache-2.0",
"dependencies": {
"@aws-sdk/client-sso": "3.775.0",
"@aws-sdk/client-sso": "3.777.0",
"@aws-sdk/core": "3.775.0",
"@aws-sdk/token-providers": "3.775.0",
"@aws-sdk/token-providers": "3.777.0",
"@aws-sdk/types": "3.775.0",
"@smithy/property-provider": "^4.0.2",
"@smithy/shared-ini-file-loader": "^4.0.2",
@@ -639,13 +639,13 @@
}
},
"node_modules/@aws-sdk/credential-provider-web-identity": {
"version": "3.775.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.775.0.tgz",
"integrity": "sha512-z4XLYui5aHsr78mbd5BtZfm55OM5V55qK/X17OPrEqjYDDk3GlI8Oe2ZjTmOVrKwMpmzXKhsakeFHKfDyOvv1A==",
"version": "3.777.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.777.0.tgz",
"integrity": "sha512-uGCqr47fnthkqwq5luNl2dksgcpHHjSXz2jUra7TXtFOpqvnhOW8qXjoa1ivlkq8qhqlaZwCzPdbcN0lXpmLzQ==",
"license": "Apache-2.0",
"dependencies": {
"@aws-sdk/core": "3.775.0",
"@aws-sdk/nested-clients": "3.775.0",
"@aws-sdk/nested-clients": "3.777.0",
"@aws-sdk/types": "3.775.0",
"@smithy/property-provider": "^4.0.2",
"@smithy/types": "^4.2.0",
@@ -828,9 +828,9 @@
}
},
"node_modules/@aws-sdk/nested-clients": {
"version": "3.775.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.775.0.tgz",
"integrity": "sha512-f37jmAzkuIhKyhtA6s0LGpqQvm218vq+RNMUDkGm1Zz2fxJ5pBIUTDtygiI3vXTcmt9DTIB8S6JQhjrgtboktw==",
"version": "3.777.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/nested-clients/-/nested-clients-3.777.0.tgz",
"integrity": "sha512-bmmVRsCjuYlStYPt06hr+f8iEyWg7+AklKCA8ZLDEJujXhXIowgUIqXmqpTkXwkVvDQ9tzU7hxaONjyaQCGybA==",
"license": "Apache-2.0",
"dependencies": {
"@aws-crypto/sha256-browser": "5.2.0",
@@ -911,12 +911,12 @@
}
},
"node_modules/@aws-sdk/token-providers": {
"version": "3.775.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.775.0.tgz",
"integrity": "sha512-Q6MtbEhkOggVSz/dN89rIY/ry80U3v89o0Lrrc+Rpvaiaaz8pEN0DsfEcg0IjpzBQ8Owoa6lNWyglHbzPhaJpA==",
"version": "3.777.0",
"resolved": "https://registry.npmjs.org/@aws-sdk/token-providers/-/token-providers-3.777.0.tgz",
"integrity": "sha512-Yc2cDONsHOa4dTSGOev6Ng2QgTKQUEjaUnsyKd13pc/nLLz/WLqHiQ/o7PcnKERJxXGs1g1C6l3sNXiX+kbnFQ==",
"license": "Apache-2.0",
"dependencies": {
"@aws-sdk/nested-clients": "3.775.0",
"@aws-sdk/nested-clients": "3.777.0",
"@aws-sdk/types": "3.775.0",
"@smithy/property-provider": "^4.0.2",
"@smithy/shared-ini-file-loader": "^4.0.2",
@@ -1354,9 +1354,9 @@
}
},
"node_modules/@cerebras/cerebras_cloud_sdk/node_modules/@types/node": {
"version": "18.19.83",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.83.tgz",
"integrity": "sha512-D69JeR5SfFS5H6FLbUaS0vE4r1dGhmMBbG4Ed6BNS4wkDK8GZjsdCShT5LCN59vOHEUHnFCY9J4aclXlIphMkA==",
"version": "18.19.84",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.84.tgz",
"integrity": "sha512-ACYy2HGcZPHxEeWTqowTF7dhXN+JU1o7Gr4b41klnn6pj2LD6rsiGqSZojMdk1Jh2ys3m76ap+ae1vvE4+5+vg==",
"license": "MIT",
"dependencies": {
"undici-types": "~5.26.4"
@@ -1491,9 +1491,9 @@
"license": "Apache-2.0"
},
"node_modules/@emnapi/runtime": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.3.1.tgz",
"integrity": "sha512-kEBmG8KyqtxJZv+ygbEim+KCGtIq1fC22Ms3S4ziXmYKm8uyoLX0MHONVKwp+9opg390VaKRNt4a7A9NwmpNhw==",
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.4.0.tgz",
"integrity": "sha512-64WYIf4UYcdLnbKn/umDlNjQDSS8AgZrI/R9+x5ilkUVFxXcA1Ebl+gQLc/6mERA4407Xof0R7wEyEuj091CVw==",
"dev": true,
"license": "MIT",
"optional": true,
@@ -6167,9 +6167,9 @@
"license": "MIT"
},
"node_modules/@types/node": {
"version": "20.17.27",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.27.tgz",
"integrity": "sha512-U58sbKhDrthHlxHRJw7ZLiLDZGmAUOZUbpw0S6nL27sYUdhvgBLCRu/keSd6qcTsfArd1sRFCCBxzWATGr/0UA==",
"version": "20.17.28",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.28.tgz",
"integrity": "sha512-DHlH/fNL6Mho38jTy7/JT7sn2wnXI+wULR6PV4gy4VHLVvnrV/d3pHAMQHhc4gjdLmK2ZiPoMxzp6B3yRajLSQ==",
"license": "MIT",
"dependencies": {
"undici-types": "~6.19.2"
@@ -6534,14 +6534,14 @@
}
},
"node_modules/asn1js": {
"version": "3.0.5",
"resolved": "https://registry.npmjs.org/asn1js/-/asn1js-3.0.5.tgz",
"integrity": "sha512-FVnvrKJwpt9LP2lAMl8qZswRNm3T4q9CON+bxldk2iwk3FFpuwhx2FfinyitizWHsVYyaY+y5JzDR0rCMV5yTQ==",
"version": "3.0.6",
"resolved": "https://registry.npmjs.org/asn1js/-/asn1js-3.0.6.tgz",
"integrity": "sha512-UOCGPYbl0tv8+006qks/dTgV9ajs97X2p0FAbyS2iyCRrmLSRolDaHdp+v/CLgnzHc3fVB+CwYiUmei7ndFcgA==",
"license": "BSD-3-Clause",
"dependencies": {
"pvtsutils": "^1.3.2",
"pvtsutils": "^1.3.6",
"pvutils": "^1.1.3",
"tslib": "^2.4.0"
"tslib": "^2.8.1"
},
"engines": {
"node": ">=12.0.0"
@@ -7567,9 +7567,9 @@
}
},
"node_modules/drizzle-kit": {
"version": "0.30.5",
"resolved": "https://registry.npmjs.org/drizzle-kit/-/drizzle-kit-0.30.5.tgz",
"integrity": "sha512-l6dMSE100u7sDaTbLczibrQZjA35jLsHNqIV+jmhNVO3O8jzM6kywMOmV9uOz9ZVSCMPQhAZEFjL/qDPVrqpUA==",
"version": "0.30.6",
"resolved": "https://registry.npmjs.org/drizzle-kit/-/drizzle-kit-0.30.6.tgz",
"integrity": "sha512-U4wWit0fyZuGuP7iNmRleQyK2V8wCuv57vf5l3MnG4z4fzNTjY/U13M8owyQ5RavqvqxBifWORaR3wIUzlN64g==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -7725,9 +7725,9 @@
"license": "MIT"
},
"node_modules/electron-to-chromium": {
"version": "1.5.125",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.125.tgz",
"integrity": "sha512-A2+qEsSUc95QvyFDl7PNwkDDNphIKBVfBBtWWkPGRbiWEgzLo0SvLygYF6HgzVduHd+4WGPB/WD64POFgwzY3g==",
"version": "1.5.128",
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.128.tgz",
"integrity": "sha512-bo1A4HH/NS522Ws0QNFIzyPcyUUNV/yyy70Ho1xqfGYzPUme2F/xr4tlEOuM6/A538U1vDA7a4XfCd1CKRegKQ==",
"dev": true,
"license": "ISC"
},
@@ -8418,9 +8418,9 @@
}
},
"node_modules/groq-sdk/node_modules/@types/node": {
"version": "18.19.83",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.83.tgz",
"integrity": "sha512-D69JeR5SfFS5H6FLbUaS0vE4r1dGhmMBbG4Ed6BNS4wkDK8GZjsdCShT5LCN59vOHEUHnFCY9J4aclXlIphMkA==",
"version": "18.19.84",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.84.tgz",
"integrity": "sha512-ACYy2HGcZPHxEeWTqowTF7dhXN+JU1o7Gr4b41klnn6pj2LD6rsiGqSZojMdk1Jh2ys3m76ap+ae1vvE4+5+vg==",
"license": "MIT",
"dependencies": {
"undici-types": "~5.26.4"
@@ -9739,9 +9739,9 @@
}
},
"node_modules/nwsapi": {
"version": "2.2.19",
"resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.19.tgz",
"integrity": "sha512-94bcyI3RsqiZufXjkr3ltkI86iEl+I7uiHVDtcq9wJUTwYQJ5odHDeSzkkrRzi80jJ8MaeZgqKjH1bAWAFw9bA==",
"version": "2.2.20",
"resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.20.tgz",
"integrity": "sha512-/ieB+mDe4MrrKMT8z+mQL8klXydZWGR5Dowt4RAGKbJ3kIGEx3X4ljUo+6V73IXtUPWgfOlU5B9MlGxFO5T+cA==",
"dev": true,
"license": "MIT"
},
@@ -9780,9 +9780,9 @@
}
},
"node_modules/openai": {
"version": "4.89.1",
"resolved": "https://registry.npmjs.org/openai/-/openai-4.89.1.tgz",
"integrity": "sha512-k6t7WfnodIctPo40/9sy7Ww4VypnfkKi/urO2VQx4trCIwgzeroO1jkaCL2f5MyTS1H3HT9X+M2qLsc7NSXwTw==",
"version": "4.90.0",
"resolved": "https://registry.npmjs.org/openai/-/openai-4.90.0.tgz",
"integrity": "sha512-YCuHMMycqtCg1B8G9ezkOF0j8UnBWD3Al/zYaelpuXwU1yhCEv+Y4n9G20MnyGy6cH4GsFwOMrgstQ+bgG1PtA==",
"license": "Apache-2.0",
"dependencies": {
"@types/node": "^18.11.18",
@@ -9810,9 +9810,9 @@
}
},
"node_modules/openai/node_modules/@types/node": {
"version": "18.19.83",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.83.tgz",
"integrity": "sha512-D69JeR5SfFS5H6FLbUaS0vE4r1dGhmMBbG4Ed6BNS4wkDK8GZjsdCShT5LCN59vOHEUHnFCY9J4aclXlIphMkA==",
"version": "18.19.84",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.84.tgz",
"integrity": "sha512-ACYy2HGcZPHxEeWTqowTF7dhXN+JU1o7Gr4b41klnn6pj2LD6rsiGqSZojMdk1Jh2ys3m76ap+ae1vvE4+5+vg==",
"license": "MIT",
"dependencies": {
"undici-types": "~5.26.4"
@@ -10073,9 +10073,9 @@
}
},
"node_modules/pirates": {
"version": "4.0.6",
"resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz",
"integrity": "sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==",
"version": "4.0.7",
"resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz",
"integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==",
"license": "MIT",
"engines": {
"node": ">= 6"
@@ -11210,9 +11210,9 @@
}
},
"node_modules/react-hook-form": {
"version": "7.54.2",
"resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.54.2.tgz",
"integrity": "sha512-eHpAUgUjWbZocoQYUHposymRb4ZP6d0uwUnooL2uOybA9/3tPUvoAKqEWK1WaSiTxxOfTpffNZP7QwlnM3/gEg==",
"version": "7.55.0",
"resolved": "https://registry.npmjs.org/react-hook-form/-/react-hook-form-7.55.0.tgz",
"integrity": "sha512-XRnjsH3GVMQz1moZTW53MxfoWN7aDpUg/GpVNc4A3eXRVNdGXfbzJ4vM4aLQ8g6XCUh1nIbx70aaNCl7kxnjog==",
"license": "MIT",
"engines": {
"node": ">=18.0.0"
@@ -12734,9 +12734,9 @@
}
},
"node_modules/use-sync-external-store": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.4.0.tgz",
"integrity": "sha512-9WXSPC5fMv61vaupRkCKCxsPxBocVnwakBEkMIHHpkTTg6icbJtg6jzgtLDm4bl3cSHAca52rYWih0k4K3PfHw==",
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.5.0.tgz",
"integrity": "sha512-Rb46I4cGGVBmjamjphe8L/UnvJD+uPPtTkNvX5mZgqdbavhI4EbgIWJiIHXJ8bc/i9EQGPRh4DwEURJ552Do0A==",
"license": "MIT",
"peerDependencies": {
"react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"

View File

@@ -66,7 +66,7 @@
"lucide-react": "^0.469.0",
"next": "^15.2.4",
"next-themes": "^0.4.6",
"openai": "^4.85.4",
"openai": "^4.89.0",
"postgres": "^3.4.5",
"prismjs": "^1.30.0",
"react": "^18.2.0",

View File

@@ -35,7 +35,6 @@ export async function executeProviderRequest(
if (!provider.executeRequest) {
throw new Error(`Provider ${providerId} does not implement executeRequest`)
}
const sanitizedRequest = sanitizeRequest(request)
// If responseFormat is provided, modify the system prompt to enforce structured output

View File

@@ -0,0 +1,299 @@
import OpenAI from 'openai'
import { createLogger } from '@/lib/logs/console-logger'
import { executeTool } from '@/tools'
import { ProviderConfig, ProviderRequest, ProviderResponse, TimeSegment } from '../types'
import { ModelsObject } from './types'
import { useOllamaStore } from '@/stores/ollama/store'
const logger = createLogger('Ollama Provider')

// Base URL of the local Ollama server; the OpenAI-compatible API lives under /v1.
const OLLAMA_HOST = 'http://localhost:11434'

/**
 * Provider backed by a locally running Ollama server.
 *
 * Available models are discovered at runtime by `initialize()` (Ollama's
 * GET /api/tags endpoint) and mirrored into the Ollama zustand store so the
 * UI dropdowns can render them. Requests are executed through Ollama's
 * OpenAI-compatible chat-completions endpoint, including an agentic
 * tool-call loop capped at MAX_ITERATIONS.
 */
export const ollamaProvider: ProviderConfig = {
  id: 'ollama',
  name: 'Ollama',
  description: 'Local Ollama server for LLM inference',
  version: '1.0.0',
  models: [], // Populated dynamically by initialize()
  defaultModel: '',

  /**
   * Fetch the list of locally available models from the Ollama server and
   * publish them to the store. If the server is unreachable or the request
   * fails, the store is cleared and the provider is effectively disabled.
   */
  async initialize() {
    try {
      const response = await fetch(`${OLLAMA_HOST}/api/tags`)
      if (!response.ok) {
        useOllamaStore.getState().setModels([])
        logger.warn('Ollama service is not available. The provider will be disabled.')
        return
      }
      const data = (await response.json()) as ModelsObject
      this.models = data.models.map((model) => model.name)
      useOllamaStore.getState().setModels(this.models)
    } catch (error) {
      // Keep the store consistent with the "service unavailable" branch above
      // so stale models are never shown when the server cannot be reached.
      useOllamaStore.getState().setModels([])
      logger.warn('Ollama model instantiation failed. The provider will be disabled.', {
        error: error instanceof Error ? error.message : 'Unknown error',
      })
    }
  },

  /**
   * Execute a chat-completion request against the local Ollama server,
   * running tool calls the model requests and feeding results back until the
   * model answers without tool calls (or the iteration cap is reached).
   *
   * @param request - Provider-agnostic request (model, prompts, tools, etc.)
   * @returns Final content plus token usage, tool-call records, and timing.
   * @throws Re-throws any error from the initial model call after logging it.
   */
  executeRequest: async (request: ProviderRequest): Promise<ProviderResponse> => {
    logger.info('Preparing Ollama request', {
      model: request.model,
      hasSystemPrompt: !!request.systemPrompt,
      hasMessages: !!request.context,
      hasTools: !!request.tools?.length,
      toolCount: request.tools?.length || 0,
      hasResponseFormat: !!request.responseFormat,
    })

    const startTime = Date.now()

    try {
      // Ollama exposes an OpenAI-compatible endpoint, so reuse the OpenAI SDK.
      // The API key is unused by Ollama but required by the SDK constructor.
      const ollama = new OpenAI({
        apiKey: 'empty',
        baseURL: `${OLLAMA_HOST}/v1`,
        dangerouslyAllowBrowser: true,
      })

      // Assemble the conversation: system prompt, then context, then history.
      const allMessages = []
      if (request.systemPrompt) {
        allMessages.push({ role: 'system', content: request.systemPrompt })
      }
      if (request.context) {
        allMessages.push({ role: 'user', content: request.context })
      }
      if (request.messages) {
        allMessages.push(...request.messages)
      }

      // Transform tools to the OpenAI function-calling format if provided.
      const tools = request.tools?.length
        ? request.tools.map((tool) => ({
            type: 'function',
            function: {
              name: tool.id,
              description: tool.description,
              parameters: tool.parameters,
            },
          }))
        : undefined

      const payload: any = {
        model: request.model,
        messages: allMessages,
      }
      if (request.temperature !== undefined) payload.temperature = request.temperature
      if (request.maxTokens !== undefined) payload.max_tokens = request.maxTokens
      if (tools?.length) {
        payload.tools = tools
        payload.tool_choice = 'auto'
      }

      // Initial model call.
      const initialCallTime = Date.now()
      let currentResponse = await ollama.chat.completions.create(payload)
      const firstResponseTime = Date.now() - initialCallTime

      let content = currentResponse.choices[0]?.message?.content || ''
      if (content) {
        // Strip markdown code fences some models wrap structured output in.
        content = content.replace(/```json\n?|\n?```/g, '').trim()
      }

      const tokens = {
        prompt: currentResponse.usage?.prompt_tokens || 0,
        completion: currentResponse.usage?.completion_tokens || 0,
        total: currentResponse.usage?.total_tokens || 0,
      }
      const toolCalls = []
      const toolResults = []
      const currentMessages = [...allMessages]
      let iterationCount = 0
      const MAX_ITERATIONS = 10 // Safety cap to prevent infinite tool loops

      // Track time spent in the model vs. executing tools, with per-segment
      // timestamps for the timeline view.
      let modelTime = firstResponseTime
      let toolsTime = 0
      const timeSegments: TimeSegment[] = [
        {
          type: 'model',
          name: 'Initial response',
          startTime: initialCallTime,
          endTime: initialCallTime + firstResponseTime,
          duration: firstResponseTime,
        },
      ]

      try {
        // Agentic loop: execute requested tool calls, append results to the
        // conversation, and re-query the model until it answers directly.
        while (iterationCount < MAX_ITERATIONS) {
          const toolCallsInResponse = currentResponse.choices[0]?.message?.tool_calls
          if (!toolCallsInResponse || toolCallsInResponse.length === 0) {
            break
          }

          const toolsStartTime = Date.now()

          for (const toolCall of toolCallsInResponse) {
            try {
              const toolName = toolCall.function.name
              const toolArgs = JSON.parse(toolCall.function.arguments)

              // Resolve the tool from the request's registry; skip unknown tools.
              const tool = request.tools?.find((t) => t.id === toolName)
              if (!tool) continue

              const toolCallStartTime = Date.now()
              // Statically-configured params are overridden by model-supplied args.
              const mergedArgs = { ...tool.params, ...toolArgs }
              const result = await executeTool(toolName, mergedArgs)
              const toolCallEndTime = Date.now()
              const toolCallDuration = toolCallEndTime - toolCallStartTime

              // Failed tool executions are silently skipped (best-effort loop).
              if (!result.success) continue

              timeSegments.push({
                type: 'tool',
                name: toolName,
                startTime: toolCallStartTime,
                endTime: toolCallEndTime,
                duration: toolCallDuration,
              })

              toolResults.push(result.output)
              toolCalls.push({
                name: toolName,
                arguments: toolArgs,
                startTime: new Date(toolCallStartTime).toISOString(),
                endTime: new Date(toolCallEndTime).toISOString(),
                duration: toolCallDuration,
                result: result.output,
              })

              // Echo the tool call and its result back into the conversation
              // in the format the OpenAI-compatible API expects.
              currentMessages.push({
                role: 'assistant',
                content: null,
                tool_calls: [
                  {
                    id: toolCall.id,
                    type: 'function',
                    function: {
                      name: toolName,
                      arguments: toolCall.function.arguments,
                    },
                  },
                ],
              })
              currentMessages.push({
                role: 'tool',
                tool_call_id: toolCall.id,
                content: JSON.stringify(result.output),
              })
            } catch (error) {
              logger.error('Error processing tool call:', { error })
            }
          }

          toolsTime += Date.now() - toolsStartTime

          // Follow-up model call with the tool results appended.
          const nextPayload = {
            ...payload,
            messages: currentMessages,
          }
          const nextModelStartTime = Date.now()
          currentResponse = await ollama.chat.completions.create(nextPayload)
          const nextModelEndTime = Date.now()
          const thisModelTime = nextModelEndTime - nextModelStartTime

          timeSegments.push({
            type: 'model',
            name: `Model response (iteration ${iterationCount + 1})`,
            startTime: nextModelStartTime,
            endTime: nextModelEndTime,
            duration: thisModelTime,
          })
          modelTime += thisModelTime

          if (currentResponse.choices[0]?.message?.content) {
            content = currentResponse.choices[0].message.content
              .replace(/```json\n?|\n?```/g, '')
              .trim()
          }

          // Accumulate usage across iterations.
          if (currentResponse.usage) {
            tokens.prompt += currentResponse.usage.prompt_tokens || 0
            tokens.completion += currentResponse.usage.completion_tokens || 0
            tokens.total += currentResponse.usage.total_tokens || 0
          }

          iterationCount++
        }
      } catch (error) {
        // The loop is best-effort: on failure, return whatever was produced
        // so far rather than discarding the initial response.
        logger.error('Error in Ollama request:', { error })
      }

      const endTime = Date.now()
      return {
        content,
        model: request.model,
        tokens,
        toolCalls: toolCalls.length > 0 ? toolCalls : undefined,
        toolResults: toolResults.length > 0 ? toolResults : undefined,
        timing: {
          startTime: new Date(startTime).toISOString(),
          endTime: new Date(endTime).toISOString(),
          duration: endTime - startTime,
          modelTime,
          toolsTime,
          firstResponseTime,
          iterations: iterationCount + 1,
          timeSegments,
        },
      }
    } catch (error) {
      logger.error('Error in Ollama request', {
        error: error instanceof Error ? error.message : 'Unknown error',
        model: request.model,
      })
      throw error
    }
  },
}

View File

@@ -0,0 +1,12 @@
// Shape of a single model entry as reported by a local Ollama server
// (presumably the model-listing endpoint — TODO confirm against Ollama API docs).
interface Model {
  name: string; // human-readable model name, e.g. "llama3:latest"
  model: string; // model identifier used when issuing requests
  modified_at: string; // last-modified timestamp — assumed ISO 8601; verify
  size: number; // model size — presumably bytes on disk; verify
  digest: string; // content digest of the model blob
  details: object; // additional metadata; structure not constrained here
}

// Top-level response wrapper: the list of locally available Ollama models.
export interface ModelsObject {
  models: Model[];
}

View File

@@ -6,6 +6,7 @@ export type ProviderId =
| 'xai'
| 'cerebras'
| 'groq'
| 'ollama'
/**
* Model pricing information per million tokens
@@ -40,6 +41,7 @@ export interface ProviderConfig {
version: string
models: string[]
defaultModel: string
initialize?: () => Promise<void>
executeRequest?: (request: ProviderRequest) => Promise<ProviderResponse>
}

View File

@@ -6,6 +6,7 @@ import { deepseekProvider } from './deepseek'
import { googleProvider } from './google'
import { groqProvider } from './groq'
import { openaiProvider } from './openai'
import { ollamaProvider } from './ollama'
import { getModelPricing } from './pricing'
import { ProviderConfig, ProviderId, ProviderToolConfig } from './types'
import { xAIProvider } from './xai'
@@ -40,7 +41,7 @@ export const providers: Record<
deepseek: {
...deepseekProvider,
models: ['deepseek-v3', 'deepseek-r1'],
modelPatterns: [/^deepseek/],
modelPatterns: [],
},
xai: {
...xAIProvider,
@@ -61,13 +62,34 @@ export const providers: Record<
],
modelPatterns: [/^groq/],
},
ollama: {
...ollamaProvider,
models: [],
modelPatterns: [],
},
}
/**
* Direct mapping from model names to provider IDs
* Automatically generated from the providers configuration
*/
export const MODEL_PROVIDERS: Record<string, ProviderId> = Object.entries(providers).reduce(
// Initialize all providers that have initialize method
Object.entries(providers).forEach(([id, provider]) => {
if (provider.initialize) {
provider.initialize().catch(error => {
logger.error(`Failed to initialize ${id} provider`, {
error: error instanceof Error ? error.message : 'Unknown error'
})
})
}
})
/**
 * Replaces the Ollama provider's model list with the latest set discovered
 * at runtime (e.g. fetched from a local Ollama instance).
 *
 * @param models - Model names reported by the Ollama server
 */
export function updateOllamaProviderModels(models: string[]): void {
  // Copy defensively so later mutation of the caller's array cannot
  // silently change the registered provider models.
  providers.ollama.models = [...models]
  logger.info('Updated Ollama provider models', { models })
}
export function getBaseModelProviders(): Record<string, ProviderId> {
return Object.entries(providers)
.filter(([providerId]) => providerId !== 'ollama')
.reduce(
(map, [providerId, config]) => {
config.models.forEach((model) => {
map[model.toLowerCase()] = providerId as ProviderId
@@ -76,11 +98,24 @@ export const MODEL_PROVIDERS: Record<string, ProviderId> = Object.entries(provid
},
{} as Record<string, ProviderId>
)
}
export function getAllModelProviders(): Record<string, ProviderId> {
return Object.entries(providers).reduce(
(map, [providerId, config]) => {
config.models.forEach((model) => {
map[model.toLowerCase()] = providerId as ProviderId
})
return map
},
{} as Record<string, ProviderId>
)
}
export function getProviderFromModel(model: string): ProviderId {
const normalizedModel = model.toLowerCase()
if (normalizedModel in MODEL_PROVIDERS) {
return MODEL_PROVIDERS[normalizedModel]
if (normalizedModel in getAllModelProviders()) {
return getAllModelProviders()[normalizedModel]
}
for (const [providerId, config] of Object.entries(providers)) {
@@ -93,8 +128,8 @@ export function getProviderFromModel(model: string): ProviderId {
}
}
logger.warn(`No provider found for model: ${model}, defaulting to deepseek`)
return 'deepseek'
logger.warn(`No provider found for model: ${model}, defaulting to ollama`)
return 'ollama'
}
export function getProvider(id: string): ProviderConfig | undefined {

25
sim/scripts/ollama_docker.sh Executable file
View File

@@ -0,0 +1,25 @@
#!/bin/bash
set -e

# Run a single ollama CLI command inside a throwaway ollama/ollama container,
# reusing the host's ~/.ollama directory so models persist between runs.

# Check that at least one argument is provided. If not, display the usage help.
if [ "$#" -eq 0 ]; then
    echo "Usage: $(basename "$0") <ollama command> [args...]"
    echo "Example: $(basename "$0") ps  # This will run 'ollama ps' inside the container"
    exit 1
fi

# Start a detached container from the ollama/ollama image, mounting the host's
# ~/.ollama directory into the container at /root/.ollama (where the image
# expects it). --rm removes the container once it stops, so repeated runs do
# not leave stopped containers behind (previously cleanup only stopped it).
CONTAINER_ID=$(docker run -d --rm -v ~/.ollama:/root/.ollama -p 11434:11434 ollama/ollama)

# Define a cleanup function to stop the container regardless of how the
# script exits; combined with --rm, stopping also removes it.
cleanup() {
    docker stop "$CONTAINER_ID" >/dev/null
}
trap cleanup EXIT

# Execute the command provided by the user within the running container.
# The command runs as: "ollama <user-arguments>"
docker exec -it "$CONTAINER_ID" ollama "$@"

View File

@@ -1,25 +0,0 @@
#!/bin/bash

# Legacy all-in-one startup script: prepares the environment file, rebuilds
# and starts the Docker Compose stack, then applies database migrations.
# Assumes it is run from the directory containing docker-compose and .env.example.

# Check if .env file exists, if not, create from example
if [ ! -f .env ]; then
echo "Creating .env file from .env.example..."
cp .env.example .env
echo "Please update .env file with your configuration."
fi

# Stop any running containers
docker compose down

# Build and start containers in detached mode
docker compose up --build -d

# Wait for database to be ready
# NOTE(review): fixed sleep is a heuristic, not a readiness check
echo "Waiting for database to be ready..."
sleep 5

# Apply migrations automatically
echo "Applying database migrations..."
docker compose exec simstudio npm run db:push

echo "Sim Studio is now running at http://localhost:3000"
echo "To view logs, run: docker compose logs -f simstudio"

View File

@@ -0,0 +1,20 @@
import { create } from 'zustand'
import { createLogger } from '@/lib/logs/console-logger'
import { updateOllamaProviderModels } from '@/providers/utils'
const logger = createLogger('OllamaStore')
// Store shape for locally available Ollama models.
interface OllamaState {
  // Names of models currently available from the local Ollama instance.
  models: string[]
  // Replaces the model list and propagates it to the provider registry.
  setModels: (models: string[]) => void
}
/**
 * Global store of the Ollama model names currently available locally.
 * Setting the list also re-registers the models with the Ollama provider
 * so model dropdowns and routing stay in sync.
 */
export const useOllamaStore = create<OllamaState>((set) => ({
  models: [],
  setModels: (models) => {
    logger.info('Updating Ollama models', { models })
    // Store a copy so callers cannot mutate the array held in the store.
    set({ models: [...models] })
    // Update the providers when models change
    updateOllamaProviderModels(models)
  },
}))

70
start_simstudio_docker.sh Executable file
View File

@@ -0,0 +1,70 @@
#!/bin/bash
set -e

# Start the Sim Studio Docker Compose stack, optionally with a local Ollama
# LLM service (GPU-accelerated when a GPU is detected via nvidia-smi).

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
SIM_DIR="$SCRIPT_DIR/sim"

# Run compose commands from the repo root so the docker-compose file is found
# regardless of the caller's working directory.
cd "$SCRIPT_DIR"

# Function to display help
show_help() {
    echo "Usage: $0 [OPTIONS]"
    echo
    echo "Start Sim Studio with Docker containers"
    echo
    echo "Options:"
    echo "  -h, --help    Show this help message"
    echo "  --local       Use local LLM configuration with Ollama service"
    echo
    echo "Examples:"
    echo "  $0            # Start without local LLM"
    echo "  $0 --local    # Start with local LLM (requires GPU)"
    echo
    echo "Note: When using --local flag, GPU availability is automatically detected"
    echo "      and appropriate configuration is used."
    exit 0
}

# Parse command line arguments
LOCAL=false
while [[ "$#" -gt 0 ]]; do
    case $1 in
        -h|--help) show_help ;;
        --local) LOCAL=true ;;
        *) echo "Unknown parameter: $1"; echo "Use -h or --help for usage information"; exit 1 ;;
    esac
    shift
done

# Check if .env file exists, if not, create from example.
# Paths are quoted so the script works when the checkout path contains spaces.
if [ ! -f "$SIM_DIR/.env" ]; then
    echo "Creating .env file from .env.example..."
    cp "$SIM_DIR/.env.example" "$SIM_DIR/.env"
    echo "Please update .env file with your configuration."
else
    echo ".env file found."
fi

# Stop any running containers
docker compose down

# Build and start containers
if [ "$LOCAL" = true ]; then
    if nvidia-smi &> /dev/null; then
        # GPU available with local LLM
        docker compose --profile local-gpu up --build -d
    else
        # No GPU available with local LLM
        docker compose --profile local-cpu up --build -d
    fi
else
    docker compose up --build -d
fi

# Wait for database to be ready
# NOTE(review): fixed sleep is a heuristic, not a readiness check
echo "Waiting for database to be ready..."
sleep 5

# Apply migrations automatically
echo "Applying database migrations..."
docker compose exec simstudio npm run db:push

echo "Sim Studio is now running at http://localhost:3000"
echo "To view logs, run: docker compose logs -f simstudio"