refactor(ollama): rename OLLAMA_HOST to OLLAMA_URL (convention); update README and compose to match (#394)

* chore(docker): add OLLAMA_HOST environment variable to local and production configurations; update README for docker compose commands

* refactor(env): rename OLLAMA_HOST to OLLAMA_URL in configuration files and update related references
Aditya Tripathi authored on 2025-05-22 02:58:10 +05:30; committed by GitHub
parent a94fd8703f
commit 6d380c28e3
6 changed files with 12 additions and 6 deletions

View File

@@ -54,7 +54,7 @@ git clone https://github.com/simstudioai/sim.git
cd sim
# Start Sim Studio
-docker compose up -d --build
+docker compose -f docker-compose.prod.yml up -d
```
Access the application at [http://localhost:3000/](http://localhost:3000/)
@@ -73,10 +73,13 @@ To use local models with Sim Studio:
```bash
# With NVIDIA GPU support
-docker compose up --profile local-gpu -d --build
+docker compose -f docker-compose.ollama.yml up --profile local-gpu -d --build
# Without GPU (CPU only)
-docker compose up --profile local-cpu -d --build
+docker compose -f docker-compose.ollama.yml up --profile local-cpu -d --build
+# If hosting on a server, set OLLAMA_URL in docker-compose.prod.yml to the server's public IP (e.g. http://1.1.1.1:11434), then start again
+docker compose -f docker-compose.prod.yml up -d
```
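The remote-server note above assumes OLLAMA_URL carries a full URL, scheme and port included. A minimal sketch of a pre-flight check under that assumption (`checkOllamaUrl` is a hypothetical helper, not part of the repo):

```ts
// Hypothetical helper: sanity-check an OLLAMA_URL value before `docker compose up`.
function checkOllamaUrl(raw: string): URL {
  const url = new URL(raw) // throws on '1.1.1.1:11434', since a bare host:port has no valid scheme
  if (url.protocol !== 'http:' && url.protocol !== 'https:') {
    // catches 'localhost:11434', which URL parses with 'localhost:' as the protocol
    throw new Error(`OLLAMA_URL must be an http(s) URL, got '${raw}'`)
  }
  return url
}

checkOllamaUrl('http://1.1.1.1:11434') // ok: scheme, host, and port all present
```
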
### Option 3: Dev Containers

View File

@@ -53,7 +53,7 @@ export const env = createEnv({
JWT_SECRET: z.string().min(1).optional(),
BROWSERBASE_API_KEY: z.string().min(1).optional(),
BROWSERBASE_PROJECT_ID: z.string().min(1).optional(),
-OLLAMA_HOST: z.string().url().optional(),
+OLLAMA_URL: z.string().url().optional(),
SENTRY_ORG: z.string().optional(),
SENTRY_PROJECT: z.string().optional(),
SENTRY_AUTH_TOKEN: z.string().optional(),
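
For reference, a minimal sketch of what the new schema entry accepts, assuming zod v3 semantics (its `.url()` check tries the WHATWG `URL` parser under the hood):

```ts
import { z } from 'zod'

// The renamed entry: a full URL, and optional because the variable may be unset.
const OllamaUrl = z.string().url().optional()

OllamaUrl.parse('http://localhost:11434') // passes
OllamaUrl.parse(undefined)                // passes: optional
OllamaUrl.parse('1.1.1.1:11434')          // throws ZodError: a bare host:port is not a valid URL
```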

View File

@@ -138,7 +138,7 @@ const nextConfig: NextConfig = {
},
{
key: 'Content-Security-Policy',
-value: `default-src 'self'; script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.google.com https://apis.google.com https://*.vercel-scripts.com https://*.vercel-insights.com https://vercel.live https://*.vercel.live https://vercel.com https://*.vercel.app; style-src 'self' 'unsafe-inline' https://fonts.googleapis.com; img-src 'self' data: blob: https://*.googleusercontent.com https://*.google.com https://*.atlassian.com https://cdn.discordapp.com; media-src 'self' blob:; font-src 'self' https://fonts.gstatic.com; connect-src 'self' ${process.env.OLLAMA_HOST || 'http://localhost:11434'} https://api.browser-use.com https://*.googleapis.com https://*.amazonaws.com https://*.s3.amazonaws.com https://*.vercel-insights.com https://*.atlassian.com https://vercel.live https://*.vercel.live https://vercel.com https://*.vercel.app; frame-src https://drive.google.com https://*.google.com; frame-ancestors 'self'; form-action 'self'; base-uri 'self'; object-src 'none'`,
+value: `default-src 'self'; script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.google.com https://apis.google.com https://*.vercel-scripts.com https://*.vercel-insights.com https://vercel.live https://*.vercel.live https://vercel.com https://*.vercel.app; style-src 'self' 'unsafe-inline' https://fonts.googleapis.com; img-src 'self' data: blob: https://*.googleusercontent.com https://*.google.com https://*.atlassian.com https://cdn.discordapp.com; media-src 'self' blob:; font-src 'self' https://fonts.gstatic.com; connect-src 'self' ${env.OLLAMA_URL || 'http://localhost:11434'} https://api.browser-use.com https://*.googleapis.com https://*.amazonaws.com https://*.s3.amazonaws.com https://*.vercel-insights.com https://*.atlassian.com https://vercel.live https://*.vercel.live https://vercel.com https://*.vercel.app; frame-src https://drive.google.com https://*.google.com; frame-ancestors 'self'; form-action 'self'; base-uri 'self'; object-src 'none'`,
},
],
},
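
One caveat worth noting: the raw value of `env.OLLAMA_URL` is interpolated into `connect-src`, so a value with a trailing path could narrow that directive. A sketch of a defensive alternative (an assumption on my part, not what the config currently does) that normalizes to the origin first:

```ts
// Normalize the configured URL to its origin so a stray path or trailing
// slash in OLLAMA_URL cannot narrow the connect-src directive.
const ollamaSource = new URL(process.env.OLLAMA_URL ?? 'http://localhost:11434').origin

const connectSrc = `connect-src 'self' ${ollamaSource} https://api.browser-use.com`
```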

View File

@@ -1,4 +1,5 @@
import OpenAI from 'openai'
+import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console-logger'
import { useOllamaStore } from '@/stores/ollama/store'
import { executeTool } from '@/tools'
@@ -6,7 +7,7 @@ import { ProviderConfig, ProviderRequest, ProviderResponse, TimeSegment } from '
import { ModelsObject } from './types'
const logger = createLogger('OllamaProvider')
-const OLLAMA_HOST = process.env.OLLAMA_HOST || 'http://localhost:11434'
+const OLLAMA_HOST = env.OLLAMA_URL || 'http://localhost:11434'
export const ollamaProvider: ProviderConfig = {
id: 'ollama',
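
The provider now reads the host once at module load, falling back to localhost. A minimal sketch of the kind of lookup it can then perform, assuming Ollama's standard `GET /api/tags` endpoint and Node 18+ global `fetch`; the response shape is simplified here, and `process.env` stands in for the typed `env` module so the snippet is self-contained:

```ts
const OLLAMA_HOST = process.env.OLLAMA_URL ?? 'http://localhost:11434'

// List the models the configured Ollama instance has pulled.
async function listOllamaModels(): Promise<string[]> {
  const res = await fetch(`${OLLAMA_HOST}/api/tags`)
  if (!res.ok) {
    throw new Error(`Ollama unreachable at ${OLLAMA_HOST} (HTTP ${res.status})`)
  }
  const data = (await res.json()) as { models: { name: string }[] } // simplified shape
  return data.models.map((m) => m.name)
}
```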

View File

@@ -21,6 +21,7 @@ services:
- GITHUB_CLIENT_ID=${GITHUB_CLIENT_ID:-placeholder}
- GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-placeholder}
- RESEND_API_KEY=${RESEND_API_KEY:-placeholder}
+- OLLAMA_URL=${OLLAMA_URL:-http://localhost:11434}
depends_on:
db:
condition: service_healthy
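
The `${OLLAMA_URL:-http://localhost:11434}` interpolation falls back to localhost when the variable is unset or empty. In TypeScript terms that is `||`, not `??`, as this sketch of the equivalent shows:

```ts
// Compose's ${OLLAMA_URL:-default} substitutes the default when the variable
// is unset *or* empty, which matches `||` (an empty string is falsy).
const ollamaUrl = process.env.OLLAMA_URL || 'http://localhost:11434'
```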

View File

@@ -20,6 +20,7 @@ services:
- GITHUB_CLIENT_ID=${GITHUB_CLIENT_ID:-placeholder}
- GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-placeholder}
- RESEND_API_KEY=${RESEND_API_KEY:-placeholder}
+- OLLAMA_URL=${OLLAMA_URL:-http://localhost:11434}
depends_on:
db:
condition: service_healthy