diff --git a/.github/workflows/platform-backend-ci.yml b/.github/workflows/platform-backend-ci.yml
index 1f0c6da3dd..22d1e91ead 100644
--- a/.github/workflows/platform-backend-ci.yml
+++ b/.github/workflows/platform-backend-ci.yml
@@ -41,13 +41,18 @@ jobs:
ports:
- 6379:6379
rabbitmq:
- image: rabbitmq:3.12-management
+ image: rabbitmq:4.1.4
ports:
- 5672:5672
- - 15672:15672
env:
RABBITMQ_DEFAULT_USER: ${{ env.RABBITMQ_DEFAULT_USER }}
RABBITMQ_DEFAULT_PASS: ${{ env.RABBITMQ_DEFAULT_PASS }}
+ options: >-
+ --health-cmd "rabbitmq-diagnostics -q ping"
+ --health-interval 30s
+ --health-timeout 10s
+ --health-retries 5
+ --health-start-period 10s
clamav:
image: clamav/clamav-debian:latest
ports:
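
Note on the RabbitMQ service change above: pinning `rabbitmq:4.1.4` and adding a `rabbitmq-diagnostics -q ping` health check means dependent job steps only start once the broker actually accepts connections. The sketch below is an illustrative client-side equivalent of that readiness wait, not code from this PR; the host, port, and credentials are placeholder assumptions.

```python
# Illustrative readiness probe mirroring the container health check above.
# All connection values here are placeholders, not values from this PR.
import time

import pika


def wait_for_rabbitmq(host="localhost", port=5672, user="guest", password="guest", retries=5):
    params = pika.ConnectionParameters(
        host=host,
        port=port,
        credentials=pika.PlainCredentials(user, password),
    )
    for attempt in range(retries):
        try:
            pika.BlockingConnection(params).close()  # broker completed an AMQP handshake
            return True
        except pika.exceptions.AMQPConnectionError:
            time.sleep(2**attempt)  # back off before the next attempt
    return False
```
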
diff --git a/.github/workflows/platform-frontend-ci.yml b/.github/workflows/platform-frontend-ci.yml
index 4bf8a2b80c..e788696f9b 100644
--- a/.github/workflows/platform-frontend-ci.yml
+++ b/.github/workflows/platform-frontend-ci.yml
@@ -6,10 +6,16 @@ on:
paths:
- ".github/workflows/platform-frontend-ci.yml"
- "autogpt_platform/frontend/**"
+ - "autogpt_platform/backend/Dockerfile"
+ - "autogpt_platform/docker-compose.yml"
+ - "autogpt_platform/docker-compose.platform.yml"
pull_request:
paths:
- ".github/workflows/platform-frontend-ci.yml"
- "autogpt_platform/frontend/**"
+ - "autogpt_platform/backend/Dockerfile"
+ - "autogpt_platform/docker-compose.yml"
+ - "autogpt_platform/docker-compose.platform.yml"
merge_group:
workflow_dispatch:
diff --git a/autogpt_platform/backend/Dockerfile b/autogpt_platform/backend/Dockerfile
index 05a8d4858b..2a9696d3e5 100644
--- a/autogpt_platform/backend/Dockerfile
+++ b/autogpt_platform/backend/Dockerfile
@@ -53,63 +53,6 @@ COPY autogpt_platform/backend/backend/data/partial_types.py ./backend/data/parti
COPY autogpt_platform/backend/gen_prisma_types_stub.py ./
RUN poetry run prisma generate && poetry run gen-prisma-stub
-# ============================== BACKEND SERVER ============================== #
-
-FROM debian:13-slim AS server
-
-WORKDIR /app
-
-ENV POETRY_HOME=/opt/poetry \
- POETRY_NO_INTERACTION=1 \
- POETRY_VIRTUALENVS_CREATE=true \
- POETRY_VIRTUALENVS_IN_PROJECT=true \
- DEBIAN_FRONTEND=noninteractive
-ENV PATH=/opt/poetry/bin:$PATH
-
-# Install Python, FFmpeg, ImageMagick, and CLI tools for agent use.
-# bubblewrap provides OS-level sandbox (whitelist-only FS + no network)
-# for the bash_exec MCP tool.
-# Using --no-install-recommends saves ~650MB by skipping unnecessary deps like llvm, mesa, etc.
-RUN apt-get update && apt-get install -y --no-install-recommends \
- python3.13 \
- python3-pip \
- ffmpeg \
- imagemagick \
- jq \
- ripgrep \
- tree \
- bubblewrap \
- && rm -rf /var/lib/apt/lists/*
-
-COPY --from=builder /usr/local/lib/python3* /usr/local/lib/python3*
-COPY --from=builder /usr/local/bin/poetry /usr/local/bin/poetry
-# Copy Node.js installation for Prisma
-COPY --from=builder /usr/bin/node /usr/bin/node
-COPY --from=builder /usr/lib/node_modules /usr/lib/node_modules
-COPY --from=builder /usr/bin/npm /usr/bin/npm
-COPY --from=builder /usr/bin/npx /usr/bin/npx
-COPY --from=builder /root/.cache/prisma-python/binaries /root/.cache/prisma-python/binaries
-
-WORKDIR /app/autogpt_platform/backend
-
-# Copy only the .venv from builder (not the entire /app directory)
-# The .venv includes the generated Prisma client
-COPY --from=builder /app/autogpt_platform/backend/.venv ./.venv
-ENV PATH="/app/autogpt_platform/backend/.venv/bin:$PATH"
-
-# Copy dependency files + autogpt_libs (path dependency)
-COPY autogpt_platform/autogpt_libs /app/autogpt_platform/autogpt_libs
-COPY autogpt_platform/backend/poetry.lock autogpt_platform/backend/pyproject.toml ./
-
-# Copy backend code + docs (for Copilot docs search)
-COPY autogpt_platform/backend ./
-COPY docs /app/docs
-RUN poetry install --no-ansi --only-root
-
-ENV PORT=8000
-
-CMD ["poetry", "run", "rest"]
-
# =============================== DB MIGRATOR =============================== #
# Lightweight migrate stage - only needs Prisma CLI, not full Python environment
@@ -141,3 +84,59 @@ COPY autogpt_platform/backend/schema.prisma ./
COPY autogpt_platform/backend/backend/data/partial_types.py ./backend/data/partial_types.py
COPY autogpt_platform/backend/gen_prisma_types_stub.py ./
COPY autogpt_platform/backend/migrations ./migrations
+
+# ============================== BACKEND SERVER ============================== #
+
+FROM debian:13-slim AS server
+
+WORKDIR /app
+
+ENV DEBIAN_FRONTEND=noninteractive
+
+# Install Python, FFmpeg, ImageMagick, and CLI tools for agent use.
+# bubblewrap provides OS-level sandbox (whitelist-only FS + no network)
+# for the bash_exec MCP tool.
+# Using --no-install-recommends saves ~650MB by skipping unnecessary deps like llvm, mesa, etc.
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ python3.13 \
+ python3-pip \
+ ffmpeg \
+ imagemagick \
+ jq \
+ ripgrep \
+ tree \
+ bubblewrap \
+ && rm -rf /var/lib/apt/lists/*
+
+# Copy poetry (build-time only, for `poetry install --only-root` to create entry points)
+COPY --from=builder /usr/local/lib/python3* /usr/local/lib/python3*
+COPY --from=builder /usr/local/bin/poetry /usr/local/bin/poetry
+# Copy Node.js installation for Prisma
+COPY --from=builder /usr/bin/node /usr/bin/node
+COPY --from=builder /usr/lib/node_modules /usr/lib/node_modules
+COPY --from=builder /usr/bin/npm /usr/bin/npm
+COPY --from=builder /usr/bin/npx /usr/bin/npx
+COPY --from=builder /root/.cache/prisma-python/binaries /root/.cache/prisma-python/binaries
+
+WORKDIR /app/autogpt_platform/backend
+
+# Copy only the .venv from builder (not the entire /app directory)
+# The .venv includes the generated Prisma client
+COPY --from=builder /app/autogpt_platform/backend/.venv ./.venv
+ENV PATH="/app/autogpt_platform/backend/.venv/bin:$PATH"
+
+# Copy dependency files + autogpt_libs (path dependency)
+COPY autogpt_platform/autogpt_libs /app/autogpt_platform/autogpt_libs
+COPY autogpt_platform/backend/poetry.lock autogpt_platform/backend/pyproject.toml ./
+
+# Copy backend code + docs (for Copilot docs search)
+COPY autogpt_platform/backend ./
+COPY docs /app/docs
+# Install the project package to create entry point scripts in .venv/bin/
+# (e.g., rest, executor, ws, db, scheduler, notification - see [tool.poetry.scripts])
+RUN POETRY_VIRTUALENVS_CREATE=true POETRY_VIRTUALENVS_IN_PROJECT=true \
+ poetry install --no-ansi --only-root
+
+ENV PORT=8000
+
+CMD ["rest"]
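
Note on the new `CMD ["rest"]`: it works without `poetry run` because `poetry install --only-root` writes console-script wrappers into `.venv/bin/`, which is already on `PATH`. Roughly, each generated wrapper looks like the sketch below; the target callable (`backend.rest:main`) is an assumption inferred from the old `python -m backend.rest` command, not taken from this diff.

```python
#!/app/autogpt_platform/backend/.venv/bin/python3
# Approximate shape of the generated .venv/bin/rest console script.
# The import target is an assumption (inferred from the old
# "python -m backend.rest" command); the real [tool.poetry.scripts]
# entry in pyproject.toml may point elsewhere.
import sys

from backend.rest import main

if __name__ == "__main__":
    sys.exit(main())
```
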
diff --git a/autogpt_platform/backend/backend/api/features/chat/routes.py b/autogpt_platform/backend/backend/api/features/chat/routes.py
index 5ab78aa7c3..48e4ecc4f0 100644
--- a/autogpt_platform/backend/backend/api/features/chat/routes.py
+++ b/autogpt_platform/backend/backend/api/features/chat/routes.py
@@ -23,6 +23,7 @@ from .model import (
ChatSession,
append_and_save_message,
create_chat_session,
+ delete_chat_session,
get_chat_session,
get_user_sessions,
)
@@ -211,6 +212,43 @@ async def create_session(
)
+@router.delete(
+ "/sessions/{session_id}",
+ dependencies=[Security(auth.requires_user)],
+ status_code=204,
+ responses={404: {"description": "Session not found or access denied"}},
+)
+async def delete_session(
+ session_id: str,
+ user_id: Annotated[str, Security(auth.get_user_id)],
+) -> Response:
+ """
+ Delete a chat session.
+
+ Permanently removes a chat session and all its messages.
+ Only the owner can delete their sessions.
+
+ Args:
+ session_id: The session ID to delete.
+ user_id: The authenticated user's ID.
+
+ Returns:
+ 204 No Content on success.
+
+ Raises:
+ HTTPException: 404 if session not found or not owned by user.
+ """
+ deleted = await delete_chat_session(session_id, user_id)
+
+ if not deleted:
+ raise HTTPException(
+ status_code=404,
+ detail=f"Session {session_id} not found or access denied",
+ )
+
+ return Response(status_code=204)
+
+
@router.get(
"/sessions/{session_id}",
)
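
The new endpoint delegates to `delete_chat_session`, imported from `.model` above but not shown in this diff. A minimal sketch of what it presumably does is below, assuming a Prisma model named `ChatSession` with `id` and `userId` columns; the actual model and field names in `model.py` may differ.

```python
# Hypothetical sketch of delete_chat_session (not part of this diff).
# Model and field names (ChatSession, id, userId) are assumptions.
from prisma.models import ChatSession


async def delete_chat_session(session_id: str, user_id: str) -> bool:
    # Scoping the delete to the owner means a foreign session_id deletes
    # nothing, which the route surfaces as a 404 rather than a data leak.
    deleted_count = await ChatSession.prisma().delete_many(
        where={"id": session_id, "userId": user_id},
    )
    return deleted_count > 0
```
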
diff --git a/autogpt_platform/backend/backend/api/features/chat/tools/feature_requests.py b/autogpt_platform/backend/backend/api/features/chat/tools/feature_requests.py
index 95f1eb1fbe..8346df1177 100644
--- a/autogpt_platform/backend/backend/api/features/chat/tools/feature_requests.py
+++ b/autogpt_platform/backend/backend/api/features/chat/tools/feature_requests.py
@@ -104,8 +104,8 @@ def _get_linear_config() -> tuple[LinearClient, str, str]:
Raises RuntimeError if any required setting is missing.
"""
secrets = _get_settings().secrets
- if not secrets.linear_api_key:
- raise RuntimeError("LINEAR_API_KEY is not configured")
+ if not secrets.copilot_linear_api_key:
+ raise RuntimeError("COPILOT_LINEAR_API_KEY is not configured")
if not secrets.linear_feature_request_project_id:
raise RuntimeError("LINEAR_FEATURE_REQUEST_PROJECT_ID is not configured")
if not secrets.linear_feature_request_team_id:
@@ -114,7 +114,7 @@ def _get_linear_config() -> tuple[LinearClient, str, str]:
credentials = APIKeyCredentials(
id="system-linear",
provider="linear",
- api_key=SecretStr(secrets.linear_api_key),
+ api_key=SecretStr(secrets.copilot_linear_api_key),
title="System Linear API Key",
)
client = LinearClient(credentials=credentials)
diff --git a/autogpt_platform/backend/backend/util/settings.py b/autogpt_platform/backend/backend/util/settings.py
index 6777448406..f241a7dcbb 100644
--- a/autogpt_platform/backend/backend/util/settings.py
+++ b/autogpt_platform/backend/backend/util/settings.py
@@ -662,7 +662,7 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
mem0_api_key: str = Field(default="", description="Mem0 API key")
elevenlabs_api_key: str = Field(default="", description="ElevenLabs API key")
- linear_api_key: str = Field(
+ copilot_linear_api_key: str = Field(
default="", description="Linear API key for system-level operations"
)
linear_feature_request_project_id: str = Field(
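
Renaming this field also changes the environment variable that deployments must set. Assuming `Secrets` behaves like a standard pydantic `BaseSettings` subclass with no env prefix (field names map case-insensitively to environment variables), configuration moves from `LINEAR_API_KEY` to `COPILOT_LINEAR_API_KEY`:

```python
# Usage sketch: the renamed field now reads COPILOT_LINEAR_API_KEY.
# Assumes Secrets loads straight from the environment with no env_prefix.
import os

os.environ["COPILOT_LINEAR_API_KEY"] = "lin_api_placeholder"  # placeholder value

from backend.util.settings import Secrets

assert Secrets().copilot_linear_api_key == "lin_api_placeholder"
```
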
diff --git a/autogpt_platform/backend/docker-compose.test.yaml b/autogpt_platform/backend/docker-compose.test.yaml
index 259d52c497..5944bf37ee 100644
--- a/autogpt_platform/backend/docker-compose.test.yaml
+++ b/autogpt_platform/backend/docker-compose.test.yaml
@@ -53,7 +53,7 @@ services:
rabbitmq:
<<: *agpt-services
- image: rabbitmq:management
+ image: rabbitmq:4.1.4
container_name: rabbitmq
healthcheck:
test: rabbitmq-diagnostics -q ping
@@ -66,7 +66,6 @@ services:
- RABBITMQ_DEFAULT_PASS=k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7
ports:
- "5672:5672"
- - "15672:15672"
clamav:
image: clamav/clamav-debian:latest
ports:
diff --git a/autogpt_platform/docker-compose.platform.yml b/autogpt_platform/docker-compose.platform.yml
index bab92d4693..a104afa63b 100644
--- a/autogpt_platform/docker-compose.platform.yml
+++ b/autogpt_platform/docker-compose.platform.yml
@@ -75,7 +75,7 @@ services:
timeout: 5s
retries: 5
rabbitmq:
- image: rabbitmq:management
+ image: rabbitmq:4.1.4
container_name: rabbitmq
healthcheck:
test: rabbitmq-diagnostics -q ping
@@ -88,14 +88,13 @@ services:
<<: *backend-env
ports:
- "5672:5672"
- - "15672:15672"
rest_server:
build:
context: ../
dockerfile: autogpt_platform/backend/Dockerfile
target: server
- command: ["python", "-m", "backend.rest"]
+ command: ["rest"] # points to entry in [tool.poetry.scripts] in pyproject.toml
develop:
watch:
- path: ./
@@ -128,7 +127,7 @@ services:
context: ../
dockerfile: autogpt_platform/backend/Dockerfile
target: server
- command: ["python", "-m", "backend.exec"]
+ command: ["executor"] # points to entry in [tool.poetry.scripts] in pyproject.toml
develop:
watch:
- path: ./
@@ -163,7 +162,7 @@ services:
context: ../
dockerfile: autogpt_platform/backend/Dockerfile
target: server
- command: ["python", "-m", "backend.ws"]
+ command: ["ws"] # points to entry in [tool.poetry.scripts] in pyproject.toml
develop:
watch:
- path: ./
@@ -196,7 +195,7 @@ services:
context: ../
dockerfile: autogpt_platform/backend/Dockerfile
target: server
- command: ["python", "-m", "backend.db"]
+ command: ["db"] # points to entry in [tool.poetry.scripts] in pyproject.toml
develop:
watch:
- path: ./
@@ -225,7 +224,7 @@ services:
context: ../
dockerfile: autogpt_platform/backend/Dockerfile
target: server
- command: ["python", "-m", "backend.scheduler"]
+ command: ["scheduler"] # points to entry in [tool.poetry.scripts] in pyproject.toml
develop:
watch:
- path: ./
@@ -273,7 +272,7 @@ services:
context: ../
dockerfile: autogpt_platform/backend/Dockerfile
target: server
- command: ["python", "-m", "backend.notification"]
+ command: ["notification"] # points to entry in [tool.poetry.scripts] in pyproject.toml
develop:
watch:
- path: ./
diff --git a/autogpt_platform/frontend/src/app/(platform)/copilot/CopilotPage.tsx b/autogpt_platform/frontend/src/app/(platform)/copilot/CopilotPage.tsx
index 0d403b1a79..35b34890ce 100644
--- a/autogpt_platform/frontend/src/app/(platform)/copilot/CopilotPage.tsx
+++ b/autogpt_platform/frontend/src/app/(platform)/copilot/CopilotPage.tsx
@@ -1,6 +1,8 @@
"use client";
import { SidebarProvider } from "@/components/ui/sidebar";
+// TODO: Replace with modern Dialog component when available
+import DeleteConfirmDialog from "@/components/__legacy__/delete-confirm-dialog";
import { ChatContainer } from "./components/ChatContainer/ChatContainer";
import { ChatSidebar } from "./components/ChatSidebar/ChatSidebar";
import { MobileDrawer } from "./components/MobileDrawer/MobileDrawer";
@@ -31,6 +33,12 @@ export function CopilotPage() {
handleDrawerOpenChange,
handleSelectSession,
handleNewChat,
+ // Delete functionality
+ sessionToDelete,
+ isDeleting,
+ handleDeleteClick,
+ handleConfirmDelete,
+ handleCancelDelete,
} = useCopilotPage();
if (isUserLoading || !isLoggedIn) {
@@ -48,7 +56,19 @@ export function CopilotPage() {
>
{!isMobile &&
-            No conversations yet
-          ) : (
-            sessions.map((session) => (
-            ))
+          <>
+            No conversations yet
+          ) : (
+            sessions.map((session) => (