Compare commits

1 commit

Otto
7e5b84cc5c fix(copilot): update homepage copy to focus on problem discovery (#11956)
## Summary
Update the Copilot homepage to shift from "what do you want to
automate?" to "tell me about your problems." This lowers the barrier to
engagement by letting users describe their work frustrations instead of
requiring them to identify automations themselves.

## Changes
| Element | Before | After |
|---------|--------|-------|
| Headline | "What do you want to automate?" | "Tell me about your work — I'll find what to automate." |
| Placeholder | "You can search or just ask - e.g. 'create a blog post outline'" | "What's your role and what eats up most of your day? e.g. 'I'm a real estate agent and I hate...'" |
| Button 1 | "Show me what I can automate" | "I don't know where to start, just ask me stuff" |
| Button 2 | "Design a custom workflow" | "I do the same thing every week and it's killing me" |
| Button 3 | "Help me with content creation" | "Help me find where I'm wasting my time" |
| Container | max-w-2xl | max-w-3xl |

> **Note on container width:** The `max-w-2xl` → `max-w-3xl` change is
just to keep the longer headline on one line. This works but may not be
the ideal solution — @lluis-xai should advise on the proper approach.
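
If the goal is only to keep the headline on one line at desktop widths, one alternative worth weighing (a sketch only, untested against this layout) is to keep `max-w-2xl` and control wrapping on the headline itself:

```tsx
// Sketch: prevent mid-sentence wrapping at md+ instead of widening the container.
// `md:whitespace-nowrap` and `text-balance` are stock Tailwind utilities; whether
// they fit this design system is exactly the question for @lluis-xai.
<Text variant="h3" className="mb-8 !font-normal text-balance md:whitespace-nowrap">
  Tell me about your work — I&apos;ll find what to automate.
</Text>
```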

## Why This Matters
The current UX assumes users know what they want to automate. In
reality, most users know what frustrates them but can't identify
automations. The current screen blocks Otto from starting the discovery
conversation that leads to useful recommendations.

## Files Changed
- `autogpt_platform/frontend/src/app/(platform)/copilot/page.tsx` —
headline, placeholder, container width
- `autogpt_platform/frontend/src/app/(platform)/copilot/helpers.ts` —
quick action button text

Resolves: [SECRT-1876](https://linear.app/autogpt/issue/SECRT-1876)

---------

Co-authored-by: Lluis Agusti <hi@llu.lu>
2026-02-04 17:38:58 +07:00
6 changed files with 60 additions and 35 deletions

`autogpt_platform/frontend/src/app/(platform)/copilot/helpers.ts`

@@ -26,8 +26,20 @@ export function buildCopilotChatUrl(prompt: string): string {
 export function getQuickActions(): string[] {
   return [
-    "Show me what I can automate",
-    "Design a custom workflow",
-    "Help me with content creation",
+    "I don't know where to start, just ask me stuff",
+    "I do the same thing every week and it's killing me",
+    "Help me find where I'm wasting my time",
   ];
 }
+
+export function getInputPlaceholder(width?: number) {
+  if (!width) return "What's your role and what eats up most of your day?";
+  if (width < 500) {
+    return "I'm a chef and I hate...";
+  }
+  if (width <= 1080) {
+    return "What's your role and what eats up most of your day?";
+  }
+  return "What's your role and what eats up most of your day? e.g. 'I'm a recruiter and I hate...'";
+}
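
For reference, a quick sketch of how the new `getInputPlaceholder` thresholds play out at common widths (assuming the function exactly as above):

```ts
import { getInputPlaceholder } from "./helpers";

getInputPlaceholder();     // no width yet (first render) -> "What's your role and what eats up most of your day?"
getInputPlaceholder(375);  // < 500                       -> "I'm a chef and I hate..."
getInputPlaceholder(768);  // 500 to 1080 (inclusive)     -> "What's your role and what eats up most of your day?"
getInputPlaceholder(1440); // > 1080                      -> long variant with the "I'm a recruiter and I hate..." example
```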

`autogpt_platform/frontend/src/app/(platform)/copilot/page.tsx`

@@ -6,7 +6,9 @@ import { Text } from "@/components/atoms/Text/Text";
 import { Chat } from "@/components/contextual/Chat/Chat";
 import { ChatInput } from "@/components/contextual/Chat/components/ChatInput/ChatInput";
 import { Dialog } from "@/components/molecules/Dialog/Dialog";
+import { useEffect, useState } from "react";
 import { useCopilotStore } from "./copilot-page-store";
+import { getInputPlaceholder } from "./helpers";
 import { useCopilotPage } from "./useCopilotPage";
 
 export default function CopilotPage() {
@@ -14,8 +16,25 @@ export default function CopilotPage() {
   const isInterruptModalOpen = useCopilotStore((s) => s.isInterruptModalOpen);
   const confirmInterrupt = useCopilotStore((s) => s.confirmInterrupt);
   const cancelInterrupt = useCopilotStore((s) => s.cancelInterrupt);
+  const [inputPlaceholder, setInputPlaceholder] = useState(
+    getInputPlaceholder(),
+  );
+
+  useEffect(() => {
+    const handleResize = () => {
+      setInputPlaceholder(getInputPlaceholder(window.innerWidth));
+    };
+    handleResize();
+    window.addEventListener("resize", handleResize);
+    return () => window.removeEventListener("resize", handleResize);
+  }, []);
 
   const { greetingName, quickActions, isLoading, hasSession, initialPrompt } =
     state;
   const {
     handleQuickAction,
     startChatWithPrompt,
@@ -73,7 +92,7 @@ export default function CopilotPage() {
   }
 
   return (
-    <div className="flex h-full flex-1 items-center justify-center overflow-y-auto bg-[#f8f8f9] px-6 py-10">
+    <div className="flex h-full flex-1 items-center justify-center overflow-y-auto bg-[#f8f8f9] px-3 py-5 md:px-6 md:py-10">
      <div className="w-full text-center">
        {isLoading ? (
          <div className="mx-auto max-w-2xl">
@@ -90,25 +109,25 @@
           </div>
         ) : (
           <>
-            <div className="mx-auto max-w-2xl">
+            <div className="mx-auto max-w-3xl">
               <Text
                 variant="h3"
-                className="mb-3 !text-[1.375rem] text-zinc-700"
+                className="mb-1 !text-[1.375rem] text-zinc-700"
               >
                 Hey, <span className="text-violet-600">{greetingName}</span>
               </Text>
               <Text variant="h3" className="mb-8 !font-normal">
-                What do you want to automate?
+                Tell me about your work — I&apos;ll find what to automate.
               </Text>
               <div className="mb-6">
                 <ChatInput
                   onSend={startChatWithPrompt}
-                  placeholder='You can search or just ask - e.g. "create a blog post outline"'
+                  placeholder={inputPlaceholder}
                 />
               </div>
             </div>
-            <div className="flex flex-nowrap items-center justify-center gap-3 overflow-x-auto [-ms-overflow-style:none] [scrollbar-width:none] [&::-webkit-scrollbar]:hidden">
+            <div className="flex flex-wrap items-center justify-center gap-3 overflow-x-auto [-ms-overflow-style:none] [scrollbar-width:none] [&::-webkit-scrollbar]:hidden">
               {quickActions.map((action) => (
                 <Button
                   key={action}
@@ -116,7 +135,7 @@ export default function CopilotPage() {
                   variant="outline"
                   size="small"
                   onClick={() => handleQuickAction(action)}
-                  className="h-auto shrink-0 border-zinc-600 !px-4 !py-2 text-[1rem] text-zinc-600"
+                  className="h-auto shrink-0 border-zinc-300 px-3 py-2 text-[.9rem] text-zinc-600"
                 >
                   {action}
                 </Button>
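
The resize wiring above could also be pulled into a small hook if it grows. A minimal sketch (the `useResponsivePlaceholder` name is hypothetical, not part of this PR):

```tsx
import { useEffect, useState } from "react";
import { getInputPlaceholder } from "./helpers";

// Hypothetical extraction of the resize logic from CopilotPage above.
function useResponsivePlaceholder(): string {
  // Start with the width-agnostic default so the first render is SSR-safe.
  const [placeholder, setPlaceholder] = useState(getInputPlaceholder());

  useEffect(() => {
    const handleResize = () =>
      setPlaceholder(getInputPlaceholder(window.innerWidth));
    handleResize(); // pick the width-aware text once we're in the browser
    window.addEventListener("resize", handleResize);
    return () => window.removeEventListener("resize", handleResize);
  }, []);

  return placeholder;
}
```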

`ChatContainer`

@@ -2,7 +2,6 @@ import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessi
 import { Button } from "@/components/atoms/Button/Button";
 import { Text } from "@/components/atoms/Text/Text";
 import { Dialog } from "@/components/molecules/Dialog/Dialog";
-import { useBreakpoint } from "@/lib/hooks/useBreakpoint";
 import { cn } from "@/lib/utils";
 import { GlobeHemisphereEastIcon } from "@phosphor-icons/react";
 import { useEffect } from "react";
@@ -56,10 +55,6 @@ export function ChatContainer({
     onStreamingChange?.(isStreaming);
   }, [isStreaming, onStreamingChange]);
 
-  const breakpoint = useBreakpoint();
-  const isMobile =
-    breakpoint === "base" || breakpoint === "sm" || breakpoint === "md";
-
   return (
     <div
       className={cn(
@@ -127,11 +122,7 @@ export function ChatContainer({
           disabled={isStreaming || !sessionId}
           isStreaming={isStreaming}
           onStop={stopStreaming}
-          placeholder={
-            isMobile
-              ? "You can search or just ask"
-              : 'You can search or just ask — e.g. "create a blog post outline"'
-          }
+          placeholder="What else can I help with?"
         />
       </div>
     </div>

`ChatInput`

@@ -74,19 +74,20 @@ export function ChatInput({
         hasMultipleLines ? "rounded-xlarge" : "rounded-full",
       )}
     >
+      {!value && !isRecording && (
+        <div
+          className="pointer-events-none absolute inset-0 top-0.5 flex items-center justify-start pl-14 text-[1rem] text-zinc-400"
+          aria-hidden="true"
+        >
+          {isTranscribing ? "Transcribing..." : placeholder}
+        </div>
+      )}
       <textarea
         id={inputId}
         aria-label="Chat message input"
         value={value}
         onChange={handleChange}
         onKeyDown={handleKeyDown}
-        placeholder={
-          isTranscribing
-            ? "Transcribing..."
-            : isRecording
-              ? ""
-              : placeholder
-        }
         disabled={isInputDisabled}
         rows={1}
         className={cn(
@@ -122,13 +123,14 @@ export function ChatInput({
           size="icon"
           aria-label={isRecording ? "Stop recording" : "Start recording"}
           onClick={toggleRecording}
-          disabled={disabled || isTranscribing}
+          disabled={disabled || isTranscribing || isStreaming}
           className={cn(
             isRecording
               ? "animate-pulse border-red-500 bg-red-500 text-white hover:border-red-600 hover:bg-red-600"
               : isTranscribing
                 ? "border-zinc-300 bg-zinc-100 text-zinc-400"
                 : "border-zinc-300 bg-white text-zinc-500 hover:border-zinc-400 hover:bg-zinc-50 hover:text-zinc-700",
+            isStreaming && "opacity-40",
           )}
         >
           {isTranscribing ? (
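
The placeholder moves from the native `placeholder` attribute to an absolutely positioned overlay, presumably so the hint can be positioned and styled independently of the textarea's own text inset. A minimal standalone sketch of the pattern (hypothetical component, not this codebase):

```tsx
import { useState } from "react";

// Hypothetical minimal version of the overlay-placeholder pattern used above.
export function OverlayPlaceholderInput({ hint }: { hint: string }) {
  const [value, setValue] = useState("");
  return (
    <div className="relative">
      {/* Shown only while empty; pointer-events-none lets clicks reach the textarea. */}
      {!value && (
        <div
          aria-hidden="true"
          className="pointer-events-none absolute inset-0 flex items-center pl-14 text-zinc-400"
        >
          {hint}
        </div>
      )}
      <textarea
        aria-label="Message"
        value={value}
        onChange={(e) => setValue(e.target.value)}
        rows={1}
        className="w-full pl-14"
      />
    </div>
  );
}
```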

`AudioWaveform`

@@ -38,8 +38,8 @@ export function AudioWaveform({
     // Create audio context and analyser
     const audioContext = new AudioContext();
     const analyser = audioContext.createAnalyser();
-    analyser.fftSize = 512;
-    analyser.smoothingTimeConstant = 0.8;
+    analyser.fftSize = 256;
+    analyser.smoothingTimeConstant = 0.3;
 
     // Connect the stream to the analyser
     const source = audioContext.createMediaStreamSource(stream);
@@ -73,10 +73,11 @@ export function AudioWaveform({
         maxAmplitude = Math.max(maxAmplitude, amplitude);
       }
 
-      // Map amplitude (0-128) to bar height
-      const normalized = (maxAmplitude / 128) * 255;
-      const height =
-        minBarHeight + (normalized / 255) * (maxBarHeight - minBarHeight);
+      // Normalize amplitude (0-128 range) to 0-1
+      const normalized = maxAmplitude / 128;
+      // Apply sensitivity boost (multiply by 4) and use sqrt curve to amplify quiet sounds
+      const boosted = Math.min(1, Math.sqrt(normalized) * 4);
+      const height = minBarHeight + boosted * (maxBarHeight - minBarHeight);
       newBars.push(height);
     }
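
For intuition, here is roughly what the new curve does to a few amplitude readings (sketch values, assuming the 0-128 range noted in the comment):

```ts
// maxAmplitude -> sqrt(maxAmplitude / 128) * 4, clamped to 1
const boost = (maxAmplitude: number) =>
  Math.min(1, Math.sqrt(maxAmplitude / 128) * 4);

boost(2);   // = 0.5 (quiet speech already reaches half bar height)
boost(8);   // = 1.0 (moderate input hits full height)
boost(128); // = 1.0 (clamped; loud input cannot exceed the max)
```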

`useVoiceRecording`

@@ -224,7 +224,7 @@ export function useVoiceRecording({
     [value, isTranscribing, toggleRecording, baseHandleKeyDown],
   );
 
-  const showMicButton = isSupported && !isStreaming;
+  const showMicButton = isSupported;
   const isInputDisabled = disabled || isStreaming || isTranscribing;
 
   // Cleanup on unmount