Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-08 22:48:14 -05:00)
feat(copilot): add training interface (#1445)
* progress
* cleanup UI
* progress
* fix trigger mode in yaml + copilot side
* persist user settings
* wrap operations correctly
* add trigger mode to add op
* remove misplaced comment
* add sent notification
* remove unused tab
Committed by: GitHub
Parent: 2ee27f972e
Commit: 094dae3d3f
apps/sim/app/api/copilot/training/route.ts (new file, 103 lines)
@@ -0,0 +1,103 @@
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { env } from '@/lib/env'
import { createLogger } from '@/lib/logs/console/logger'

const logger = createLogger('CopilotTrainingAPI')

// Schema for the request body
const TrainingDataSchema = z.object({
  title: z.string().min(1),
  prompt: z.string().min(1),
  input: z.any(), // Workflow state (start)
  output: z.any(), // Workflow state (end)
  operations: z.any(),
})

export async function POST(request: NextRequest) {
  try {
    // Check for required environment variables
    const baseUrl = env.AGENT_INDEXER_URL
    if (!baseUrl) {
      logger.error('Missing AGENT_INDEXER_URL environment variable')
      return NextResponse.json({ error: 'Agent indexer not configured' }, { status: 500 })
    }

    const apiKey = env.AGENT_INDEXER_API_KEY
    if (!apiKey) {
      logger.error('Missing AGENT_INDEXER_API_KEY environment variable')
      return NextResponse.json(
        { error: 'Agent indexer authentication not configured' },
        { status: 500 }
      )
    }

    // Parse and validate request body
    const body = await request.json()
    const validationResult = TrainingDataSchema.safeParse(body)

    if (!validationResult.success) {
      logger.warn('Invalid training data format', { errors: validationResult.error.errors })
      return NextResponse.json(
        {
          error: 'Invalid training data format',
          details: validationResult.error.errors,
        },
        { status: 400 }
      )
    }

    const { title, prompt, input, output, operations } = validationResult.data

    logger.info('Sending training data to agent indexer', {
      title,
      operationsCount: operations.length,
    })

    const wrappedOperations = {
      operations: operations,
    }

    // Forward to agent indexer
    const upstreamUrl = `${baseUrl}/operations/add`
    const upstreamResponse = await fetch(upstreamUrl, {
      method: 'POST',
      headers: {
        'x-api-key': apiKey,
        'content-type': 'application/json',
      },
      body: JSON.stringify({
        title,
        prompt,
        input,
        output,
        operations: wrappedOperations,
      }),
    })

    const responseData = await upstreamResponse.json()

    if (!upstreamResponse.ok) {
      logger.error('Agent indexer rejected the data', {
        status: upstreamResponse.status,
        response: responseData,
      })
      return NextResponse.json(responseData, { status: upstreamResponse.status })
    }

    logger.info('Successfully sent training data to agent indexer', {
      title,
      response: responseData,
    })

    return NextResponse.json(responseData)
  } catch (error) {
    logger.error('Failed to send training data to agent indexer', { error })
    return NextResponse.json(
      {
        error: error instanceof Error ? error.message : 'Failed to send training data',
      },
      { status: 502 }
    )
  }
}
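For reference, a minimal client-side sketch of a POST to this route. The field shapes mirror TrainingDataSchema and the way the training modal later calls the route (YAML strings for input/output, an EditOperation array for operations); the concrete values are placeholders, not data from this commit.

// Hypothetical smoke test for the training route; field values are placeholders.
async function sendSampleTrainingRecord(): Promise<void> {
  const response = await fetch('/api/copilot/training', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      title: 'Add a webhook trigger', // required, non-empty
      prompt: 'Turn the starter block into a webhook trigger', // required, non-empty
      input: 'blocks: {}', // start state (the modal sends a YAML string here)
      output: 'blocks: {}', // end state (also a YAML string in practice)
      operations: [
        { operation_type: 'edit', block_id: 'starter', params: { triggerMode: true } },
      ],
    }),
  })
  if (!response.ok) {
    throw new Error(`Training upload failed with status ${response.status}`)
  }
}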
@@ -26,6 +26,8 @@ const SettingsSchema = z.object({
    })
    .optional(),
  billingUsageNotificationsEnabled: z.boolean().optional(),
  showFloatingControls: z.boolean().optional(),
  showTrainingControls: z.boolean().optional(),
})

// Default settings values
@@ -38,6 +40,8 @@ const defaultSettings = {
  telemetryEnabled: true,
  emailPreferences: {},
  billingUsageNotificationsEnabled: true,
  showFloatingControls: true,
  showTrainingControls: false,
}

export async function GET() {
@@ -0,0 +1,40 @@
'use client'

import { useEffect, useState } from 'react'
import { getEnv, isTruthy } from '@/lib/env'
import { useCopilotTrainingStore } from '@/stores/copilot-training/store'
import { useGeneralStore } from '@/stores/settings/general/store'
import { TrainingFloatingButton } from './training-floating-button'
import { TrainingModal } from './training-modal'

/**
 * Main training controls component that manages the training UI.
 * Only renders if the NEXT_PUBLIC_COPILOT_TRAINING_ENABLED env var is set AND the user has enabled it in settings.
 */
export function TrainingControls() {
  const [isEnvEnabled, setIsEnvEnabled] = useState(false)
  const showTrainingControls = useGeneralStore((state) => state.showTrainingControls)
  const { isTraining, showModal, toggleModal } = useCopilotTrainingStore()

  // Check environment variable on mount
  useEffect(() => {
    // Use getEnv to check if training is enabled
    const trainingEnabled = isTruthy(getEnv('NEXT_PUBLIC_COPILOT_TRAINING_ENABLED'))
    setIsEnvEnabled(trainingEnabled)
  }, [])

  // Don't render unless both the env var and the user setting enable training
  if (!isEnvEnabled || !showTrainingControls) {
    return null
  }

  return (
    <>
      {/* Floating button to start/stop training */}
      <TrainingFloatingButton isTraining={isTraining} onToggleModal={toggleModal} />

      {/* Modal for entering prompt and viewing dataset */}
      {showModal && <TrainingModal />}
    </>
  )
}
@@ -0,0 +1,78 @@
'use client'

import { Database, Pause } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
import { cn } from '@/lib/utils'
import { useCopilotTrainingStore } from '@/stores/copilot-training/store'

interface TrainingFloatingButtonProps {
  isTraining: boolean
  onToggleModal: () => void
}

/**
 * Floating button positioned above the diff controls
 * Shows training state and allows starting/stopping training
 */
export function TrainingFloatingButton({ isTraining, onToggleModal }: TrainingFloatingButtonProps) {
  const { stopTraining } = useCopilotTrainingStore()

  const handleClick = () => {
    if (isTraining) {
      // Stop and save the training session
      const dataset = stopTraining()
      if (dataset) {
        // Show a brief success indicator
        const button = document.getElementById('training-button')
        if (button) {
          button.classList.add('animate-pulse')
          setTimeout(() => button.classList.remove('animate-pulse'), 1000)
        }
      }
    } else {
      // Open modal to start new training
      onToggleModal()
    }
  }

  return (
    <div className='-translate-x-1/2 fixed bottom-32 left-1/2 z-30'>
      <Tooltip>
        <TooltipTrigger asChild>
          <Button
            id='training-button'
            variant='outline'
            size='sm'
            onClick={handleClick}
            className={cn(
              'flex items-center gap-2 rounded-[14px] border bg-card/95 px-3 py-2 shadow-lg backdrop-blur-sm transition-all',
              'hover:bg-muted/80',
              isTraining &&
                'border-orange-500 bg-orange-50 dark:border-orange-400 dark:bg-orange-950/30'
            )}
          >
            {isTraining ? (
              <>
                <Pause className='h-4 w-4 text-orange-600 dark:text-orange-400' />
                <span className='font-medium text-orange-700 text-sm dark:text-orange-300'>
                  Stop Training
                </span>
              </>
            ) : (
              <>
                <Database className='h-4 w-4' />
                <span className='font-medium text-sm'>Train Copilot</span>
              </>
            )}
          </Button>
        </TooltipTrigger>
        <TooltipContent>
          {isTraining
            ? 'Stop recording and save training dataset'
            : 'Start recording workflow changes for training'}
        </TooltipContent>
      </Tooltip>
    </div>
  )
}
@@ -0,0 +1,688 @@
|
||||
'use client'
|
||||
|
||||
import { useState } from 'react'
|
||||
import {
|
||||
Check,
|
||||
CheckCircle2,
|
||||
ChevronDown,
|
||||
Clipboard,
|
||||
Download,
|
||||
Eye,
|
||||
Send,
|
||||
Trash2,
|
||||
X,
|
||||
XCircle,
|
||||
} from 'lucide-react'
|
||||
import { Button } from '@/components/ui/button'
|
||||
import { Checkbox } from '@/components/ui/checkbox'
|
||||
import {
|
||||
Dialog,
|
||||
DialogContent,
|
||||
DialogDescription,
|
||||
DialogHeader,
|
||||
DialogTitle,
|
||||
} from '@/components/ui/dialog'
|
||||
import { Input } from '@/components/ui/input'
|
||||
import { Label } from '@/components/ui/label'
|
||||
import { ScrollArea } from '@/components/ui/scroll-area'
|
||||
import { Tabs, TabsContent, TabsList, TabsTrigger } from '@/components/ui/tabs'
|
||||
import { Textarea } from '@/components/ui/textarea'
|
||||
import { cn } from '@/lib/utils'
|
||||
import { formatEditSequence } from '@/lib/workflows/training/compute-edit-sequence'
|
||||
import { useCopilotTrainingStore } from '@/stores/copilot-training/store'
|
||||
|
||||
/**
|
||||
* Modal for starting training sessions and viewing/exporting datasets
|
||||
*/
|
||||
export function TrainingModal() {
|
||||
const {
|
||||
isTraining,
|
||||
currentTitle,
|
||||
currentPrompt,
|
||||
startSnapshot,
|
||||
datasets,
|
||||
showModal,
|
||||
setPrompt,
|
||||
startTraining,
|
||||
cancelTraining,
|
||||
toggleModal,
|
||||
clearDatasets,
|
||||
exportDatasets,
|
||||
markDatasetSent,
|
||||
} = useCopilotTrainingStore()
|
||||
|
||||
const [localPrompt, setLocalPrompt] = useState(currentPrompt)
|
||||
const [localTitle, setLocalTitle] = useState(currentTitle)
|
||||
const [copiedId, setCopiedId] = useState<string | null>(null)
|
||||
const [viewingDataset, setViewingDataset] = useState<string | null>(null)
|
||||
const [expandedDataset, setExpandedDataset] = useState<string | null>(null)
|
||||
const [sendingDatasets, setSendingDatasets] = useState<Set<string>>(new Set())
|
||||
const [sendingAll, setSendingAll] = useState(false)
|
||||
const [selectedDatasets, setSelectedDatasets] = useState<Set<string>>(new Set())
|
||||
const [sendingSelected, setSendingSelected] = useState(false)
|
||||
const [sentDatasets, setSentDatasets] = useState<Set<string>>(new Set())
|
||||
const [failedDatasets, setFailedDatasets] = useState<Set<string>>(new Set())
|
||||
|
||||
const handleStart = () => {
|
||||
if (localTitle.trim() && localPrompt.trim()) {
|
||||
startTraining(localTitle, localPrompt)
|
||||
setLocalTitle('')
|
||||
setLocalPrompt('')
|
||||
}
|
||||
}
|
||||
|
||||
const handleCopyDataset = (dataset: any) => {
|
||||
const dataStr = JSON.stringify(
|
||||
{
|
||||
prompt: dataset.prompt,
|
||||
startState: dataset.startState,
|
||||
endState: dataset.endState,
|
||||
editSequence: dataset.editSequence,
|
||||
metadata: dataset.metadata,
|
||||
},
|
||||
null,
|
||||
2
|
||||
)
|
||||
|
||||
navigator.clipboard.writeText(dataStr)
|
||||
setCopiedId(dataset.id)
|
||||
setTimeout(() => setCopiedId(null), 2000)
|
||||
}
|
||||
|
||||
const handleExportAll = () => {
|
||||
const dataStr = exportDatasets()
|
||||
const blob = new Blob([dataStr], { type: 'application/json' })
|
||||
const url = URL.createObjectURL(blob)
|
||||
const a = document.createElement('a')
|
||||
a.href = url
|
||||
a.download = `copilot-training-${new Date().toISOString().split('T')[0]}.json`
|
||||
a.click()
|
||||
URL.revokeObjectURL(url)
|
||||
}
|
||||
|
||||
const sendToIndexer = async (dataset: any) => {
|
||||
try {
|
||||
// Extract subblock values from the workflow states
|
||||
const extractSubBlockValues = (state: any) => {
|
||||
const subBlockValues: Record<string, Record<string, any>> = {}
|
||||
|
||||
if (state.blocks) {
|
||||
for (const [blockId, block] of Object.entries(state.blocks)) {
|
||||
if ((block as any).subBlocks) {
|
||||
const blockSubValues: Record<string, any> = {}
|
||||
for (const [subBlockId, subBlock] of Object.entries((block as any).subBlocks)) {
|
||||
if ((subBlock as any).value !== undefined) {
|
||||
blockSubValues[subBlockId] = (subBlock as any).value
|
||||
}
|
||||
}
|
||||
if (Object.keys(blockSubValues).length > 0) {
|
||||
subBlockValues[blockId] = blockSubValues
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return subBlockValues
|
||||
}
|
||||
|
||||
const startSubBlockValues = extractSubBlockValues(dataset.startState)
|
||||
const endSubBlockValues = extractSubBlockValues(dataset.endState)
|
||||
|
||||
// Convert both states to YAML in parallel
|
||||
const [startYamlResponse, endYamlResponse] = await Promise.all([
|
||||
fetch('/api/workflows/yaml/convert', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
workflowState: dataset.startState,
|
||||
subBlockValues: startSubBlockValues,
|
||||
}),
|
||||
}),
|
||||
fetch('/api/workflows/yaml/convert', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
workflowState: dataset.endState,
|
||||
subBlockValues: endSubBlockValues,
|
||||
}),
|
||||
}),
|
||||
])
|
||||
|
||||
if (!startYamlResponse.ok) {
|
||||
throw new Error('Failed to convert start state to YAML')
|
||||
}
|
||||
if (!endYamlResponse.ok) {
|
||||
throw new Error('Failed to convert end state to YAML')
|
||||
}
|
||||
|
||||
const [startResult, endResult] = await Promise.all([
|
||||
startYamlResponse.json(),
|
||||
endYamlResponse.json(),
|
||||
])
|
||||
|
||||
// Now send to the indexer with YAML states
|
||||
const response = await fetch('/api/copilot/training', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({
|
||||
title: dataset.title,
|
||||
prompt: dataset.prompt,
|
||||
input: startResult.yaml, // YAML string
|
||||
output: endResult.yaml, // YAML string
|
||||
operations: dataset.editSequence,
|
||||
}),
|
||||
})
|
||||
|
||||
const result = await response.json()
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(result.error || 'Failed to send to indexer')
|
||||
}
|
||||
|
||||
return result
|
||||
} catch (error) {
|
||||
console.error('Failed to send dataset to indexer:', error)
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
const handleSendOne = (dataset: any) => {
|
||||
// Clear any previous status for this dataset
|
||||
setSentDatasets((prev) => {
|
||||
const newSet = new Set(prev)
|
||||
newSet.delete(dataset.id)
|
||||
return newSet
|
||||
})
|
||||
setFailedDatasets((prev) => {
|
||||
const newSet = new Set(prev)
|
||||
newSet.delete(dataset.id)
|
||||
return newSet
|
||||
})
|
||||
|
||||
// Add to sending set
|
||||
setSendingDatasets((prev) => new Set(prev).add(dataset.id))
|
||||
|
||||
// Fire and forget - handle async without blocking
|
||||
sendToIndexer(dataset)
|
||||
.then(() => {
|
||||
// Remove from sending and mark as sent
|
||||
setSendingDatasets((prev) => {
|
||||
const newSet = new Set(prev)
|
||||
newSet.delete(dataset.id)
|
||||
return newSet
|
||||
})
|
||||
setSentDatasets((prev) => new Set(prev).add(dataset.id))
|
||||
// Persist sent marker in store
|
||||
markDatasetSent(dataset.id)
|
||||
// Clear success indicator after 5 seconds
|
||||
setTimeout(() => {
|
||||
setSentDatasets((prev) => {
|
||||
const newSet = new Set(prev)
|
||||
newSet.delete(dataset.id)
|
||||
return newSet
|
||||
})
|
||||
}, 5000)
|
||||
})
|
||||
.catch((error) => {
|
||||
// Remove from sending and mark as failed
|
||||
setSendingDatasets((prev) => {
|
||||
const newSet = new Set(prev)
|
||||
newSet.delete(dataset.id)
|
||||
return newSet
|
||||
})
|
||||
setFailedDatasets((prev) => new Set(prev).add(dataset.id))
|
||||
// Clear failure indicator after 5 seconds
|
||||
setTimeout(() => {
|
||||
setFailedDatasets((prev) => {
|
||||
const newSet = new Set(prev)
|
||||
newSet.delete(dataset.id)
|
||||
return newSet
|
||||
})
|
||||
}, 5000)
|
||||
})
|
||||
}
|
||||
|
||||
const handleSendAll = async () => {
|
||||
setSendingAll(true)
|
||||
try {
|
||||
const results = await Promise.allSettled(datasets.map((dataset) => sendToIndexer(dataset)))
|
||||
|
||||
const successes = results.filter((r) => r.status === 'fulfilled')
|
||||
const failures = results.filter((r) => r.status === 'rejected')
|
||||
|
||||
// Mark successes and failures visually
|
||||
const successfulIds = datasets
|
||||
.filter((_, i) => results[i].status === 'fulfilled')
|
||||
.map((d) => d.id)
|
||||
const failedIds = datasets.filter((_, i) => results[i].status === 'rejected').map((d) => d.id)
|
||||
|
||||
setSentDatasets((prev) => new Set([...prev, ...successfulIds]))
|
||||
setFailedDatasets((prev) => new Set([...prev, ...failedIds]))
|
||||
|
||||
// Persist sent markers for successes
|
||||
successfulIds.forEach((id) => markDatasetSent(id))
|
||||
|
||||
// Auto-clear failure badges after 5s
|
||||
if (failedIds.length > 0) {
|
||||
setTimeout(() => {
|
||||
setFailedDatasets((prev) => {
|
||||
const newSet = new Set(prev)
|
||||
failedIds.forEach((id) => newSet.delete(id))
|
||||
return newSet
|
||||
})
|
||||
}, 5000)
|
||||
}
|
||||
} finally {
|
||||
setSendingAll(false)
|
||||
}
|
||||
}
|
||||
|
||||
const handleSendSelected = async () => {
|
||||
if (selectedDatasets.size === 0) return
|
||||
|
||||
setSendingSelected(true)
|
||||
try {
|
||||
const datasetsToSend = datasets.filter((d) => selectedDatasets.has(d.id))
|
||||
const results = await Promise.allSettled(
|
||||
datasetsToSend.map((dataset) => sendToIndexer(dataset))
|
||||
)
|
||||
|
||||
const successfulIds = datasetsToSend
|
||||
.filter((_, i) => results[i].status === 'fulfilled')
|
||||
.map((d) => d.id)
|
||||
const failedIds = datasetsToSend
|
||||
.filter((_, i) => results[i].status === 'rejected')
|
||||
.map((d) => d.id)
|
||||
|
||||
setSentDatasets((prev) => new Set([...prev, ...successfulIds]))
|
||||
setFailedDatasets((prev) => new Set([...prev, ...failedIds]))
|
||||
successfulIds.forEach((id) => markDatasetSent(id))
|
||||
|
||||
// Remove successes from selection
|
||||
setSelectedDatasets((prev) => {
|
||||
const newSet = new Set(prev)
|
||||
successfulIds.forEach((id) => newSet.delete(id))
|
||||
return newSet
|
||||
})
|
||||
|
||||
// Auto-clear failure badges after 5s
|
||||
if (failedIds.length > 0) {
|
||||
setTimeout(() => {
|
||||
setFailedDatasets((prev) => {
|
||||
const newSet = new Set(prev)
|
||||
failedIds.forEach((id) => newSet.delete(id))
|
||||
return newSet
|
||||
})
|
||||
}, 5000)
|
||||
}
|
||||
} finally {
|
||||
setSendingSelected(false)
|
||||
}
|
||||
}
|
||||
|
||||
const toggleDatasetSelection = (datasetId: string) => {
|
||||
const newSelection = new Set(selectedDatasets)
|
||||
if (newSelection.has(datasetId)) {
|
||||
newSelection.delete(datasetId)
|
||||
} else {
|
||||
newSelection.add(datasetId)
|
||||
}
|
||||
setSelectedDatasets(newSelection)
|
||||
}
|
||||
|
||||
const toggleSelectAll = () => {
|
||||
if (selectedDatasets.size === datasets.length) {
|
||||
setSelectedDatasets(new Set())
|
||||
} else {
|
||||
setSelectedDatasets(new Set(datasets.map((d) => d.id)))
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<Dialog open={showModal} onOpenChange={toggleModal}>
|
||||
<DialogContent className='max-w-3xl'>
|
||||
<DialogHeader>
|
||||
<DialogTitle>Copilot Training Dataset Builder</DialogTitle>
|
||||
<DialogDescription>
|
||||
Record workflow editing sessions to create training datasets for the copilot
|
||||
</DialogDescription>
|
||||
</DialogHeader>
|
||||
|
||||
{isTraining && (
|
||||
<>
|
||||
<div className='rounded-lg border bg-orange-50 p-4 dark:bg-orange-950/30 mt-4'>
|
||||
<p className='mb-2 font-medium text-orange-700 dark:text-orange-300'>
|
||||
Recording: {currentTitle}
|
||||
</p>
|
||||
<p className='mb-3 text-sm'>{currentPrompt}</p>
|
||||
<div className='flex gap-2'>
|
||||
<Button variant='outline' size='sm' onClick={cancelTraining} className='flex-1'>
|
||||
<X className='mr-2 h-4 w-4' />
|
||||
Cancel
|
||||
</Button>
|
||||
<Button
|
||||
variant='default'
|
||||
size='sm'
|
||||
onClick={() => {
|
||||
useCopilotTrainingStore.getState().stopTraining()
|
||||
setLocalPrompt('')
|
||||
}}
|
||||
className='flex-1'
|
||||
>
|
||||
<Check className='mr-2 h-4 w-4' />
|
||||
Save Dataset
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{startSnapshot && (
|
||||
<div className='rounded-lg border p-3 mt-3'>
|
||||
<p className='mb-2 font-medium text-sm'>Starting State</p>
|
||||
<p className='text-muted-foreground text-xs'>
|
||||
{Object.keys(startSnapshot.blocks).length} blocks, {startSnapshot.edges.length}{' '}
|
||||
edges
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
|
||||
<Tabs defaultValue={isTraining ? 'datasets' : 'new'} className='mt-4'>
|
||||
<TabsList className='grid w-full grid-cols-2'>
|
||||
<TabsTrigger value='new' disabled={isTraining}>
|
||||
New Session
|
||||
</TabsTrigger>
|
||||
<TabsTrigger value='datasets'>Datasets ({datasets.length})</TabsTrigger>
|
||||
</TabsList>
|
||||
|
||||
{/* New Training Session Tab */}
|
||||
<TabsContent value='new' className='space-y-4'>
|
||||
{startSnapshot && (
|
||||
<div className='rounded-lg border bg-muted/50 p-3'>
|
||||
<p className='font-medium text-muted-foreground text-sm'>Current Workflow State</p>
|
||||
<p className='text-sm'>
|
||||
{Object.keys(startSnapshot.blocks).length} blocks, {startSnapshot.edges.length}{' '}
|
||||
edges
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className='space-y-2'>
|
||||
<Label htmlFor='title'>Title</Label>
|
||||
<Input
|
||||
id='title'
|
||||
placeholder='Enter a title for this training dataset...'
|
||||
value={localTitle}
|
||||
onChange={(e) => setLocalTitle(e.target.value)}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className='space-y-2'>
|
||||
<Label htmlFor='prompt'>Training Prompt</Label>
|
||||
<Textarea
|
||||
id='prompt'
|
||||
placeholder='Enter the user intent/prompt for this workflow transformation...'
|
||||
value={localPrompt}
|
||||
onChange={(e) => setLocalPrompt(e.target.value)}
|
||||
rows={3}
|
||||
/>
|
||||
<p className='text-muted-foreground text-xs'>
|
||||
Describe what the next sequence of edits aim to achieve
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<Button
|
||||
onClick={handleStart}
|
||||
disabled={!localTitle.trim() || !localPrompt.trim()}
|
||||
className='w-full'
|
||||
>
|
||||
Start Training Session
|
||||
</Button>
|
||||
</TabsContent>
|
||||
|
||||
{/* Datasets Tab */}
|
||||
<TabsContent value='datasets' className='space-y-4'>
|
||||
{datasets.length === 0 ? (
|
||||
<div className='py-8 text-center text-muted-foreground'>
|
||||
No training datasets yet. Start a new session to create one.
|
||||
</div>
|
||||
) : (
|
||||
<>
|
||||
<div className='flex items-center justify-between'>
|
||||
<div className='flex items-center gap-3'>
|
||||
<Checkbox
|
||||
checked={datasets.length > 0 && selectedDatasets.size === datasets.length}
|
||||
onCheckedChange={toggleSelectAll}
|
||||
disabled={datasets.length === 0}
|
||||
/>
|
||||
<p className='text-muted-foreground text-sm'>
|
||||
{selectedDatasets.size > 0
|
||||
? `${selectedDatasets.size} of ${datasets.length} selected`
|
||||
: `${datasets.length} dataset${datasets.length !== 1 ? 's' : ''} recorded`}
|
||||
</p>
|
||||
</div>
|
||||
<div className='flex gap-2'>
|
||||
{selectedDatasets.size > 0 && (
|
||||
<Button
|
||||
variant='default'
|
||||
size='sm'
|
||||
onClick={handleSendSelected}
|
||||
disabled={sendingSelected}
|
||||
>
|
||||
<Send className='mr-2 h-4 w-4' />
|
||||
{sendingSelected ? 'Sending...' : `Send ${selectedDatasets.size} Selected`}
|
||||
</Button>
|
||||
)}
|
||||
<Button
|
||||
variant='outline'
|
||||
size='sm'
|
||||
onClick={handleSendAll}
|
||||
disabled={datasets.length === 0 || sendingAll}
|
||||
>
|
||||
<Send className='mr-2 h-4 w-4' />
|
||||
{sendingAll ? 'Sending...' : 'Send All'}
|
||||
</Button>
|
||||
<Button
|
||||
variant='outline'
|
||||
size='sm'
|
||||
onClick={handleExportAll}
|
||||
disabled={datasets.length === 0}
|
||||
>
|
||||
<Download className='mr-2 h-4 w-4' />
|
||||
Export
|
||||
</Button>
|
||||
<Button
|
||||
variant='outline'
|
||||
size='sm'
|
||||
onClick={clearDatasets}
|
||||
disabled={datasets.length === 0}
|
||||
>
|
||||
<Trash2 className='mr-2 h-4 w-4' />
|
||||
Clear
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<ScrollArea className='h-[400px]'>
|
||||
<div className='space-y-3'>
|
||||
{datasets.map((dataset, index) => (
|
||||
<div
|
||||
key={dataset.id}
|
||||
className='rounded-lg border bg-card transition-colors hover:bg-muted/50'
|
||||
>
|
||||
<div className='flex items-start p-4'>
|
||||
<Checkbox
|
||||
checked={selectedDatasets.has(dataset.id)}
|
||||
onCheckedChange={() => toggleDatasetSelection(dataset.id)}
|
||||
className='mt-0.5 mr-3'
|
||||
/>
|
||||
<button
|
||||
className='flex flex-1 items-center justify-between text-left'
|
||||
onClick={() =>
|
||||
setExpandedDataset(expandedDataset === dataset.id ? null : dataset.id)
|
||||
}
|
||||
>
|
||||
<div className='flex-1'>
|
||||
<p className='font-medium text-sm'>{dataset.title}</p>
|
||||
<p className='text-muted-foreground text-xs'>
|
||||
{dataset.prompt.substring(0, 50)}
|
||||
{dataset.prompt.length > 50 ? '...' : ''}
|
||||
</p>
|
||||
</div>
|
||||
<div className='flex items-center gap-3'>
|
||||
{dataset.sentAt && (
|
||||
<span className='inline-flex items-center rounded-full bg-green-50 px-2 py-0.5 text-green-700 text-xs ring-1 ring-inset ring-green-600/20 dark:bg-green-900/20 dark:text-green-300'>
|
||||
<CheckCircle2 className='mr-1 h-3 w-3' /> Sent
|
||||
</span>
|
||||
)}
|
||||
<span className='text-muted-foreground text-xs'>
|
||||
{dataset.editSequence.length} ops
|
||||
</span>
|
||||
<ChevronDown
|
||||
className={cn(
|
||||
'h-4 w-4 text-muted-foreground transition-transform',
|
||||
expandedDataset === dataset.id && 'rotate-180'
|
||||
)}
|
||||
/>
|
||||
</div>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{expandedDataset === dataset.id && (
|
||||
<div className='space-y-3 border-t px-4 pt-3 pb-4'>
|
||||
<div>
|
||||
<p className='mb-1 font-medium text-sm'>Prompt</p>
|
||||
<p className='text-muted-foreground text-sm'>{dataset.prompt}</p>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<p className='mb-1 font-medium text-sm'>Statistics</p>
|
||||
<div className='grid grid-cols-2 gap-2 text-sm'>
|
||||
<div>
|
||||
<span className='text-muted-foreground'>Duration:</span>{' '}
|
||||
{dataset.metadata?.duration
|
||||
? `${(dataset.metadata.duration / 1000).toFixed(1)}s`
|
||||
: 'N/A'}
|
||||
</div>
|
||||
<div>
|
||||
<span className='text-muted-foreground'>Operations:</span>{' '}
|
||||
{dataset.editSequence.length}
|
||||
</div>
|
||||
<div>
|
||||
<span className='text-muted-foreground'>Final blocks:</span>{' '}
|
||||
{dataset.metadata?.blockCount || 0}
|
||||
</div>
|
||||
<div>
|
||||
<span className='text-muted-foreground'>Final edges:</span>{' '}
|
||||
{dataset.metadata?.edgeCount || 0}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<p className='mb-1 font-medium text-sm'>Edit Sequence</p>
|
||||
<div className='max-h-32 overflow-y-auto rounded border bg-muted/50 p-2'>
|
||||
<ul className='space-y-1 font-mono text-xs'>
|
||||
{formatEditSequence(dataset.editSequence).map((desc, i) => (
|
||||
<li key={i}>{desc}</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className='flex gap-2'>
|
||||
<Button
|
||||
variant={
|
||||
sentDatasets.has(dataset.id)
|
||||
? 'outline'
|
||||
: failedDatasets.has(dataset.id)
|
||||
? 'destructive'
|
||||
: 'outline'
|
||||
}
|
||||
size='sm'
|
||||
onClick={() => handleSendOne(dataset)}
|
||||
disabled={sendingDatasets.has(dataset.id)}
|
||||
className={
|
||||
sentDatasets.has(dataset.id)
|
||||
? 'border-green-500 text-green-600 hover:bg-green-50 dark:border-green-400 dark:text-green-400 dark:hover:bg-green-950'
|
||||
: ''
|
||||
}
|
||||
>
|
||||
{sendingDatasets.has(dataset.id) ? (
|
||||
<>
|
||||
<div className='mr-2 h-4 w-4 animate-spin rounded-full border-2 border-current border-t-transparent' />
|
||||
Sending...
|
||||
</>
|
||||
) : sentDatasets.has(dataset.id) ? (
|
||||
<>
|
||||
<CheckCircle2 className='mr-2 h-4 w-4' />
|
||||
Sent
|
||||
</>
|
||||
) : failedDatasets.has(dataset.id) ? (
|
||||
<>
|
||||
<XCircle className='mr-2 h-4 w-4' />
|
||||
Failed
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Send className='mr-2 h-4 w-4' />
|
||||
Send
|
||||
</>
|
||||
)}
|
||||
</Button>
|
||||
<Button
|
||||
variant='outline'
|
||||
size='sm'
|
||||
onClick={() => setViewingDataset(dataset.id)}
|
||||
>
|
||||
<Eye className='mr-2 h-4 w-4' />
|
||||
View
|
||||
</Button>
|
||||
<Button
|
||||
variant='outline'
|
||||
size='sm'
|
||||
onClick={() => handleCopyDataset(dataset)}
|
||||
>
|
||||
{copiedId === dataset.id ? (
|
||||
<>
|
||||
<Check className='mr-2 h-4 w-4' />
|
||||
Copied!
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<Clipboard className='mr-2 h-4 w-4' />
|
||||
Copy
|
||||
</>
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
{viewingDataset === dataset.id && (
|
||||
<div className='rounded border bg-muted/50 p-3'>
|
||||
<pre className='max-h-64 overflow-auto text-xs'>
|
||||
{JSON.stringify(
|
||||
{
|
||||
prompt: dataset.prompt,
|
||||
editSequence: dataset.editSequence,
|
||||
metadata: dataset.metadata,
|
||||
},
|
||||
null,
|
||||
2
|
||||
)}
|
||||
</pre>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</ScrollArea>
|
||||
</>
|
||||
)}
|
||||
</TabsContent>
|
||||
</Tabs>
|
||||
</DialogContent>
|
||||
</Dialog>
|
||||
)
|
||||
}
|
||||
@@ -21,6 +21,7 @@ import { ErrorBoundary } from '@/app/workspace/[workspaceId]/w/[workflowId]/comp
import { FloatingControls } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/floating-controls/floating-controls'
import { Panel } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/panel/panel'
import { SubflowNodeComponent } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/subflows/subflow-node'
import { TrainingControls } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/training-controls/training-controls'
import { TriggerList } from '@/app/workspace/[workspaceId]/w/[workflowId]/components/trigger-list/trigger-list'
import {
  TriggerWarningDialog,
@@ -1874,6 +1875,9 @@ const WorkflowContent = React.memo(() => {
      {/* Floating Controls (Zoom, Undo, Redo) */}
      <FloatingControls />

      {/* Training Controls - for recording workflow edits */}
      <TrainingControls />

      <ReactFlow
        nodes={nodes}
        edges={edgesWithSelection}
@@ -12,6 +12,7 @@ import {
|
||||
import { Skeleton } from '@/components/ui/skeleton'
|
||||
import { Switch } from '@/components/ui/switch'
|
||||
import { Tooltip, TooltipContent, TooltipTrigger } from '@/components/ui/tooltip'
|
||||
import { getEnv, isTruthy } from '@/lib/env'
|
||||
import { useGeneralStore } from '@/stores/settings/general/store'
|
||||
|
||||
const TOOLTIPS = {
|
||||
@@ -21,16 +22,20 @@ const TOOLTIPS = {
|
||||
'Show console entries expanded by default. When disabled, entries will be collapsed by default.',
|
||||
floatingControls:
|
||||
'Show floating controls for zoom, undo, and redo at the bottom of the workflow canvas.',
|
||||
trainingControls:
|
||||
'Show training controls for recording workflow edits to build copilot training datasets.',
|
||||
}
|
||||
|
||||
export function General() {
|
||||
const isLoading = useGeneralStore((state) => state.isLoading)
|
||||
const isTrainingEnabled = isTruthy(getEnv('NEXT_PUBLIC_COPILOT_TRAINING_ENABLED'))
|
||||
const theme = useGeneralStore((state) => state.theme)
|
||||
const isAutoConnectEnabled = useGeneralStore((state) => state.isAutoConnectEnabled)
|
||||
|
||||
const isAutoPanEnabled = useGeneralStore((state) => state.isAutoPanEnabled)
|
||||
const isConsoleExpandedByDefault = useGeneralStore((state) => state.isConsoleExpandedByDefault)
|
||||
const showFloatingControls = useGeneralStore((state) => state.showFloatingControls)
|
||||
const showTrainingControls = useGeneralStore((state) => state.showTrainingControls)
|
||||
|
||||
// Loading states
|
||||
const isAutoConnectLoading = useGeneralStore((state) => state.isAutoConnectLoading)
|
||||
@@ -41,6 +46,7 @@ export function General() {
|
||||
)
|
||||
const isThemeLoading = useGeneralStore((state) => state.isThemeLoading)
|
||||
const isFloatingControlsLoading = useGeneralStore((state) => state.isFloatingControlsLoading)
|
||||
const isTrainingControlsLoading = useGeneralStore((state) => state.isTrainingControlsLoading)
|
||||
|
||||
const setTheme = useGeneralStore((state) => state.setTheme)
|
||||
const toggleAutoConnect = useGeneralStore((state) => state.toggleAutoConnect)
|
||||
@@ -50,6 +56,7 @@ export function General() {
|
||||
(state) => state.toggleConsoleExpandedByDefault
|
||||
)
|
||||
const toggleFloatingControls = useGeneralStore((state) => state.toggleFloatingControls)
|
||||
const toggleTrainingControls = useGeneralStore((state) => state.toggleTrainingControls)
|
||||
|
||||
// Sync theme from store to next-themes when theme changes
|
||||
useEffect(() => {
|
||||
@@ -88,6 +95,12 @@ export function General() {
|
||||
}
|
||||
}
|
||||
|
||||
const handleTrainingControlsChange = async (checked: boolean) => {
|
||||
if (checked !== showTrainingControls && !isTrainingControlsLoading) {
|
||||
await toggleTrainingControls()
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<div className='px-6 pt-4 pb-2'>
|
||||
<div className='flex flex-col gap-4'>
|
||||
@@ -282,6 +295,38 @@ export function General() {
|
||||
disabled={isLoading || isFloatingControlsLoading}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{isTrainingEnabled && (
|
||||
<div className='flex items-center justify-between'>
|
||||
<div className='flex items-center gap-2'>
|
||||
<Label htmlFor='training-controls' className='font-normal'>
|
||||
Training controls
|
||||
</Label>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<Button
|
||||
variant='ghost'
|
||||
size='icon'
|
||||
className='h-5 w-5 p-0'
|
||||
aria-label='Learn more about training controls'
|
||||
disabled={isLoading || isTrainingControlsLoading}
|
||||
>
|
||||
<Info className='h-3.5 w-3.5 text-muted-foreground' />
|
||||
</Button>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent side='top' className='max-w-[300px] p-3'>
|
||||
<p className='text-sm'>{TOOLTIPS.trainingControls}</p>
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</div>
|
||||
<Switch
|
||||
id='training-controls'
|
||||
checked={showTrainingControls}
|
||||
onCheckedChange={handleTrainingControlsChange}
|
||||
disabled={isLoading || isTrainingControlsLoading}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -69,6 +69,7 @@ export interface CopilotToolMetadata {
export interface CopilotTriggerMetadata {
  id: string
  outputs?: any
  configFields?: any
}

export interface CopilotBlockMetadata {
@@ -163,6 +164,7 @@ export const getBlocksMetadataServerTool: BaseServerTool<
      triggers.push({
        id: tid,
        outputs: trig?.outputs || {},
        configFields: trig?.configFields || {},
      })
    }
@@ -119,6 +119,31 @@ async function applyOperationsToYaml(
      }
      if (params?.type) block.type = params.type
      if (params?.name) block.name = params.name
      // Handle trigger mode toggle and clean incoming edges when enabling
      if (typeof params?.triggerMode === 'boolean') {
        // Set triggerMode as a top-level block property
        block.triggerMode = params.triggerMode

        if (params.triggerMode === true) {
          // Remove all incoming connections where this block is referenced as a target
          Object.values(workflowData.blocks).forEach((other: any) => {
            if (!other?.connections) return
            Object.keys(other.connections).forEach((handle) => {
              const value = other.connections[handle]
              if (typeof value === 'string') {
                if (value === block_id) delete other.connections[handle]
              } else if (Array.isArray(value)) {
                other.connections[handle] = value.filter((item: any) =>
                  typeof item === 'string' ? item !== block_id : item?.block !== block_id
                )
                if (other.connections[handle].length === 0) delete other.connections[handle]
              } else if (typeof value === 'object' && value?.block) {
                if (value.block === block_id) delete other.connections[handle]
              }
            })
          })
        }
      }
      if (params?.removeEdges && Array.isArray(params.removeEdges)) {
        params.removeEdges.forEach(({ targetBlockId, sourceHandle = 'default' }) => {
          const value = block.connections?.[sourceHandle]
@@ -234,6 +259,14 @@ export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, any> = {
      subBlockValues = fromDb.subBlockValues
    }

    // Log the workflow state to see if triggerMode is present
    logger.info('Workflow state being sent to sim-agent for YAML conversion:', {
      blockCount: Object.keys(workflowState.blocks || {}).length,
      blocksWithTriggerMode: Object.entries(workflowState.blocks || {})
        .filter(([_, block]: [string, any]) => block.triggerMode === true)
        .map(([id]) => id),
    })

    const resp = await fetch(`${SIM_AGENT_API_URL}/api/workflow/to-yaml`, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
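To make the trigger-mode cleanup above concrete, here is a small before/after sketch of another block's connections map; the block IDs and handle names are invented for illustration. When a block enters trigger mode, string and object targets that reference it are deleted and it is filtered out of array targets.

// Another block's connections map before 'agent-1' (hypothetical ID) enters trigger mode:
const before = {
  default: 'agent-1', // string target -> the whole handle is deleted
  error: ['agent-1', 'logger-1'], // array target -> 'agent-1' is filtered out
  success: { block: 'agent-1', handle: 'input' }, // object target -> the handle is deleted
}

// After applyOperationsToYaml processes { triggerMode: true } for 'agent-1':
const after = {
  error: ['logger-1'],
}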
@@ -31,6 +31,8 @@ export const env = createEnv({
    COPILOT_MODEL: z.string().optional(), // Model for copilot API calls
    COPILOT_API_KEY: z.string().min(1).optional(), // Secret for internal sim agent API authentication
    SIM_AGENT_API_URL: z.string().url().optional(), // URL for internal sim agent API
    AGENT_INDEXER_URL: z.string().url().optional(), // URL for agent training data indexer
    AGENT_INDEXER_API_KEY: z.string().min(1).optional(), // API key for agent indexer authentication


    // Database & Storage
@@ -228,7 +230,8 @@ export const env = createEnv({
    NEXT_PUBLIC_CUSTOM_CSS_URL: z.string().url().optional(), // Custom CSS stylesheet URL
    NEXT_PUBLIC_SUPPORT_EMAIL: z.string().email().optional(), // Custom support email

    NEXT_PUBLIC_E2B_ENABLED: z.string().optional(), // Enable E2B remote code execution (client-side)
    NEXT_PUBLIC_E2B_ENABLED: z.string().optional(),
    NEXT_PUBLIC_COPILOT_TRAINING_ENABLED: z.string().optional(),
    NEXT_PUBLIC_DOCUMENTATION_URL: z.string().url().optional(), // Custom documentation URL
    NEXT_PUBLIC_TERMS_URL: z.string().url().optional(), // Custom terms of service URL
    NEXT_PUBLIC_PRIVACY_URL: z.string().url().optional(), // Custom privacy policy URL
@@ -274,6 +277,7 @@ export const env = createEnv({
    NEXT_PUBLIC_BRAND_BACKGROUND_COLOR: process.env.NEXT_PUBLIC_BRAND_BACKGROUND_COLOR,
    NEXT_PUBLIC_TRIGGER_DEV_ENABLED: process.env.NEXT_PUBLIC_TRIGGER_DEV_ENABLED,
    NEXT_PUBLIC_E2B_ENABLED: process.env.NEXT_PUBLIC_E2B_ENABLED,
    NEXT_PUBLIC_COPILOT_TRAINING_ENABLED: process.env.NEXT_PUBLIC_COPILOT_TRAINING_ENABLED,
    NODE_ENV: process.env.NODE_ENV,
    NEXT_TELEMETRY_DISABLED: process.env.NEXT_TELEMETRY_DISABLED,
  },
apps/sim/lib/workflows/training/compute-edit-sequence.ts (new file, 297 lines)
@@ -0,0 +1,297 @@
|
||||
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
export interface EditOperation {
|
||||
operation_type: 'add' | 'edit' | 'delete'
|
||||
block_id: string
|
||||
params?: {
|
||||
type?: string
|
||||
name?: string
|
||||
triggerMode?: boolean
|
||||
inputs?: Record<string, any>
|
||||
connections?: Record<string, any>
|
||||
removeEdges?: Array<{ targetBlockId: string; sourceHandle?: string }>
|
||||
}
|
||||
}
|
||||
|
||||
export interface WorkflowDiff {
|
||||
operations: EditOperation[]
|
||||
summary: {
|
||||
blocksAdded: number
|
||||
blocksModified: number
|
||||
blocksDeleted: number
|
||||
edgesChanged: number
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute the edit sequence (operations) needed to transform startState into endState
|
||||
* This analyzes the differences and generates operations that can recreate the changes
|
||||
*/
|
||||
export function computeEditSequence(
|
||||
startState: WorkflowState,
|
||||
endState: WorkflowState
|
||||
): WorkflowDiff {
|
||||
const operations: EditOperation[] = []
|
||||
|
||||
const startBlocks = startState.blocks || {}
|
||||
const endBlocks = endState.blocks || {}
|
||||
const startEdges = startState.edges || []
|
||||
const endEdges = endState.edges || []
|
||||
|
||||
// Track statistics
|
||||
let blocksAdded = 0
|
||||
let blocksModified = 0
|
||||
let blocksDeleted = 0
|
||||
let edgesChanged = 0
|
||||
|
||||
// 1. Find deleted blocks (exist in start but not in end)
|
||||
for (const blockId in startBlocks) {
|
||||
if (!(blockId in endBlocks)) {
|
||||
operations.push({
|
||||
operation_type: 'delete',
|
||||
block_id: blockId,
|
||||
})
|
||||
blocksDeleted++
|
||||
}
|
||||
}
|
||||
|
||||
// 2. Find added blocks (exist in end but not in start)
|
||||
for (const blockId in endBlocks) {
|
||||
if (!(blockId in startBlocks)) {
|
||||
const block = endBlocks[blockId]
|
||||
const addParams: Record<string, any> = {
|
||||
type: block.type,
|
||||
name: block.name,
|
||||
inputs: extractInputValues(block),
|
||||
connections: extractConnections(blockId, endEdges),
|
||||
triggerMode: Boolean(block?.triggerMode),
|
||||
}
|
||||
|
||||
operations.push({
|
||||
operation_type: 'add',
|
||||
block_id: blockId,
|
||||
params: addParams,
|
||||
})
|
||||
blocksAdded++
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Find modified blocks (exist in both but have changes)
|
||||
for (const blockId in endBlocks) {
|
||||
if (blockId in startBlocks) {
|
||||
const startBlock = startBlocks[blockId]
|
||||
const endBlock = endBlocks[blockId]
|
||||
const changes = computeBlockChanges(startBlock, endBlock, blockId, startEdges, endEdges)
|
||||
|
||||
if (changes) {
|
||||
operations.push({
|
||||
operation_type: 'edit',
|
||||
block_id: blockId,
|
||||
params: changes,
|
||||
})
|
||||
blocksModified++
|
||||
if (changes.connections || changes.removeEdges) {
|
||||
edgesChanged++
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
operations,
|
||||
summary: {
|
||||
blocksAdded,
|
||||
blocksModified,
|
||||
blocksDeleted,
|
||||
edgesChanged,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract input values from a block's subBlocks
|
||||
*/
|
||||
function extractInputValues(block: any): Record<string, any> {
|
||||
const inputs: Record<string, any> = {}
|
||||
|
||||
if (block.subBlocks) {
|
||||
for (const [subBlockId, subBlock] of Object.entries(block.subBlocks)) {
|
||||
if ((subBlock as any).value !== undefined && (subBlock as any).value !== null) {
|
||||
inputs[subBlockId] = (subBlock as any).value
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return inputs
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract connections for a specific block from edges
|
||||
*/
|
||||
function extractConnections(
|
||||
blockId: string,
|
||||
edges: Array<{
|
||||
source: string
|
||||
target: string
|
||||
sourceHandle?: string | null
|
||||
targetHandle?: string | null
|
||||
}>
|
||||
): Record<string, any> {
|
||||
const connections: Record<string, any> = {}
|
||||
|
||||
// Find all edges where this block is the source
|
||||
const outgoingEdges = edges.filter((edge) => edge.source === blockId)
|
||||
|
||||
for (const edge of outgoingEdges) {
|
||||
const handle = edge.sourceHandle || 'default'
|
||||
|
||||
// Group by source handle
|
||||
if (!connections[handle]) {
|
||||
connections[handle] = []
|
||||
}
|
||||
|
||||
// Add target block to this handle's connections
|
||||
if (edge.targetHandle && edge.targetHandle !== 'target') {
|
||||
connections[handle].push({
|
||||
block: edge.target,
|
||||
handle: edge.targetHandle,
|
||||
})
|
||||
} else {
|
||||
connections[handle].push(edge.target)
|
||||
}
|
||||
}
|
||||
|
||||
// Simplify single-element arrays to just the element
|
||||
for (const handle in connections) {
|
||||
if (Array.isArray(connections[handle]) && connections[handle].length === 1) {
|
||||
connections[handle] = connections[handle][0]
|
||||
}
|
||||
}
|
||||
|
||||
return connections
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute what changed in a block between two states
|
||||
*/
|
||||
function computeBlockChanges(
|
||||
startBlock: any,
|
||||
endBlock: any,
|
||||
blockId: string,
|
||||
startEdges: Array<{
|
||||
source: string
|
||||
target: string
|
||||
sourceHandle?: string | null
|
||||
targetHandle?: string | null
|
||||
}>,
|
||||
endEdges: Array<{
|
||||
source: string
|
||||
target: string
|
||||
sourceHandle?: string | null
|
||||
targetHandle?: string | null
|
||||
}>
|
||||
): Record<string, any> | null {
|
||||
const changes: Record<string, any> = {}
|
||||
let hasChanges = false
|
||||
|
||||
// Check type change
|
||||
if (startBlock.type !== endBlock.type) {
|
||||
changes.type = endBlock.type
|
||||
hasChanges = true
|
||||
}
|
||||
|
||||
// Check name change
|
||||
if (startBlock.name !== endBlock.name) {
|
||||
changes.name = endBlock.name
|
||||
hasChanges = true
|
||||
}
|
||||
|
||||
// Check trigger mode change (covers entering/exiting trigger mode)
|
||||
const startTrigger = Boolean(startBlock?.triggerMode)
|
||||
const endTrigger = Boolean(endBlock?.triggerMode)
|
||||
if (startTrigger !== endTrigger) {
|
||||
changes.triggerMode = endTrigger
|
||||
hasChanges = true
|
||||
}
|
||||
|
||||
// Check input value changes
|
||||
const startInputs = extractInputValues(startBlock)
|
||||
const endInputs = extractInputValues(endBlock)
|
||||
|
||||
if (JSON.stringify(startInputs) !== JSON.stringify(endInputs)) {
|
||||
changes.inputs = endInputs
|
||||
hasChanges = true
|
||||
}
|
||||
|
||||
// Check connection changes
|
||||
const startConnections = extractConnections(blockId, startEdges)
|
||||
const endConnections = extractConnections(blockId, endEdges)
|
||||
|
||||
if (JSON.stringify(startConnections) !== JSON.stringify(endConnections)) {
|
||||
// Compute which edges were removed
|
||||
const removedEdges: Array<{ targetBlockId: string; sourceHandle?: string }> = []
|
||||
|
||||
for (const handle in startConnections) {
|
||||
const startTargets = Array.isArray(startConnections[handle])
|
||||
? startConnections[handle]
|
||||
: [startConnections[handle]]
|
||||
const endTargets = endConnections[handle]
|
||||
? Array.isArray(endConnections[handle])
|
||||
? endConnections[handle]
|
||||
: [endConnections[handle]]
|
||||
: []
|
||||
|
||||
for (const target of startTargets) {
|
||||
const targetId = typeof target === 'object' ? target.block : target
|
||||
const isPresent = endTargets.some(
|
||||
(t: any) => (typeof t === 'object' ? t.block : t) === targetId
|
||||
)
|
||||
|
||||
if (!isPresent) {
|
||||
removedEdges.push({
|
||||
targetBlockId: targetId,
|
||||
sourceHandle: handle !== 'default' ? handle : undefined,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (removedEdges.length > 0) {
|
||||
changes.removeEdges = removedEdges
|
||||
}
|
||||
|
||||
// Add new connections
|
||||
if (Object.keys(endConnections).length > 0) {
|
||||
changes.connections = endConnections
|
||||
}
|
||||
|
||||
hasChanges = true
|
||||
}
|
||||
|
||||
return hasChanges ? changes : null
|
||||
}
|
||||
|
||||
/**
|
||||
* Format edit operations into a human-readable description
|
||||
*/
|
||||
export function formatEditSequence(operations: EditOperation[]): string[] {
|
||||
return operations.map((op) => {
|
||||
switch (op.operation_type) {
|
||||
case 'add':
|
||||
return `Add block "${op.params?.name || op.block_id}" (${op.params?.type || 'unknown'})`
|
||||
case 'delete':
|
||||
return `Delete block "${op.block_id}"`
|
||||
case 'edit': {
|
||||
const changes: string[] = []
|
||||
if (op.params?.type) changes.push(`type to ${op.params.type}`)
|
||||
if (op.params?.name) changes.push(`name to "${op.params.name}"`)
|
||||
if (op.params?.inputs) changes.push('inputs')
|
||||
if (op.params?.connections) changes.push('connections')
|
||||
if (op.params?.removeEdges) changes.push(`remove ${op.params.removeEdges.length} edge(s)`)
|
||||
return `Edit block "${op.block_id}": ${changes.join(', ')}`
|
||||
}
|
||||
default:
|
||||
return `Unknown operation on block "${op.block_id}"`
|
||||
}
|
||||
})
|
||||
}
|
||||
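A hedged usage sketch of computeEditSequence and formatEditSequence defined above. The two states are minimal fabricated fixtures cast loosely (a real WorkflowState carries more fields), so the formatted strings in the comments are indicative only.

import {
  computeEditSequence,
  formatEditSequence,
} from '@/lib/workflows/training/compute-edit-sequence'

// Minimal fabricated before/after states: block b1 is renamed, block b2 and an edge are added.
const start = {
  blocks: { b1: { type: 'agent', name: 'Agent' } },
  edges: [],
  loops: {},
  parallels: {},
} as any

const end = {
  blocks: {
    b1: { type: 'agent', name: 'Research Agent' },
    b2: { type: 'api', name: 'Fetch Data' },
  },
  edges: [{ source: 'b1', target: 'b2' }],
  loops: {},
  parallels: {},
} as any

const diff = computeEditSequence(start, end)
// diff.summary -> { blocksAdded: 1, blocksModified: 1, blocksDeleted: 0, edgesChanged: 1 }
console.log(formatEditSequence(diff.operations))
// e.g. [ 'Add block "Fetch Data" (api)', 'Edit block "b1": name to "Research Agent", connections' ]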
apps/sim/stores/copilot-training/store.ts (new file, 235 lines)
@@ -0,0 +1,235 @@
|
||||
import { create } from 'zustand'
|
||||
import { devtools } from 'zustand/middleware'
|
||||
import { createLogger } from '@/lib/logs/console/logger'
|
||||
import {
|
||||
computeEditSequence,
|
||||
type EditOperation,
|
||||
} from '@/lib/workflows/training/compute-edit-sequence'
|
||||
import { mergeSubblockState } from '@/stores/workflows/utils'
|
||||
import { useWorkflowStore } from '@/stores/workflows/workflow/store'
|
||||
import type { WorkflowState } from '@/stores/workflows/workflow/types'
|
||||
|
||||
const logger = createLogger('CopilotTrainingStore')
|
||||
|
||||
export interface TrainingDataset {
|
||||
id: string
|
||||
workflowId: string
|
||||
title: string
|
||||
prompt: string
|
||||
startState: WorkflowState
|
||||
endState: WorkflowState
|
||||
editSequence: EditOperation[]
|
||||
createdAt: Date
|
||||
sentAt?: Date
|
||||
metadata?: {
|
||||
duration?: number // Time taken to complete edits in ms
|
||||
blockCount?: number
|
||||
edgeCount?: number
|
||||
}
|
||||
}
|
||||
|
||||
interface CopilotTrainingState {
|
||||
// Current training session
|
||||
isTraining: boolean
|
||||
currentTitle: string
|
||||
currentPrompt: string
|
||||
startSnapshot: WorkflowState | null
|
||||
startTime: number | null
|
||||
|
||||
// Completed datasets
|
||||
datasets: TrainingDataset[]
|
||||
|
||||
// UI state
|
||||
showModal: boolean
|
||||
|
||||
// Actions
|
||||
startTraining: (title: string, prompt: string) => void
|
||||
stopTraining: () => TrainingDataset | null
|
||||
cancelTraining: () => void
|
||||
setPrompt: (prompt: string) => void
|
||||
toggleModal: () => void
|
||||
clearDatasets: () => void
|
||||
exportDatasets: () => string
|
||||
markDatasetSent: (id: string, sentAt?: Date) => void
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a clean snapshot of the current workflow state
|
||||
*/
|
||||
function captureWorkflowSnapshot(): WorkflowState {
|
||||
const rawState = useWorkflowStore.getState().getWorkflowState()
|
||||
|
||||
// Merge subblock values to get complete state
|
||||
const blocksWithSubblockValues = mergeSubblockState(rawState.blocks)
|
||||
|
||||
// Clean the state - only include essential fields
|
||||
return {
|
||||
blocks: blocksWithSubblockValues,
|
||||
edges: rawState.edges || [],
|
||||
loops: rawState.loops || {},
|
||||
parallels: rawState.parallels || {},
|
||||
lastSaved: Date.now(),
|
||||
}
|
||||
}
|
||||
|
||||
export const useCopilotTrainingStore = create<CopilotTrainingState>()(
|
||||
devtools(
|
||||
(set, get) => ({
|
||||
// Initial state
|
||||
isTraining: false,
|
||||
currentTitle: '',
|
||||
currentPrompt: '',
|
||||
startSnapshot: null,
|
||||
startTime: null,
|
||||
datasets: [],
|
||||
showModal: false,
|
||||
|
||||
// Start a new training session
|
||||
startTraining: (title: string, prompt: string) => {
|
||||
if (!prompt.trim()) {
|
||||
logger.warn('Cannot start training without a prompt')
|
||||
return
|
||||
}
|
||||
if (!title.trim()) {
|
||||
logger.warn('Cannot start training without a title')
|
||||
return
|
||||
}
|
||||
|
||||
const snapshot = captureWorkflowSnapshot()
|
||||
|
||||
logger.info('Starting training session', {
|
||||
title,
|
||||
prompt,
|
||||
blockCount: Object.keys(snapshot.blocks).length,
|
||||
edgeCount: snapshot.edges.length,
|
||||
})
|
||||
|
||||
set({
|
||||
isTraining: true,
|
||||
currentTitle: title,
|
||||
currentPrompt: prompt,
|
||||
startSnapshot: snapshot,
|
||||
startTime: Date.now(),
|
||||
showModal: false, // Close modal when starting
|
||||
})
|
||||
},
|
||||
|
||||
// Stop training and save the dataset
|
||||
stopTraining: () => {
|
||||
const state = get()
|
||||
|
||||
if (!state.isTraining || !state.startSnapshot) {
|
||||
logger.warn('No active training session to stop')
|
||||
return null
|
||||
}
|
||||
|
||||
const endSnapshot = captureWorkflowSnapshot()
|
||||
const duration = state.startTime ? Date.now() - state.startTime : 0
|
||||
|
||||
// Compute the edit sequence
|
||||
const { operations, summary } = computeEditSequence(state.startSnapshot, endSnapshot)
|
||||
|
||||
// Get workflow ID from the store
|
||||
const { activeWorkflowId } = useWorkflowStore.getState() as any
|
||||
|
||||
const dataset: TrainingDataset = {
|
||||
id: crypto.randomUUID(),
|
||||
workflowId: activeWorkflowId || 'unknown',
|
||||
title: state.currentTitle,
|
||||
prompt: state.currentPrompt,
|
||||
startState: state.startSnapshot,
|
||||
endState: endSnapshot,
|
||||
editSequence: operations,
|
||||
createdAt: new Date(),
|
||||
metadata: {
|
||||
duration,
|
||||
blockCount: Object.keys(endSnapshot.blocks).length,
|
||||
edgeCount: endSnapshot.edges.length,
|
||||
},
|
||||
}
|
||||
|
||||
logger.info('Training session completed', {
|
||||
title: state.currentTitle,
|
||||
prompt: state.currentPrompt,
|
||||
duration,
|
||||
operations: operations.length,
|
||||
summary,
|
||||
})
|
||||
|
||||
set((prev) => ({
|
||||
isTraining: false,
|
||||
currentTitle: '',
|
||||
currentPrompt: '',
|
||||
startSnapshot: null,
|
||||
startTime: null,
|
||||
datasets: [...prev.datasets, dataset],
|
||||
}))
|
||||
|
||||
return dataset
|
||||
},
|
||||
|
||||
// Cancel training without saving
|
||||
cancelTraining: () => {
|
||||
logger.info('Training session cancelled')
|
||||
|
||||
set({
|
||||
isTraining: false,
|
||||
currentTitle: '',
|
||||
currentPrompt: '',
|
||||
startSnapshot: null,
|
||||
startTime: null,
|
||||
})
|
||||
},
|
||||
|
||||
// Update the prompt
|
||||
setPrompt: (prompt: string) => {
|
||||
set({ currentPrompt: prompt })
|
||||
},
|
||||
|
||||
// Toggle modal visibility
|
||||
toggleModal: () => {
|
||||
set((state) => ({ showModal: !state.showModal }))
|
||||
},
|
||||
|
||||
// Clear all datasets
|
||||
clearDatasets: () => {
|
||||
logger.info('Clearing all training datasets')
|
||||
set({ datasets: [] })
|
||||
},
|
||||
|
||||
// Export datasets as JSON
|
||||
exportDatasets: () => {
|
||||
const { datasets } = get()
|
||||
|
||||
const exportData = {
|
||||
version: '1.0',
|
||||
exportedAt: new Date().toISOString(),
|
||||
datasets: datasets.map((d) => ({
|
||||
id: d.id,
|
||||
workflowId: d.workflowId,
|
||||
prompt: d.prompt,
|
||||
startState: d.startState,
|
||||
endState: d.endState,
|
||||
editSequence: d.editSequence,
|
||||
createdAt: d.createdAt.toISOString(),
|
||||
sentAt: d.sentAt ? d.sentAt.toISOString() : undefined,
|
||||
metadata: d.metadata,
|
||||
})),
|
||||
}
|
||||
|
||||
return JSON.stringify(exportData, null, 2)
|
||||
},
|
||||
|
||||
// Mark a dataset as sent (persist a timestamp)
|
||||
markDatasetSent: (id: string, sentAt?: Date) => {
|
||||
const when = sentAt ?? new Date()
|
||||
set((state) => ({
|
||||
datasets: state.datasets.map((d) => (d.id === id ? { ...d, sentAt: when } : d)),
|
||||
}))
|
||||
},
|
||||
}),
|
||||
{
|
||||
name: 'copilot-training-store',
|
||||
}
|
||||
)
|
||||
)
|
||||
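A short sketch of driving the store above imperatively, the same way TrainingFloatingButton and TrainingModal do via getState(); the title and prompt are placeholders.

import { useCopilotTrainingStore } from '@/stores/copilot-training/store'

// Snapshot the current workflow and begin recording (title/prompt are placeholders).
useCopilotTrainingStore.getState().startTraining(
  'Add a webhook trigger',
  'Convert the starter block into a webhook trigger'
)

// ...the user edits the workflow on the canvas...

// Stop recording: a second snapshot is taken and the edit sequence is computed.
const dataset = useCopilotTrainingStore.getState().stopTraining()
if (dataset) {
  console.log(`${dataset.editSequence.length} operations recorded for "${dataset.title}"`)
  console.log(useCopilotTrainingStore.getState().exportDatasets()) // JSON export of all datasets
}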
@@ -23,6 +23,7 @@ export const useGeneralStore = create<GeneralStore>()(
|
||||
isConsoleExpandedByDefault: true,
|
||||
isDebugModeEnabled: false,
|
||||
showFloatingControls: true,
|
||||
showTrainingControls: false,
|
||||
theme: 'system' as const, // Keep for compatibility but not used
|
||||
telemetryEnabled: true,
|
||||
isLoading: false,
|
||||
@@ -36,6 +37,7 @@ export const useGeneralStore = create<GeneralStore>()(
|
||||
isBillingUsageNotificationsLoading: false,
|
||||
isBillingUsageNotificationsEnabled: true,
|
||||
isFloatingControlsLoading: false,
|
||||
isTrainingControlsLoading: false,
|
||||
}
|
||||
|
||||
// Optimistic update helper
|
||||
@@ -114,6 +116,17 @@ export const useGeneralStore = create<GeneralStore>()(
|
||||
)
|
||||
},
|
||||
|
||||
toggleTrainingControls: async () => {
|
||||
if (get().isTrainingControlsLoading) return
|
||||
const newValue = !get().showTrainingControls
|
||||
await updateSettingOptimistic(
|
||||
'showTrainingControls',
|
||||
newValue,
|
||||
'isTrainingControlsLoading',
|
||||
'showTrainingControls'
|
||||
)
|
||||
},
|
||||
|
||||
setTheme: async (theme) => {
|
||||
if (get().isThemeLoading) return
|
||||
|
||||
@@ -217,6 +230,7 @@ export const useGeneralStore = create<GeneralStore>()(
|
||||
isAutoPanEnabled: data.autoPan ?? true,
|
||||
isConsoleExpandedByDefault: data.consoleExpandedByDefault ?? true,
|
||||
showFloatingControls: data.showFloatingControls ?? true,
|
||||
showTrainingControls: data.showTrainingControls ?? false,
|
||||
theme: data.theme || 'system',
|
||||
telemetryEnabled: data.telemetryEnabled,
|
||||
isBillingUsageNotificationsEnabled: data.billingUsageNotificationsEnabled ?? true,
|
||||
|
||||
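The new preference can also be toggled outside the settings dialog through the same store action; a minimal sketch, assuming the store has already loaded the user's settings.

import { useGeneralStore } from '@/stores/settings/general/store'

async function enableTrainingControls(): Promise<void> {
  const { showTrainingControls, toggleTrainingControls } = useGeneralStore.getState()
  if (!showTrainingControls) {
    // Persists through the same optimistic-update helper used by the other settings toggles.
    await toggleTrainingControls()
  }
}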
@@ -4,6 +4,7 @@ export interface General {
|
||||
isConsoleExpandedByDefault: boolean
|
||||
isDebugModeEnabled: boolean
|
||||
showFloatingControls: boolean
|
||||
showTrainingControls: boolean
|
||||
theme: 'system' | 'light' | 'dark'
|
||||
telemetryEnabled: boolean
|
||||
isLoading: boolean
|
||||
@@ -16,6 +17,7 @@ export interface General {
|
||||
isBillingUsageNotificationsLoading: boolean
|
||||
isBillingUsageNotificationsEnabled: boolean
|
||||
isFloatingControlsLoading: boolean
|
||||
isTrainingControlsLoading: boolean
|
||||
}
|
||||
|
||||
export interface GeneralActions {
|
||||
@@ -24,6 +26,7 @@ export interface GeneralActions {
|
||||
toggleConsoleExpandedByDefault: () => Promise<void>
|
||||
toggleDebugMode: () => void
|
||||
toggleFloatingControls: () => Promise<void>
|
||||
toggleTrainingControls: () => Promise<void>
|
||||
setTheme: (theme: 'system' | 'light' | 'dark') => Promise<void>
|
||||
setTelemetryEnabled: (enabled: boolean) => Promise<void>
|
||||
setBillingUsageNotificationsEnabled: (enabled: boolean) => Promise<void>
|
||||
@@ -39,6 +42,7 @@ export type UserSettings = {
|
||||
autoPan: boolean
|
||||
consoleExpandedByDefault: boolean
|
||||
showFloatingControls: boolean
|
||||
showTrainingControls: boolean
|
||||
telemetryEnabled: boolean
|
||||
isBillingUsageNotificationsEnabled: boolean
|
||||
}
|
||||
|
||||
packages/db/migrations/0092_mighty_kinsey_walden.sql (new file, 2 lines)
@@ -0,0 +1,2 @@
ALTER TABLE "settings" ADD COLUMN "show_floating_controls" boolean DEFAULT true NOT NULL;--> statement-breakpoint
ALTER TABLE "settings" ADD COLUMN "show_training_controls" boolean DEFAULT false NOT NULL;
packages/db/migrations/meta/0092_snapshot.json (new file, 6866 lines; diff suppressed because it is too large)
@@ -638,6 +638,13 @@
      "when": 1758567567287,
      "tag": "0091_amusing_iron_lad",
      "breakpoints": true
    },
    {
      "idx": 92,
      "version": "7",
      "when": 1758740238058,
      "tag": "0092_mighty_kinsey_walden",
      "breakpoints": true
    }
  ]
}
@@ -379,6 +379,10 @@ export const settings = pgTable('settings', {
    .notNull()
    .default(true),

  // UI preferences
  showFloatingControls: boolean('show_floating_controls').notNull().default(true),
  showTrainingControls: boolean('show_training_controls').notNull().default(false),

  updatedAt: timestamp('updated_at').notNull().defaultNow(),
})