feat(memory): added memory block and tool (#372)

* feat(memory): added memory block and service

* feat(memory): ran migrations

* improvement(memory): appending memories; console messages

* feat(memory): added agent raw message history input UI

* feat(agent-messages): added agent message history

* improvement: added tests
This commit is contained in:
Emir Karabeg
2025-05-19 02:54:39 -07:00
committed by Emir Karabeg
parent b29827c4ee
commit 0af7fb2a7a
25 changed files with 4029 additions and 94 deletions

View File

@@ -0,0 +1,329 @@
import { NextRequest, NextResponse } from 'next/server'
import { and, eq, isNull } from 'drizzle-orm'
import { createLogger } from '@/lib/logs/console-logger'
import { db } from '@/db'
import { memory } from '@/db/schema'
const logger = createLogger('MemoryByIdAPI')
export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'
/**
 * GET handler for retrieving a specific memory by ID.
 *
 * The route param `id` is matched against `memory.key` (scoped per workflow),
 * not the surrogate `memory.id` column.
 *
 * Query parameters:
 * - workflowId: ID of the owning workflow (required)
 *
 * Responses: 200 with the memory row, 400 on missing workflowId,
 * 404 when not found (or soft-deleted), 500 on unexpected errors.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ id: string }> }
) {
  const requestId = crypto.randomUUID().slice(0, 8)
  const { id } = await params
  try {
    logger.info(`[${requestId}] Processing memory get request for ID: ${id}`)

    // Get workflowId from query parameter (required)
    const url = new URL(request.url)
    const workflowId = url.searchParams.get('workflowId')
    if (!workflowId) {
      logger.warn(`[${requestId}] Missing required parameter: workflowId`)
      return NextResponse.json(
        {
          success: false,
          error: {
            message: 'workflowId parameter is required',
          },
        },
        { status: 400 }
      )
    }

    // Query the database for the memory; soft-deleted rows are excluded.
    const memories = await db
      .select()
      .from(memory)
      .where(
        and(
          eq(memory.key, id),
          eq(memory.workflowId, workflowId),
          isNull(memory.deletedAt)
        )
      )
      .orderBy(memory.createdAt)
      .limit(1)

    if (memories.length === 0) {
      logger.warn(`[${requestId}] Memory not found: ${id} for workflow: ${workflowId}`)
      return NextResponse.json(
        {
          success: false,
          error: {
            message: 'Memory not found',
          },
        },
        { status: 404 }
      )
    }

    logger.info(`[${requestId}] Memory retrieved successfully: ${id} for workflow: ${workflowId}`)
    return NextResponse.json(
      {
        success: true,
        data: memories[0],
      },
      { status: 200 }
    )
  } catch (error: unknown) {
    // Log before responding — previously errors were swallowed silently.
    const message = (error instanceof Error && error.message) || 'Failed to retrieve memory'
    logger.error(`[${requestId}] Error retrieving memory ${id}: ${message}`)
    return NextResponse.json(
      {
        success: false,
        error: {
          message,
        },
      },
      { status: 500 }
    )
  }
}
/**
 * DELETE handler for removing a specific memory.
 *
 * Performs a soft delete (sets `deletedAt`) so the row remains in the table;
 * the route param `id` is matched against `memory.key` within the workflow.
 *
 * Query parameters:
 * - workflowId: ID of the owning workflow (required)
 *
 * Responses: 200 on success, 400 on missing workflowId, 404 when the memory
 * does not exist (or is already soft-deleted), 500 on unexpected errors.
 */
export async function DELETE(
  request: NextRequest,
  { params }: { params: Promise<{ id: string }> }
) {
  const requestId = crypto.randomUUID().slice(0, 8)
  const { id } = await params
  try {
    logger.info(`[${requestId}] Processing memory delete request for ID: ${id}`)

    // Get workflowId from query parameter (required)
    const url = new URL(request.url)
    const workflowId = url.searchParams.get('workflowId')
    if (!workflowId) {
      logger.warn(`[${requestId}] Missing required parameter: workflowId`)
      return NextResponse.json(
        {
          success: false,
          error: {
            message: 'workflowId parameter is required',
          },
        },
        { status: 400 }
      )
    }

    // Verify memory exists (and is not already soft-deleted) before deleting.
    const existingMemory = await db
      .select({ id: memory.id })
      .from(memory)
      .where(
        and(
          eq(memory.key, id),
          eq(memory.workflowId, workflowId),
          isNull(memory.deletedAt)
        )
      )
      .limit(1)

    if (existingMemory.length === 0) {
      logger.warn(`[${requestId}] Memory not found: ${id} for workflow: ${workflowId}`)
      return NextResponse.json(
        {
          success: false,
          error: {
            message: 'Memory not found',
          },
        },
        { status: 404 }
      )
    }

    // Soft delete by setting the deletedAt timestamp; data is preserved.
    await db
      .update(memory)
      .set({
        deletedAt: new Date(),
        updatedAt: new Date(),
      })
      .where(
        and(
          eq(memory.key, id),
          eq(memory.workflowId, workflowId)
        )
      )

    logger.info(`[${requestId}] Memory deleted successfully: ${id} for workflow: ${workflowId}`)
    return NextResponse.json(
      {
        success: true,
        data: { message: 'Memory deleted successfully' },
      },
      { status: 200 }
    )
  } catch (error: unknown) {
    // Log before responding — previously errors were swallowed silently.
    const message = (error instanceof Error && error.message) || 'Failed to delete memory'
    logger.error(`[${requestId}] Error deleting memory ${id}: ${message}`)
    return NextResponse.json(
      {
        success: false,
        error: {
          message,
        },
      },
      { status: 500 }
    )
  }
}
/**
 * PUT handler for updating a specific memory.
 *
 * The route param `id` is matched against `memory.key` within the workflow.
 * The replacement payload is validated against the *stored* memory type:
 * 'agent' memories must carry a role/content message.
 *
 * Expected JSON body:
 * - data: replacement memory content (required)
 * - workflowId: ID of the owning workflow (required)
 *
 * Responses: 200 with the updated row, 400 on validation failure,
 * 404 when not found (or soft-deleted), 500 on unexpected errors.
 */
export async function PUT(
  request: NextRequest,
  { params }: { params: Promise<{ id: string }> }
) {
  const requestId = crypto.randomUUID().slice(0, 8)
  const { id } = await params
  try {
    logger.info(`[${requestId}] Processing memory update request for ID: ${id}`)

    // Parse request body
    const body = await request.json()
    const { data, workflowId } = body

    if (!data) {
      logger.warn(`[${requestId}] Missing required field: data`)
      return NextResponse.json(
        {
          success: false,
          error: {
            message: 'Memory data is required',
          },
        },
        { status: 400 }
      )
    }
    if (!workflowId) {
      logger.warn(`[${requestId}] Missing required field: workflowId`)
      return NextResponse.json(
        {
          success: false,
          error: {
            message: 'workflowId is required',
          },
        },
        { status: 400 }
      )
    }

    // Verify memory exists (and is not soft-deleted) before attempting to update.
    const existingMemories = await db
      .select()
      .from(memory)
      .where(
        and(
          eq(memory.key, id),
          eq(memory.workflowId, workflowId),
          isNull(memory.deletedAt)
        )
      )
      .limit(1)

    if (existingMemories.length === 0) {
      logger.warn(`[${requestId}] Memory not found: ${id} for workflow: ${workflowId}`)
      return NextResponse.json(
        {
          success: false,
          error: {
            message: 'Memory not found',
          },
        },
        { status: 404 }
      )
    }

    const existingMemory = existingMemories[0]

    // Validate incoming data against the existing memory's type.
    if (existingMemory.type === 'agent') {
      if (!data.role || !data.content) {
        logger.warn(`[${requestId}] Missing agent memory fields`)
        return NextResponse.json(
          {
            success: false,
            error: {
              message: 'Agent memory requires role and content',
            },
          },
          { status: 400 }
        )
      }
      if (!['user', 'assistant', 'system'].includes(data.role)) {
        logger.warn(`[${requestId}] Invalid agent role: ${data.role}`)
        return NextResponse.json(
          {
            success: false,
            error: {
              message: 'Agent role must be user, assistant, or system',
            },
          },
          { status: 400 }
        )
      }
    }

    // Replace the memory's data wholesale (no merge/append on PUT).
    await db
      .update(memory)
      .set({
        data,
        updatedAt: new Date(),
      })
      .where(
        and(
          eq(memory.key, id),
          eq(memory.workflowId, workflowId)
        )
      )

    // Fetch the updated row so the response reflects DB-applied values.
    const updatedMemories = await db
      .select()
      .from(memory)
      .where(
        and(
          eq(memory.key, id),
          eq(memory.workflowId, workflowId)
        )
      )
      .limit(1)

    logger.info(`[${requestId}] Memory updated successfully: ${id} for workflow: ${workflowId}`)
    return NextResponse.json(
      {
        success: true,
        data: updatedMemories[0],
      },
      { status: 200 }
    )
  } catch (error: unknown) {
    // Log before responding — previously errors were swallowed silently.
    const message = (error instanceof Error && error.message) || 'Failed to update memory'
    logger.error(`[${requestId}] Error updating memory ${id}: ${message}`)
    return NextResponse.json(
      {
        success: false,
        error: {
          message,
        },
      },
      { status: 500 }
    )
  }
}

View File

@@ -0,0 +1,335 @@
import { NextRequest, NextResponse } from 'next/server'
import { and, eq, like, isNull } from 'drizzle-orm'
import { db } from '@/db'
import { memory } from '@/db/schema'
import { createLogger } from '@/lib/logs/console-logger'
const logger = createLogger('MemoryAPI')
export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'
/**
 * GET handler for searching and retrieving memories.
 *
 * Supports query parameters:
 * - query: substring match against memory keys
 * - type: filter by memory type
 * - limit: maximum number of results (default: 50; invalid/non-positive
 *   values fall back to the default)
 * - workflowId: filter by workflow ID (required)
 */
export async function GET(request: NextRequest) {
  const requestId = crypto.randomUUID().slice(0, 8)
  try {
    logger.info(`[${requestId}] Processing memory search request`)

    // Extract filters from query parameters
    const url = new URL(request.url)
    const workflowId = url.searchParams.get('workflowId')
    const searchQuery = url.searchParams.get('query')
    const type = url.searchParams.get('type')

    // Guard the untrusted limit param: parseInt can yield NaN, and Postgres
    // rejects negative LIMIT values — fall back to the default of 50.
    const rawLimit = parseInt(url.searchParams.get('limit') || '50', 10)
    const limit = Number.isFinite(rawLimit) && rawLimit > 0 ? rawLimit : 50

    // Require workflowId for security
    if (!workflowId) {
      logger.warn(`[${requestId}] Missing required parameter: workflowId`)
      return NextResponse.json(
        {
          success: false,
          error: {
            message: 'workflowId parameter is required',
          },
        },
        { status: 400 }
      )
    }

    // Build query conditions: exclude soft-deleted rows, scope to workflow,
    // then add optional type and key-substring filters.
    const conditions = []
    conditions.push(isNull(memory.deletedAt))
    conditions.push(eq(memory.workflowId, workflowId))
    if (type) {
      conditions.push(eq(memory.type, type))
    }
    if (searchQuery) {
      // Leading wildcard means this cannot use the key index efficiently,
      // but keys are expected to be short and per-workflow counts small.
      conditions.push(like(memory.key, `%${searchQuery}%`))
    }

    // Execute the query
    const memories = await db
      .select()
      .from(memory)
      .where(and(...conditions))
      .orderBy(memory.createdAt)
      .limit(limit)

    logger.info(`[${requestId}] Found ${memories.length} memories for workflow: ${workflowId}`)
    return NextResponse.json(
      {
        success: true,
        data: { memories },
      },
      { status: 200 }
    )
  } catch (error: unknown) {
    // Log before responding — previously errors were swallowed silently.
    const message = (error instanceof Error && error.message) || 'Failed to search memories'
    logger.error(`[${requestId}] Error searching memories: ${message}`)
    return NextResponse.json(
      {
        success: false,
        error: {
          message,
        },
      },
      { status: 500 }
    )
  }
}
/**
 * POST handler for creating new memories.
 *
 * If a memory with the same key already exists for the workflow, the new
 * data is APPENDED (agent: pushed onto the message array; raw: shallow-merged
 * when both sides are objects) rather than rejected.
 *
 * Expected JSON body:
 * - key: unique identifier for the memory (within workflow scope)
 * - type: memory type ('agent' or 'raw')
 * - data: memory content (role/content message for 'agent', arbitrary for 'raw')
 * - workflowId: ID of the workflow this memory belongs to
 *
 * Responses: 201 on create, 200 on append, 400 on validation failure,
 * 409 on a unique-constraint race, 500 on unexpected errors.
 */
export async function POST(request: NextRequest) {
  const requestId = crypto.randomUUID().slice(0, 8)
  try {
    logger.info(`[${requestId}] Processing memory creation request`)

    // Parse request body
    const body = await request.json()
    const { key, type, data, workflowId } = body

    // Validate required fields
    if (!key) {
      logger.warn(`[${requestId}] Missing required field: key`)
      return NextResponse.json(
        {
          success: false,
          error: {
            message: 'Memory key is required',
          },
        },
        { status: 400 }
      )
    }
    if (!type || !['agent', 'raw'].includes(type)) {
      logger.warn(`[${requestId}] Invalid memory type: ${type}`)
      return NextResponse.json(
        {
          success: false,
          error: {
            message: 'Valid memory type (agent or raw) is required',
          },
        },
        { status: 400 }
      )
    }
    if (!data) {
      logger.warn(`[${requestId}] Missing required field: data`)
      return NextResponse.json(
        {
          success: false,
          error: {
            message: 'Memory data is required',
          },
        },
        { status: 400 }
      )
    }
    if (!workflowId) {
      logger.warn(`[${requestId}] Missing required field: workflowId`)
      return NextResponse.json(
        {
          success: false,
          error: {
            message: 'workflowId is required',
          },
        },
        { status: 400 }
      )
    }

    // Additional validation for agent type: must be a role/content message.
    if (type === 'agent') {
      if (!data.role || !data.content) {
        logger.warn(`[${requestId}] Missing agent memory fields`)
        return NextResponse.json(
          {
            success: false,
            error: {
              message: 'Agent memory requires role and content',
            },
          },
          { status: 400 }
        )
      }
      if (!['user', 'assistant', 'system'].includes(data.role)) {
        logger.warn(`[${requestId}] Invalid agent role: ${data.role}`)
        return NextResponse.json(
          {
            success: false,
            error: {
              message: 'Agent role must be user, assistant, or system',
            },
          },
          { status: 400 }
        )
      }
    }

    // Check if memory with the same key already exists for this workflow.
    // NOTE: this select-then-insert is not atomic; a concurrent create is
    // caught by the unique index and surfaced as 409 in the catch below.
    const existingMemory = await db
      .select()
      .from(memory)
      .where(
        and(
          eq(memory.key, key),
          eq(memory.workflowId, workflowId),
          isNull(memory.deletedAt)
        )
      )
      .limit(1)

    if (existingMemory.length > 0) {
      logger.info(`[${requestId}] Memory with key ${key} exists, checking if we can append`)

      // Appending across types is not meaningful — reject mismatches.
      if (existingMemory[0].type !== type) {
        logger.warn(
          `[${requestId}] Memory type mismatch: existing=${existingMemory[0].type}, new=${type}`
        )
        return NextResponse.json(
          {
            success: false,
            error: {
              message: `Cannot append memory of type '${type}' to existing memory of type '${existingMemory[0].type}'`,
            },
          },
          { status: 400 }
        )
      }

      // Handle appending based on memory type
      let updatedData
      if (type === 'agent') {
        // Agent memories are a growing array of messages; normalize a legacy
        // single-object payload into an array before appending.
        const newMessage = data
        const existingData = existingMemory[0].data
        if (Array.isArray(existingData)) {
          updatedData = [...existingData, newMessage]
        } else {
          updatedData = [existingData, newMessage]
        }
      } else {
        // Raw memories: shallow-merge when both sides are objects, otherwise
        // replace. NOTE(review): typeof null/arrays is also 'object', so null
        // or array data takes the merge path — confirm raw data is always a
        // plain object before relying on merge semantics here.
        if (typeof existingMemory[0].data === 'object' && typeof data === 'object') {
          updatedData = { ...existingMemory[0].data, ...data }
        } else {
          updatedData = data
        }
      }

      // Update the existing memory with the appended data.
      await db
        .update(memory)
        .set({
          data: updatedData,
          updatedAt: new Date(),
        })
        .where(
          and(
            eq(memory.key, key),
            eq(memory.workflowId, workflowId)
          )
        )

      // Fetch the updated row so the response reflects DB-applied values.
      const updatedMemory = await db
        .select()
        .from(memory)
        .where(
          and(
            eq(memory.key, key),
            eq(memory.workflowId, workflowId)
          )
        )
        .limit(1)

      logger.info(`[${requestId}] Memory appended successfully: ${key} for workflow: ${workflowId}`)
      return NextResponse.json(
        {
          success: true,
          data: updatedMemory[0],
        },
        { status: 200 }
      )
    }

    // Insert the new memory. Agent data is always stored as an array.
    const newMemory = {
      id: `mem_${crypto.randomUUID().replace(/-/g, '')}`,
      workflowId,
      key,
      type,
      data: type === 'agent' ? (Array.isArray(data) ? data : [data]) : data,
      createdAt: new Date(),
      updatedAt: new Date(),
    }
    await db.insert(memory).values(newMemory)

    logger.info(`[${requestId}] Memory created successfully: ${key} for workflow: ${workflowId}`)
    return NextResponse.json(
      {
        success: true,
        data: newMemory,
      },
      { status: 201 }
    )
  } catch (error: unknown) {
    // 23505 is Postgres's unique_violation — a concurrent create slipped in
    // between our existence check and the insert. Narrow safely: pg errors
    // are Error instances carrying a string `code` property.
    const pgCode =
      typeof error === 'object' && error !== null && 'code' in error
        ? (error as { code?: unknown }).code
        : undefined
    if (pgCode === '23505') {
      logger.warn(`[${requestId}] Duplicate key violation`)
      return NextResponse.json(
        {
          success: false,
          error: {
            message: 'Memory with this key already exists',
          },
        },
        { status: 409 }
      )
    }

    // Log before responding — previously errors were swallowed silently.
    const message = (error instanceof Error && error.message) || 'Failed to create memory'
    logger.error(`[${requestId}] Error creating memory: ${message}`)
    return NextResponse.json(
      {
        success: false,
        error: {
          message,
        },
      },
      { status: 500 }
    )
  }
}

View File

@@ -26,6 +26,7 @@ export async function POST(request: NextRequest) {
responseFormat,
workflowId,
stream,
messages,
} = body
let finalApiKey: string
@@ -51,6 +52,7 @@ export async function POST(request: NextRequest) {
responseFormat,
workflowId,
stream,
messages,
})
// Check if the response is a StreamingExecution

View File

@@ -1,5 +1,5 @@
import { useEffect, useRef, useState } from 'react'
import { BookOpen, Info, RectangleHorizontal, RectangleVertical } from 'lucide-react'
import { BookOpen, Code, Info, RectangleHorizontal, RectangleVertical } from 'lucide-react'
import { Handle, NodeProps, Position, useUpdateNodeInternals } from 'reactflow'
import { Badge } from '@/components/ui/badge'
import { Button } from '@/components/ui/button'
@@ -61,6 +61,8 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
const isWide = useWorkflowStore((state) => state.blocks[id]?.isWide ?? false)
const blockHeight = useWorkflowStore((state) => state.blocks[id]?.height ?? 0)
const hasActiveWebhook = useWorkflowStore((state) => state.hasActiveWebhook ?? false)
const blockAdvancedMode = useWorkflowStore((state) => state.blocks[id]?.advancedMode ?? false)
const toggleBlockAdvancedMode = useWorkflowStore((state) => state.toggleBlockAdvancedMode)
// Workflow store actions
const updateBlockName = useWorkflowStore((state) => state.updateBlockName)
@@ -257,11 +259,18 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
const blocks = useWorkflowStore.getState().blocks
const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId || undefined
const mergedState = mergeSubblockState(blocks, activeWorkflowId, blockId)[blockId]
const isAdvancedMode = useWorkflowStore.getState().blocks[blockId]?.advancedMode ?? false
// Filter visible blocks and those that meet their conditions
const visibleSubBlocks = subBlocks.filter((block) => {
if (block.hidden) return false
// Filter by mode if specified
if (block.mode) {
if (block.mode === 'basic' && isAdvancedMode) return false
if (block.mode === 'advanced' && !isAdvancedMode) return false
}
// If there's no condition, the block should be shown
if (!block.condition) return true
@@ -552,76 +561,86 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
</TooltipContent>
</Tooltip>
)}
{config.longDescription && (
{config.subBlocks.some((block) => block.mode) && (
<Tooltip>
<TooltipTrigger asChild>
{config.docsLink ? (
<Button
variant="ghost"
size="sm"
className="text-gray-500 p-1 h-7"
onClick={(e) => {
e.stopPropagation()
window.open(config.docsLink, '_target', 'noopener,noreferrer')
}}
>
<BookOpen className="h-5 w-5" />
</Button>
) : (
<Button
variant="ghost"
size="sm"
onClick={() => toggleBlockAdvancedMode(id)}
className={cn('text-gray-500 p-1 h-7', blockAdvancedMode && 'text-[#701FFC]')}
>
<Code className="h-5 w-5" />
</Button>
</TooltipTrigger>
<TooltipContent side="top">
{blockAdvancedMode ? 'Switch to Basic Mode' : 'Switch to Advanced Mode'}
</TooltipContent>
</Tooltip>
)}
{config.docsLink ? (
<Tooltip>
<TooltipTrigger asChild>
<Button
variant="ghost"
size="sm"
className="text-gray-500 p-1 h-7"
onClick={(e) => {
e.stopPropagation()
window.open(config.docsLink, '_target', 'noopener,noreferrer')
}}
>
<BookOpen className="h-5 w-5" />
</Button>
</TooltipTrigger>
<TooltipContent side="top">See Docs</TooltipContent>
</Tooltip>
) : (
config.longDescription && (
<Tooltip>
<TooltipTrigger asChild>
<Button variant="ghost" size="sm" className="text-gray-500 p-1 h-7">
<Info className="h-5 w-5" />
</Button>
)}
</TooltipTrigger>
<TooltipContent side="top" className="max-w-[300px] p-4">
<div className="space-y-3">
<div>
<p className="text-sm font-medium mb-1">Description</p>
<p className="text-sm text-muted-foreground">{config.longDescription}</p>
{config.docsLink && (
<p className="text-xs text-blue-500 mt-1">
<a
href={config.docsLink}
target="_blank"
rel="noopener noreferrer"
onClick={(e) => {
e.stopPropagation()
}}
>
View Documentation
</a>
</p>
</TooltipTrigger>
<TooltipContent side="top" className="max-w-[300px] p-4">
<div className="space-y-3">
<div>
<p className="text-sm font-medium mb-1">Description</p>
<p className="text-sm text-muted-foreground">{config.longDescription}</p>
</div>
{config.outputs && (
<div>
<p className="text-sm font-medium mb-1">Output</p>
<div className="text-sm">
{Object.entries(config.outputs).map(([key, value]) => (
<div key={key} className="mb-1">
<span className="text-muted-foreground">{key}</span>{' '}
{typeof value.type === 'object' ? (
<div className="pl-3 mt-1">
{Object.entries(value.type).map(([typeKey, typeValue]) => (
<div key={typeKey} className="flex items-start">
<span className="text-blue-500 font-medium">
{typeKey}:
</span>
<span className="text-green-500 ml-1">
{typeValue as string}
</span>
</div>
))}
</div>
) : (
<span className="text-green-500">{value.type as string}</span>
)}
</div>
))}
</div>
</div>
)}
</div>
{config.outputs && (
<div>
<p className="text-sm font-medium mb-1">Output</p>
<div className="text-sm">
{Object.entries(config.outputs).map(([key, value]) => (
<div key={key} className="mb-1">
<span className="text-muted-foreground">{key}</span>{' '}
{typeof value.type === 'object' ? (
<div className="pl-3 mt-1">
{Object.entries(value.type).map(([typeKey, typeValue]) => (
<div key={typeKey} className="flex items-start">
<span className="text-blue-500 font-medium">{typeKey}:</span>
<span className="text-green-500 ml-1">
{typeValue as string}
</span>
</div>
))}
</div>
) : (
<span className="text-green-500">{value.type as string}</span>
)}
</div>
))}
</div>
</div>
)}
</div>
</TooltipContent>
</Tooltip>
</TooltipContent>
</Tooltip>
)
)}
<Tooltip>
<TooltipTrigger asChild>

View File

@@ -61,6 +61,7 @@ export const AgentBlock: BlockConfig<AgentResponse> = {
layout: 'full',
placeholder: 'Enter system prompt...',
rows: 5,
mode: 'basic',
},
{
id: 'context',
@@ -69,6 +70,16 @@ export const AgentBlock: BlockConfig<AgentResponse> = {
layout: 'full',
placeholder: 'Enter context or user message...',
rows: 3,
mode: 'basic',
},
{
id: 'messages',
title: 'Messages',
type: 'code',
layout: 'full',
mode: 'advanced',
language: 'javascript',
placeholder: '[{"role": "user", "content": "Hello, can you help me with a question?"}]',
},
{
id: 'model',
@@ -226,6 +237,11 @@ export const AgentBlock: BlockConfig<AgentResponse> = {
context: { type: 'string', required: false },
model: { type: 'string', required: true },
apiKey: { type: 'string', required: true },
messages: {
type: 'json',
required: false,
description: 'Array of message objects with role and content fields for advanced chat history control.'
},
responseFormat: {
type: 'json',
required: false,

View File

@@ -22,7 +22,7 @@ export const ConditionBlock: BlockConfig<ConditionBlockOutput> = {
longDescription:
'Add a condition to the workflow to branch the execution path based on a boolean expression.',
docsLink: 'https://docs.simstudio.ai/blocks/condition',
bgColor: '#FF972F',
bgColor: '#FF752F',
icon: ConditionalIcon,
category: 'blocks',
subBlocks: [

View File

@@ -7,32 +7,235 @@ export const MemoryBlock: BlockConfig = {
description: 'Add memory store',
longDescription:
'Create persistent storage for data that needs to be accessed across multiple workflow steps. Store and retrieve information throughout your workflow execution to maintain context and state.',
bgColor: '#FF65BF',
bgColor: '#F64F9E',
icon: BrainIcon,
category: 'blocks',
docsLink: 'https://docs.simstudio.ai/tools/memory',
tools: {
access: [],
access: ['memory_add', 'memory_get', 'memory_get_all', 'memory_delete'],
config: {
tool: (params: Record<string, any>) => {
const operation = params.operation || 'add'
switch (operation) {
case 'add':
return 'memory_add'
case 'get':
return 'memory_get'
case 'getAll':
return 'memory_get_all'
case 'delete':
return 'memory_delete'
default:
return 'memory_add'
}
},
params: (params: Record<string, any>) => {
// Create detailed error information for any missing required fields
const errors: string[] = []
if (!params.operation) {
errors.push('Operation is required')
}
if (params.operation === 'add' || params.operation === 'get' || params.operation === 'delete') {
if (!params.id) {
errors.push(`Memory ID is required for ${params.operation} operation`)
}
}
if (params.operation === 'add') {
if (!params.type) {
errors.push('Memory type is required for add operation')
} else if (params.type === 'agent') {
if (!params.role) {
errors.push('Role is required for agent memory')
}
if (!params.content) {
errors.push('Content is required for agent memory')
}
} else if (params.type === 'raw') {
if (!params.rawData) {
errors.push('Raw data is required for raw memory')
}
}
}
// Throw error if any required fields are missing
if (errors.length > 0) {
throw new Error(`Memory Block Error: ${errors.join(', ')}`)
}
// Base result object
const baseResult: Record<string, any> = {}
// For add operation
if (params.operation === 'add') {
const result: Record<string, any> = {
...baseResult,
id: params.id,
type: params.type,
}
if (params.type === 'agent') {
result.role = params.role
result.content = params.content
} else if (params.type === 'raw') {
result.rawData = params.rawData
}
return result
}
// For get operation
if (params.operation === 'get') {
return {
...baseResult,
id: params.id,
}
}
// For delete operation
if (params.operation === 'delete') {
return {
...baseResult,
id: params.id,
}
}
// For getAll operation
return baseResult
},
},
},
inputs: {
code: { type: 'string', required: true },
timeout: { type: 'number', required: false },
memoryLimit: { type: 'number', required: false },
operation: { type: 'string', required: true },
id: { type: 'string', required: true },
type: { type: 'string', required: false },
role: { type: 'string', required: false },
content: { type: 'string', required: false },
rawData: { type: 'json', required: false },
},
outputs: {
response: {
type: {
result: 'any',
stdout: 'string',
executionTime: 'number',
memory: 'any',
memories: 'any',
id: 'string',
},
},
},
subBlocks: [
{
id: 'code',
id: 'operation',
title: 'Operation',
type: 'dropdown',
layout: 'full',
options: [
{ label: 'Add Memory', id: 'add' },
{ label: 'Get All Memories', id: 'getAll' },
{ label: 'Get Memory', id: 'get' },
{ label: 'Delete Memory', id: 'delete' },
],
placeholder: 'Select operation',
},
{
id: 'id',
title: 'ID',
type: 'short-input',
layout: 'full',
placeholder: 'Enter memory identifier',
condition: {
field: 'operation',
value: 'add',
},
},
{
id: 'id',
title: 'ID',
type: 'short-input',
layout: 'full',
placeholder: 'Enter memory identifier to retrieve',
condition: {
field: 'operation',
value: 'get',
},
},
{
id: 'id',
title: 'ID',
type: 'short-input',
layout: 'full',
placeholder: 'Enter memory identifier to delete',
condition: {
field: 'operation',
value: 'delete',
},
},
{
id: 'type',
title: 'Type',
type: 'dropdown',
layout: 'full',
options: [
{ label: 'Agent', id: 'agent' },
{ label: 'Raw', id: 'raw' },
],
placeholder: 'Select memory type',
condition: {
field: 'operation',
value: 'add',
},
},
{
id: 'role',
title: 'Role',
type: 'dropdown',
layout: 'full',
options: [
{ label: 'User', id: 'user' },
{ label: 'Assistant', id: 'assistant' },
{ label: 'System', id: 'system' },
],
placeholder: 'Select agent role',
condition: {
field: 'type',
value: 'agent',
and: {
field: 'operation',
value: 'add',
},
},
},
{
id: 'content',
title: 'Content',
type: 'short-input',
layout: 'full',
placeholder: 'Enter message content',
condition: {
field: 'type',
value: 'agent',
and: {
field: 'operation',
value: 'add',
},
},
},
{
id: 'rawData',
title: 'Raw Data',
type: 'code',
layout: 'full',
},
language: 'json',
placeholder: '{"key": "value"}',
condition: {
field: 'type',
value: 'raw',
and: {
field: 'operation',
value: 'add',
},
},
}
],
}

View File

@@ -25,6 +25,7 @@ import { GoogleDocsBlock } from './blocks/google_docs'
import { GoogleDriveBlock } from './blocks/google_drive'
import { GoogleSheetsBlock } from './blocks/google_sheets'
// import { GuestyBlock } from './blocks/guesty'
import { MemoryBlock } from './blocks/memory'
import { ImageGeneratorBlock } from './blocks/image_generator'
import { JinaBlock } from './blocks/jina'
import { JiraBlock } from './blocks/jira'
@@ -92,6 +93,7 @@ export const registry: Record<string, BlockConfig> = {
pinecone: PineconeBlock,
reddit: RedditBlock,
router: RouterBlock,
memory: MemoryBlock,
s3: S3Block,
serper: SerperBlock,
stagehand: StagehandBlock,

View File

@@ -87,6 +87,7 @@ export interface SubBlockConfig {
title?: string
type: SubBlockType
layout?: SubBlockLayout
mode?: 'basic' | 'advanced' | 'both' // Default is 'both' if not specified
options?:
| string[]
| { label: string; id: string }[]

View File

@@ -1863,16 +1863,27 @@ export function MistralIcon(props: SVGProps<SVGSVGElement>) {
// NOTE(review): the scraped diff fused the removed old <svg> body with the
// newly added one, leaving unbalanced JSX; this is the added (new) markup only.
export function BrainIcon(props: SVGProps<SVGSVGElement>) {
  return (
    <svg
      {...props}
      xmlns="http://www.w3.org/2000/svg"
      width="24"
      height="24"
      viewBox="0 0 24 24"
      fill="none"
      stroke="currentColor"
      strokeWidth="2"
      strokeLinecap="round"
      strokeLinejoin="round"
    >
      <path d="M12 5a3 3 0 1 0-5.997.125 4 4 0 0 0-2.526 5.77 4 4 0 0 0 .556 6.588A4 4 0 1 0 12 18Z" />
      <path d="M12 5a3 3 0 1 1 5.997.125 4 4 0 0 1 2.526 5.77 4 4 0 0 1-.556 6.588A4 4 0 1 1 12 18Z" />
      <path d="M15 13a4.5 4.5 0 0 1-3-4 4.5 4.5 0 0 1-3 4" />
      <path d="M17.599 6.5a3 3 0 0 0 .399-1.375" />
      <path d="M6.003 5.125A3 3 0 0 0 6.401 6.5" />
      <path d="M3.477 10.896a4 4 0 0 1 .585-.396" />
      <path d="M19.938 10.5a4 4 0 0 1 .585.396" />
      <path d="M6 18a4 4 0 0 1-1.967-.516" />
      <path d="M19.967 17.484A4 4 0 0 1 18 18" />
    </svg>
  )
}

View File

@@ -0,0 +1,15 @@
-- Migration 0038: persistent "memory" store for workflows.
-- Rows are soft-deleted via deleted_at; "key" is the caller-facing identifier,
-- unique per workflow (see memory_workflow_key_idx below).
CREATE TABLE "memory" (
"id" text PRIMARY KEY NOT NULL,
"workflow_id" text,
"key" text NOT NULL,
"type" text NOT NULL,
"data" json NOT NULL,
"created_at" timestamp DEFAULT now() NOT NULL,
"updated_at" timestamp DEFAULT now() NOT NULL,
"deleted_at" timestamp
);
--> statement-breakpoint
-- Cascade-delete memories when their owning workflow row is removed.
ALTER TABLE "memory" ADD CONSTRAINT "memory_workflow_id_workflow_id_fk" FOREIGN KEY ("workflow_id") REFERENCES "public"."workflow"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
-- Lookup indexes: by key, by workflow, and the per-workflow uniqueness guarantee
-- that the POST /memory handler relies on (unique_violation 23505 on races).
CREATE INDEX "memory_key_idx" ON "memory" USING btree ("key");--> statement-breakpoint
CREATE INDEX "memory_workflow_idx" ON "memory" USING btree ("workflow_id");--> statement-breakpoint
CREATE UNIQUE INDEX "memory_workflow_key_idx" ON "memory" USING btree ("workflow_id","key");

File diff suppressed because it is too large.

View File

@@ -261,6 +261,13 @@
"when": 1747460441992,
"tag": "0037_outgoing_madame_hydra",
"breakpoints": true
},
{
"idx": 38,
"version": "7",
"when": 1747559012564,
"tag": "0038_shocking_thor",
"breakpoints": true
}
]
}
}

View File

@@ -7,6 +7,7 @@ import {
text,
timestamp,
uniqueIndex,
index,
} from 'drizzle-orm/pg-core'
export const user = pgTable('user', {
@@ -380,3 +381,29 @@ export const workspaceInvitation = pgTable('workspace_invitation', {
createdAt: timestamp('created_at').notNull().defaultNow(),
updatedAt: timestamp('updated_at').notNull().defaultNow(),
})
export const memory = pgTable(
'memory',
{
id: text('id').primaryKey(),
workflowId: text('workflow_id').references(() => workflow.id, { onDelete: 'cascade' }),
key: text('key').notNull(), // Identifier for the memory within its context
type: text('type').notNull(), // 'agent' or 'raw'
data: json('data').notNull(), // Stores either agent message data or raw data
createdAt: timestamp('created_at').notNull().defaultNow(),
updatedAt: timestamp('updated_at').notNull().defaultNow(),
deletedAt: timestamp('deleted_at'),
},
(table) => {
return {
// Add index on key for faster lookups
keyIdx: index('memory_key_idx').on(table.key),
// Add index on workflowId for faster filtering
workflowIdx: index('memory_workflow_idx').on(table.workflowId),
// Compound unique index to ensure keys are unique per workflow
uniqueKeyPerWorkflowIdx: uniqueIndex('memory_workflow_key_idx').on(table.workflowId, table.key),
}
}
)

View File

@@ -761,6 +761,142 @@ describe('AgentBlockHandler', () => {
)
})
  // Tests for the raw `messages` parameter (advanced mode): when a valid
  // messages array is supplied it should be forwarded verbatim to the provider
  // and take precedence over systemPrompt/context.
  it('should execute with raw JSON messages array', async () => {
    const inputs = {
      model: 'gpt-4o',
      messages: [
        { role: 'system', content: 'You are a helpful assistant.' },
        { role: 'user', content: 'Hello, how are you?' }
      ],
      apiKey: 'test-api-key',
    }
    mockGetProviderFromModel.mockReturnValue('openai')
    await handler.execute(mockBlock, inputs, mockContext)
    // Inspect the body of the outgoing provider request captured by the fetch mock
    const fetchCall = mockFetch.mock.calls[0]
    const requestBody = JSON.parse(fetchCall[1].body)
    // Verify messages were sent to the provider
    expect(requestBody.messages).toBeDefined()
    expect(requestBody.messages.length).toBe(2)
    expect(requestBody.messages[0].role).toBe('system')
    expect(requestBody.messages[1].role).toBe('user')
    // Verify system prompt and context are not included
    expect(requestBody.systemPrompt).toBeUndefined()
    expect(requestBody.context).toBeUndefined()
  })
  // Single-quoted pseudo-JSON (not valid JSON.parse input) must still be
  // accepted via the handler's quote-normalizing fallback parser.
  it('should parse and use messages with single quotes', async () => {
    const inputs = {
      model: 'gpt-4o',
      // Single-quoted JSON format
      messages: `[{'role': 'system', 'content': 'You are a helpful assistant.'}, {'role': 'user', 'content': 'Hello, how are you?'}]`,
      apiKey: 'test-api-key',
    }
    mockGetProviderFromModel.mockReturnValue('openai')
    await handler.execute(mockBlock, inputs, mockContext)
    const fetchCall = mockFetch.mock.calls[0]
    const requestBody = JSON.parse(fetchCall[1].body)
    // Verify messages were parsed and sent to the provider
    expect(requestBody.messages).toBeDefined()
    expect(requestBody.messages.length).toBe(2)
    expect(requestBody.messages[0].role).toBe('system')
    expect(requestBody.messages[0].content).toBe('You are a helpful assistant.')
    expect(requestBody.messages[1].role).toBe('user')
    expect(requestBody.messages[1].content).toBe('Hello, how are you?')
  })
  // Advanced-mode messages win over basic-mode fields when both are present.
  it('should prioritize messages over systemPrompt and context when both are provided', async () => {
    const inputs = {
      model: 'gpt-4o',
      // Valid messages array should take priority
      messages: [
        { role: 'system', content: 'You are an AI assistant.' },
        { role: 'user', content: 'What is the capital of France?' }
      ],
      // These should be ignored since messages are valid
      systemPrompt: 'You are a helpful assistant.',
      context: 'Tell me about the weather.',
      apiKey: 'test-api-key',
    }
    mockGetProviderFromModel.mockReturnValue('openai')
    await handler.execute(mockBlock, inputs, mockContext)
    const fetchCall = mockFetch.mock.calls[0]
    const requestBody = JSON.parse(fetchCall[1].body)
    // Verify messages were sent to the provider
    expect(requestBody.messages).toBeDefined()
    expect(requestBody.messages.length).toBe(2)
    expect(requestBody.messages[0].content).toBe('You are an AI assistant.')
    expect(requestBody.messages[1].content).toBe('What is the capital of France?')
    // Verify system prompt and context are not included
    expect(requestBody.systemPrompt).toBeUndefined()
    expect(requestBody.context).toBeUndefined()
  })
  // A structurally invalid messages array must not be forwarded; the handler
  // should silently fall back to the basic-mode systemPrompt/context fields.
  it('should fall back to systemPrompt and context if messages array is invalid', async () => {
    const inputs = {
      model: 'gpt-4o',
      // Invalid messages array (missing required 'role' field)
      messages: [
        { content: 'This message is missing the role field' },
        { role: 'user', content: 'Hello' }
      ],
      // These should be used as fallback
      systemPrompt: 'You are a helpful assistant.',
      context: 'Help the user with their query.',
      apiKey: 'test-api-key',
    }
    mockGetProviderFromModel.mockReturnValue('openai')
    await handler.execute(mockBlock, inputs, mockContext)
    const fetchCall = mockFetch.mock.calls[0]
    const requestBody = JSON.parse(fetchCall[1].body)
    // Verify fallback to systemPrompt and context
    expect(requestBody.messages).toBeUndefined()
    expect(requestBody.systemPrompt).toBe('You are a helpful assistant.')
    expect(requestBody.context).toBe('Help the user with their query.')
  })
  // Mixed single/double quoting in one payload, including apostrophes inside
  // double-quoted strings — exercises the most permissive parsing path.
  it('should handle messages with mixed quote styles', async () => {
    const inputs = {
      model: 'gpt-4o',
      // Mixed quote styles as shown in the user's example
      messages: `[{'role': 'system', "content": "Only answer questions about the United States. If someone asks about something else, just say you can't help with that."}, {"role": "user", "content": "What's the capital of Bosnia and Herzegovina?"}]`,
      apiKey: 'test-api-key',
    }
    mockGetProviderFromModel.mockReturnValue('openai')
    await handler.execute(mockBlock, inputs, mockContext)
    const fetchCall = mockFetch.mock.calls[0]
    const requestBody = JSON.parse(fetchCall[1].body)
    // Verify messages were parsed and sent to the provider
    expect(requestBody.messages).toBeDefined()
    expect(requestBody.messages.length).toBe(2)
    expect(requestBody.messages[0].role).toBe('system')
    expect(requestBody.messages[0].content).toBe("Only answer questions about the United States. If someone asks about something else, just say you can't help with that.")
    expect(requestBody.messages[1].role).toBe('user')
    expect(requestBody.messages[1].content).toBe("What's the capital of Bosnia and Herzegovina?")
  })
it('should handle streaming responses with text/event-stream content type', async () => {
const mockStreamBody = {
getReader: vi.fn().mockReturnValue({

View File

@@ -188,16 +188,86 @@ export class AgentBlockHandler implements BlockHandler {
)
}
// Parse messages if they're in string format
let parsedMessages = inputs.messages;
if (typeof inputs.messages === 'string' && inputs.messages.trim()) {
try {
// Fast path: try standard JSON.parse first
try {
parsedMessages = JSON.parse(inputs.messages);
logger.info('Successfully parsed messages from JSON format');
} catch (jsonError) {
// Fast direct approach for single-quoted JSON
// Replace single quotes with double quotes, but keep single quotes inside double quotes
// This optimized approach handles the most common cases in one pass
const preprocessed = inputs.messages
// Ensure we have valid JSON by replacing all single quotes with double quotes,
// except those inside existing double quotes
.replace(/(['"])(.*?)\1/g, (match, quote, content) => {
if (quote === '"') return match; // Keep existing double quotes intact
return `"${content}"`; // Replace single quotes with double quotes
});
try {
parsedMessages = JSON.parse(preprocessed);
logger.info('Successfully parsed messages after single-quote preprocessing');
} catch (preprocessError) {
// Ultimate fallback: simply replace all single quotes
try {
parsedMessages = JSON.parse(inputs.messages.replace(/'/g, '"'));
logger.info('Successfully parsed messages using direct quote replacement');
} catch (finalError) {
logger.error('All parsing attempts failed', {
original: inputs.messages,
error: finalError
});
// Keep original value
}
}
}
} catch (error) {
logger.error('Failed to parse messages from string:', { error });
// Keep original value if all parsing fails
}
}
// Fast validation of parsed messages
const validMessages = Array.isArray(parsedMessages) &&
parsedMessages.length > 0 &&
parsedMessages.every(msg =>
typeof msg === 'object' &&
msg !== null &&
'role' in msg &&
typeof msg.role === 'string' &&
(
'content' in msg ||
(msg.role === 'assistant' && ('function_call' in msg || 'tool_calls' in msg))
)
);
if (Array.isArray(parsedMessages) && parsedMessages.length > 0 && !validMessages) {
logger.warn('Messages array has invalid format:', {
messageCount: parsedMessages.length
});
} else if (validMessages) {
logger.info('Messages validated successfully');
}
// Debug request before sending to provider
const providerRequest = {
provider: providerId,
model,
systemPrompt: inputs.systemPrompt,
context: Array.isArray(inputs.context)
? JSON.stringify(inputs.context, null, 2)
: typeof inputs.context === 'string'
? inputs.context
: JSON.stringify(inputs.context, null, 2),
// If messages are provided (advanced mode), use them exclusively and skip systemPrompt/context
...(validMessages
? { messages: parsedMessages }
: {
systemPrompt: inputs.systemPrompt,
context: Array.isArray(inputs.context)
? JSON.stringify(inputs.context, null, 2)
: typeof inputs.context === 'string'
? inputs.context
: JSON.stringify(inputs.context, null, 2),
}),
tools: formattedTools.length > 0 ? formattedTools : undefined,
temperature: inputs.temperature,
maxTokens: inputs.maxTokens,
@@ -209,14 +279,18 @@ export class AgentBlockHandler implements BlockHandler {
logger.info(`Provider request prepared`, {
model: providerRequest.model,
hasSystemPrompt: !!providerRequest.systemPrompt,
hasContext: !!providerRequest.context,
hasMessages: Array.isArray(parsedMessages) && parsedMessages.length > 0,
hasSystemPrompt: !(Array.isArray(parsedMessages) && parsedMessages.length > 0) && !!inputs.systemPrompt,
hasContext: !(Array.isArray(parsedMessages) && parsedMessages.length > 0) && !!inputs.context,
hasTools: !!providerRequest.tools,
hasApiKey: !!providerRequest.apiKey,
workflowId: providerRequest.workflowId,
stream: shouldUseStreaming,
isBlockSelectedForOutput,
hasOutgoingConnections,
// Debug info about messages to help diagnose issues
messagesProvided: 'messages' in providerRequest,
messagesCount: 'messages' in providerRequest && Array.isArray(providerRequest.messages) ? providerRequest.messages.length : 0
})
const baseUrl = env.NEXT_PUBLIC_APP_URL || ''

View File

@@ -770,6 +770,57 @@ export const useWorkflowStore = create<WorkflowStoreWithHistory>()(
get().sync.markDirty()
get().sync.forceSync()
},
toggleBlockAdvancedMode: (id: string) => {
  const existing = get().blocks[id]
  if (!existing) return

  // Flip the advancedMode flag, keeping edges/loops references fresh so
  // subscribers are notified of the update.
  set({
    blocks: {
      ...get().blocks,
      [id]: { ...existing, advancedMode: !existing.advancedMode },
    },
    edges: [...get().edges],
    loops: { ...get().loops },
  })

  // Clear the subblock values that belong to the mode we just left.
  const activeWorkflowId = useWorkflowRegistry.getState().activeWorkflowId
  if (activeWorkflowId) {
    const subBlockStore = useSubBlockStore.getState()
    const currentValues = subBlockStore.workflowValues[activeWorkflowId]?.[id] || {}
    const clearedValues = { ...currentValues }

    if (existing.advancedMode) {
      // Was advanced, now basic: drop the advanced-mode messages field.
      clearedValues.messages = null
    } else {
      // Was basic, now advanced: drop the basic-mode fields.
      clearedValues.systemPrompt = null
      clearedValues.context = null
    }

    // Write the cleared values back into the subblock store.
    useSubBlockStore.setState({
      workflowValues: {
        ...subBlockStore.workflowValues,
        [activeWorkflowId]: {
          ...subBlockStore.workflowValues[activeWorkflowId],
          [id]: clearedValues,
        },
      },
    })
  }

  get().triggerUpdate()
  get().sync.markDirty()
  get().sync.forceSync()
},
})),
{ name: 'workflow-store' }
)

View File

@@ -17,6 +17,7 @@ export interface BlockState {
horizontalHandles?: boolean
isWide?: boolean
height?: number
advancedMode?: boolean
}
export interface SubBlockState {
@@ -78,6 +79,7 @@ export interface WorkflowActions {
setDeploymentStatus: (isDeployed: boolean, deployedAt?: Date) => void
setScheduleStatus: (hasActiveSchedule: boolean) => void
setWebhookStatus: (hasActiveWebhook: boolean) => void
toggleBlockAdvancedMode: (id: string) => void
// Add the sync control methods to the WorkflowActions interface
sync: SyncControl

View File

@@ -0,0 +1,170 @@
import { ToolConfig } from '../types'
import { MemoryResponse } from './types'
// Add Memory Tool
/**
 * Add Memory Tool
 *
 * Creates a new memory or appends to an existing memory with the same key.
 * Requires `workflowId` in the execution context (`params._context`).
 * Validation failures are reported via `_errorResponse` payloads instead of
 * thrown errors so the tool runner can surface them as HTTP-style results.
 */
export const memoryAddTool: ToolConfig<any, MemoryResponse> = {
  id: 'memory_add',
  name: 'Add Memory',
  description: 'Add a new memory to the database or append to existing memory with the same ID. When appending to existing memory, the memory types must match.',
  version: '1.0.0',
  params: {
    id: {
      type: 'string',
      required: true,
      description: 'Identifier for the memory. If a memory with this ID already exists, the new data will be appended to it.',
    },
    type: {
      type: 'string',
      required: true,
      description: 'Type of memory (agent or raw)',
    },
    role: {
      type: 'string',
      required: false,
      description: 'Role for agent memory (user, assistant, or system)',
    },
    content: {
      type: 'string',
      required: false,
      description: 'Content for agent memory',
    },
    rawData: {
      type: 'json',
      required: false,
      description: 'Raw data to store (JSON format)',
    }
  },
  request: {
    url: '/api/memory',
    method: 'POST',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    // Builds the POST body; returns an _errorResponse object on invalid input.
    body: (params) => {
      // Get workflowId from context (set by workflow execution); memories are
      // scoped per workflow so it is mandatory.
      const workflowId = params._context?.workflowId
      if (!workflowId) {
        return {
          _errorResponse: {
            status: 400,
            data: {
              success: false,
              error: {
                message: 'workflowId is required and must be provided in execution context'
              }
            }
          }
        }
      }
      const body: Record<string, any> = {
        key: params.id,
        type: params.type,
        workflowId
      }
      // Shape the data payload according to the declared memory type.
      if (params.type === 'agent') {
        // Agent memories are chat messages and need both role and content.
        if (!params.role || !params.content) {
          return {
            _errorResponse: {
              status: 400,
              data: {
                success: false,
                error: {
                  message: 'Role and content are required for agent memory'
                }
              }
            }
          }
        }
        body.data = {
          role: params.role,
          content: params.content,
        }
      } else if (params.type === 'raw') {
        if (!params.rawData) {
          return {
            _errorResponse: {
              status: 400,
              data: {
                success: false,
                error: {
                  message: 'Raw data is required for raw memory'
                }
              }
            }
          }
        }
        // Accept either a pre-parsed object or a JSON string.
        let parsedRawData
        if (typeof params.rawData === 'string') {
          try {
            parsedRawData = JSON.parse(params.rawData)
          } catch (e) {
            return {
              _errorResponse: {
                status: 400,
                data: {
                  success: false,
                  error: {
                    message: 'Invalid JSON for raw data'
                  }
                }
              }
            }
          }
        } else {
          parsedRawData = params.rawData
        }
        body.data = parsedRawData
      }
      return body
    },
    isInternalRoute: true,
  },
  transformResponse: async (response): Promise<MemoryResponse> => {
    try {
      const result = await response.json()
      if (!response.ok) {
        const errorMessage = result.error?.message || 'Failed to add memory'
        throw new Error(errorMessage)
      }
      const data = result.data || result
      // 201 means a new memory was created; any other success status is an append.
      const isNewMemory = response.status === 201
      return {
        success: true,
        output: {
          memory: data.data,
          message: isNewMemory ? 'Memory created successfully' : 'Memory appended successfully'
        },
      }
    } catch (error: any) {
      const errorMessage = `Failed to add memory: ${error.message || 'Unknown error occurred'}`
      return {
        success: false,
        output: {
          memory: undefined,
          message: errorMessage
        },
        // Populate the error field for consistency with the other memory tools.
        error: errorMessage
      }
    }
  },
  transformError: async (error): Promise<MemoryResponse> => {
    const errorMessage = `Memory operation failed: ${error.message || 'Unknown error occurred'}`
    return {
      success: false,
      output: {
        memory: undefined,
        message: errorMessage
      },
      error: errorMessage
    }
  },
}

View File

@@ -0,0 +1,83 @@
import { ToolConfig } from '../types'
import { MemoryResponse } from './types'
// Delete Memory Tool
// Delete Memory Tool — removes a single memory by key, scoped to the
// executing workflow via the workflowId from the tool context.
export const memoryDeleteTool: ToolConfig<any, MemoryResponse> = {
  id: 'memory_delete',
  name: 'Delete Memory',
  description: 'Delete a specific memory by its ID',
  version: '1.0.0',
  params: {
    id: {
      type: 'string',
      required: true,
      description: 'Identifier for the memory to delete',
    }
  },
  request: {
    url: (params): any => {
      // Memories are scoped per workflow, so the workflow id supplied by the
      // execution context is mandatory; report its absence as a 400 payload.
      const workflowId = params._context?.workflowId
      if (!workflowId) {
        return {
          _errorResponse: {
            status: 400,
            data: {
              success: false,
              error: {
                message: 'workflowId is required and must be provided in execution context'
              }
            }
          }
        }
      }
      // Scope the DELETE to this workflow via a query parameter.
      const key = encodeURIComponent(params.id)
      const scope = encodeURIComponent(workflowId)
      return `/api/memory/${key}?workflowId=${scope}`
    },
    method: 'DELETE',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    isInternalRoute: true,
  },
  transformResponse: async (response): Promise<MemoryResponse> => {
    try {
      const result = await response.json()
      if (!response.ok) {
        throw new Error(result.error?.message || 'Failed to delete memory')
      }
      return {
        success: true,
        output: {
          memory: undefined,
          message: `Deleted memory.`
        },
      }
    } catch (error: any) {
      const message = `Failed to delete memory: ${error.message || 'Unknown error'}`
      return {
        success: false,
        output: {
          memory: undefined,
          message
        },
        error: message
      }
    }
  },
  transformError: async (error): Promise<MemoryResponse> => {
    const errorMessage = `Memory deletion failed: ${error.message || 'Unknown error'}`
    return {
      success: false,
      output: {
        memory: undefined,
        message: errorMessage
      },
      error: errorMessage
    }
  },
}

View File

@@ -0,0 +1,88 @@
import { ToolConfig } from '../types'
import { MemoryResponse } from './types'
// Get All Memories Tool
/**
 * Get All Memories Tool
 *
 * Retrieves every memory belonging to the executing workflow and returns a
 * compact `{ key, type, data }` projection of each record.
 */
export const memoryGetAllTool: ToolConfig<any, MemoryResponse> = {
  id: 'memory_get_all',
  name: 'Get All Memories',
  description: 'Retrieve all memories from the database',
  version: '1.0.0',
  params: {},
  request: {
    url: (params): any => {
      // Get workflowId from context (set by workflow execution); memories are
      // scoped per workflow so it is required.
      const workflowId = params._context?.workflowId
      if (!workflowId) {
        return {
          _errorResponse: {
            status: 400,
            data: {
              success: false,
              error: {
                message: 'workflowId is required and must be provided in execution context'
              }
            }
          }
        }
      }
      // Append workflowId as query parameter
      return `/api/memory?workflowId=${encodeURIComponent(workflowId)}`
    },
    method: 'GET',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    isInternalRoute: true,
  },
  transformResponse: async (response): Promise<MemoryResponse> => {
    try {
      const result = await response.json()
      if (!response.ok) {
        const errorMessage = result.error?.message || 'Failed to retrieve memories'
        throw new Error(errorMessage)
      }
      const data = result.data || result
      // Normalize the payload to an array: the API may return { memories: [...] }
      // or a bare array. Guarding with Array.isArray avoids a TypeError (and a
      // spurious failure response) when the payload has an unexpected shape —
      // the old `data.memories || data` fallback could hand a plain object to .map.
      const rawMemories: any[] = Array.isArray(data.memories)
        ? data.memories
        : Array.isArray(data)
          ? data
          : []
      // Return memories with their keys and types for better context
      const memories = rawMemories.map((memory: any) => ({
        key: memory.key,
        type: memory.type,
        data: memory.data
      }))
      return {
        success: true,
        output: {
          memories,
          message: 'Memories retrieved successfully'
        },
      }
    } catch (error: any) {
      return {
        success: false,
        output: {
          memories: [],
          message: `Failed to retrieve memories: ${error.message || 'Unknown error'}`
        },
        error: `Failed to retrieve memories: ${error.message || 'Unknown error'}`
      }
    }
  },
  transformError: async (error): Promise<MemoryResponse> => {
    const errorMessage = `Memory retrieval failed: ${error.message || 'Unknown error'}`
    return {
      success: false,
      output: {
        memories: [],
        message: errorMessage
      },
      error: errorMessage
    }
  },
}

View File

@@ -0,0 +1,85 @@
import { ToolConfig } from '../types'
import { MemoryResponse } from './types'
// Get Memory Tool
// Get Memory Tool — fetches a single memory record by key for the
// workflow identified in the execution context.
export const memoryGetTool: ToolConfig<any, MemoryResponse> = {
  id: 'memory_get',
  name: 'Get Memory',
  description: 'Retrieve a specific memory by its ID',
  version: '1.0.0',
  params: {
    id: {
      type: 'string',
      required: true,
      description: 'Identifier for the memory to retrieve',
    }
  },
  request: {
    url: (params): any => {
      // Memories are scoped per workflow, so the workflow id supplied by the
      // execution context is mandatory; report its absence as a 400 payload.
      const workflowId = params._context?.workflowId
      if (!workflowId) {
        return {
          _errorResponse: {
            status: 400,
            data: {
              success: false,
              error: {
                message: 'workflowId is required and must be provided in execution context'
              }
            }
          }
        }
      }
      // Scope the lookup to this workflow via a query parameter.
      const key = encodeURIComponent(params.id)
      const scope = encodeURIComponent(workflowId)
      return `/api/memory/${key}?workflowId=${scope}`
    },
    method: 'GET',
    headers: () => ({
      'Content-Type': 'application/json',
    }),
    isInternalRoute: true,
  },
  transformResponse: async (response): Promise<MemoryResponse> => {
    try {
      const result = await response.json()
      if (!response.ok) {
        throw new Error(result.error?.message || 'Failed to retrieve memory')
      }
      const payload = result.data || result
      return {
        success: true,
        output: {
          memory: payload.data,
          message: 'Memory retrieved successfully'
        },
      }
    } catch (error: any) {
      const message = `Failed to retrieve memory: ${error.message || 'Unknown error'}`
      return {
        success: false,
        output: {
          memory: undefined,
          message
        },
        error: message
      }
    }
  },
  transformError: async (error): Promise<MemoryResponse> => {
    const errorMessage = `Memory retrieval failed: ${error.message || 'Unknown error'}`
    return {
      success: false,
      output: {
        memory: undefined,
        message: errorMessage
      },
      error: errorMessage
    }
  },
}

View File

@@ -0,0 +1,6 @@
// Barrel file for the workflow-scoped memory tools.
export { memoryAddTool } from './add_memory'
export { memoryGetTool } from './get_memory'
export { memoryGetAllTool } from './get_all_memories'
export { memoryDeleteTool } from './delete_memory'

View File

@@ -0,0 +1,35 @@
import { ToolResponse } from '../types'
/**
 * Response envelope returned by every memory tool.
 * Single-record operations populate `memory`; list operations populate
 * `memories`. `message` is always present and human-readable.
 */
export interface MemoryResponse extends ToolResponse {
  output: {
    memory?: any
    memories?: any[]
    message: string
  }
}

/** One chat message stored in an 'agent' type memory. */
export interface AgentMemoryData {
  role: 'user' | 'assistant' | 'system'
  content: string
}

/** Arbitrary JSON payload stored in a 'raw' type memory. */
export interface RawMemoryData {
  [key: string]: any
}

/**
 * Shape of a stored memory record as exposed by the memory API.
 * 'agent' memories hold an array of messages; 'raw' memories hold one object.
 * Timestamps are serialized strings here — presumably ISO-8601 from the API
 * layer; confirm against the route handlers.
 */
export interface MemoryRecord {
  id: string
  key: string // Identifier for the memory within its workflow
  type: 'agent' | 'raw'
  data: AgentMemoryData[] | RawMemoryData
  createdAt: string
  updatedAt: string
  workflowId?: string
  workspaceId?: string
}

/** Structured error detail for memory operations. */
export interface MemoryError {
  code: string
  message: string
  details?: Record<string, any>
}

View File

@@ -42,6 +42,7 @@ import { readUrlTool } from './jina'
import { jiraBulkRetrieveTool, jiraRetrieveTool, jiraUpdateTool, jiraWriteTool } from './jira'
import { linkupSearchTool } from './linkup'
import { mem0AddMemoriesTool, mem0GetMemoriesTool, mem0SearchMemoriesTool } from './mem0'
import { memoryAddTool, memoryGetTool, memoryGetAllTool, memoryDeleteTool } from './memory'
import { mistralParserTool } from './mistral'
import { notionReadTool, notionWriteTool } from './notion'
import { dalleTool, embeddingsTool as openAIEmbeddings } from './openai'
@@ -154,6 +155,10 @@ export const tools: Record<string, ToolConfig> = {
mem0_add_memories: mem0AddMemoriesTool,
mem0_search_memories: mem0SearchMemoriesTool,
mem0_get_memories: mem0GetMemoriesTool,
memory_add: memoryAddTool,
memory_get: memoryGetTool,
memory_get_all: memoryGetAllTool,
memory_delete: memoryDeleteTool,
elevenlabs_tts: elevenLabsTtsTool,
s3_get_object: s3GetObjectTool,
telegram_message: telegramMessageTool,