feat(assistant): add OpenAI assistance for generating JS/TS code and JSON schemas (#231)

* change hiddenFromSidebar to hideFromToolbar

* added code generation for the function block's code

* added ai code gen to custom tools and response format

* optimize system prompts for js/json generation following our formatting

* add conversation history; fix Apply to replace existing content instead of appending

* remove unused imports

* updated styling of chat bar

* refined system prompts for code generation, resolved PR comments
This commit is contained in:
Waleed Latif
2025-04-06 18:29:44 -07:00
committed by GitHub
parent d97ecead5d
commit a74c866a81
14 changed files with 1628 additions and 157 deletions

View File

@@ -0,0 +1,432 @@
import { NextRequest, NextResponse } from 'next/server'
import OpenAI from 'openai'
import { createLogger } from '@/lib/logs/console-logger'
// Route segment config: opt out of static rendering and force the Node.js
// runtime, since this handler talks to the OpenAI SDK per-request.
export const dynamic = 'force-dynamic'
export const runtime = 'nodejs'

const logger = createLogger('GenerateCodeAPI')

// Build the OpenAI client once at module load when a key is configured.
// When no key is present the client stays null and POST responds with 503.
const apiKey = process.env.OPENAI_API_KEY
const openai: OpenAI | null = apiKey ? new OpenAI({ apiKey }) : null
if (!openai) {
  logger.warn('OPENAI_API_KEY not found. Code generation API will not function.')
}
// The artifact kinds this endpoint can generate; each key maps to a dedicated
// system prompt in `systemPrompts` below.
type GenerationType = 'json-schema' | 'javascript-function-body' | 'typescript-function-body'
// Define the structure for a single message in the history
interface ChatMessage {
  role: 'user' | 'assistant' | 'system' // System role might be needed if we include the initial system prompt in history
  content: string
}
// Shape of the JSON payload accepted by the POST handler.
interface RequestBody {
  prompt: string // Natural-language description of what to generate (required)
  generationType: GenerationType // Selects the system prompt / output format (required)
  context?: string // Existing code/schema to include with the first prompt
  stream?: boolean // When true, respond with newline-delimited JSON chunks
  history?: ChatMessage[] // Optional conversation history
}
// Per-type system prompts, sent verbatim to the model.
// NOTE: the template-literal contents below ARE the prompt text — do not
// reflow, reindent, or "fix" anything inside the backticks.
const systemPrompts: Record<GenerationType, string> = {
// Emits a single JSON object in our tool / response-format schema shape
// (top-level name/description/strict/schema).
'json-schema': `You are an expert programmer specializing in creating JSON schemas according to a specific format.
Generate ONLY the JSON schema based on the user's request.
The output MUST be a single, valid JSON object, starting with { and ending with }.
The JSON object MUST have the following top-level properties: 'name' (string), 'description' (string), 'strict' (boolean, usually true), and 'schema' (object).
The 'schema' object must define the structure and MUST contain 'type': 'object', 'properties': {...}, 'additionalProperties': false, and 'required': [...].
Inside 'properties', use standard JSON Schema properties (type, description, enum, items for arrays, etc.).
Do not include any explanations, markdown formatting, or other text outside the JSON object.
Valid Schema Examples:
Example 1:
{
"name": "reddit_post",
"description": "Fetches the reddit posts in the given subreddit",
"strict": true,
"schema": {
"type": "object",
"properties": {
"title": {
"type": "string",
"description": "The title of the post"
},
"content": {
"type": "string",
"description": "The content of the post"
}
},
"additionalProperties": false,
"required": [ "title", "content" ]
}
}
Example 2:
{
"name": "get_weather",
"description": "Fetches the current weather for a specific location.",
"strict": true,
"schema": {
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g., San Francisco, CA"
},
"unit": {
"type": "string",
"description": "Temperature unit",
"enum": ["celsius", "fahrenheit"]
}
},
"additionalProperties": false,
"required": ["location", "unit"]
}
}
Example 3 (Array Input):
{
"name": "process_items",
"description": "Processes a list of items with specific IDs.",
"strict": true,
"schema": {
"type": "object",
"properties": {
"item_ids": {
"type": "array",
"description": "A list of unique item identifiers to process.",
"items": {
"type": "string",
"description": "An item ID"
}
},
"processing_mode": {
"type": "string",
"description": "The mode for processing",
"enum": ["fast", "thorough"]
}
},
"additionalProperties": false,
"required": ["item_ids", "processing_mode"]
}
}
`,
// Emits only a raw JS function body. The {{ENV_VAR}} and <param> placeholders
// it describes are substituted by our executor before the code runs.
'javascript-function-body': `You are an expert JavaScript programmer.
Generate ONLY the raw body of a JavaScript function based on the user's request.
The code should be executable within an 'async function(params, environmentVariables) {...}' context.
- 'params' (object): Contains input parameters derived from the JSON schema. Access these directly using the parameter name wrapped in angle brackets, e.g., '<paramName>'. Do NOT use 'params.paramName'.
- 'environmentVariables' (object): Contains environment variables. Reference these using the double curly brace syntax: '{{ENV_VAR_NAME}}'. Do NOT use 'environmentVariables.VAR_NAME' or process.env.
IMPORTANT FORMATTING RULES:
1. Reference Environment Variables: Use the exact syntax {{VARIABLE_NAME}}. Do NOT wrap it in quotes (e.g., use 'apiKey = {{SERVICE_API_KEY}}' not 'apiKey = "{{SERVICE_API_KEY}}"'). Our system replaces these placeholders before execution.
2. Reference Input Parameters/Workflow Variables: Use the exact syntax <variable_name>. Do NOT wrap it in quotes (e.g., use 'userId = <userId>;' not 'userId = "<userId>";'). This includes parameters defined in the block's schema and outputs from previous blocks.
3. Function Body ONLY: Do NOT include the function signature (e.g., 'async function myFunction() {' or the surrounding '}').
4. Imports: Do NOT include import/require statements unless they are standard Node.js built-in modules (e.g., 'crypto', 'fs'). External libraries are not supported in this context.
5. Output: Ensure the code returns a value if the function is expected to produce output. Use 'return'.
6. Clarity: Write clean, readable code.
7. No Explanations: Do NOT include markdown formatting, comments explaining the rules, or any text other than the raw JavaScript code for the function body.
Example Scenario:
User Prompt: "Fetch user data from an API. Use the User ID passed in as 'userId' and an API Key stored as the 'SERVICE_API_KEY' environment variable."
Generated Code:
const userId = <block.response.content>; // Correct: Accessing input parameter without quotes
const apiKey = {{SERVICE_API_KEY}}; // Correct: Accessing environment variable without quotes
const url = \`https://api.example.com/users/\${userId}\`;
try {
const response = await fetch(url, {
method: 'GET',
headers: {
'Authorization': \`Bearer \${apiKey}\`,
'Content-Type': 'application/json'
}
});
if (!response.ok) {
// Throwing an error will mark the block execution as failed
throw new Error(\`API request failed with status \${response.status}: \${await response.text()}\`);
}
const data = await response.json();
console.log('User data fetched successfully.'); // Optional: logging for debugging
return data; // Return the fetched data which becomes the block's output
} catch (error) {
console.error(\`Error fetching user data: \${error.message}\`);
// Re-throwing the error ensures the workflow knows this step failed.
throw error;
}`,
// Emits only a raw TS function body (no semicolons, per our TS style).
'typescript-function-body': `You are an expert TypeScript programmer.
Generate ONLY the body of a TypeScript function based on the user's request.
The code should be executable within an async context. You have access to a 'params' object (typed as Record<string, any>) containing input parameters and an 'environmentVariables' object (typed as Record<string, string>) for env vars.
Do not include the function signature (e.g., 'async function myFunction(): Promise<any> {').
Do not include import/require statements unless absolutely necessary and they are standard Node.js modules.
Do not include markdown formatting or explanations.
Output only the raw TypeScript code. Use modern TypeScript features where appropriate. Do not use semicolons.
Example:
const userId = <block.response.content> as string
const apiKey = {{SERVICE_API_KEY}}
const response = await fetch(\`https://api.example.com/users/\${userId}\`, { headers: { Authorization: \`Bearer \${apiKey}\` } })
if (!response.ok) {
throw new Error(\`Failed to fetch user data: \${response.statusText}\`)
}
const data: unknown = await response.json()
// Add type checking/assertion if necessary
return data // Ensure you return a value if expected`,
}
/**
 * POST handler: generates a JSON schema or a JS/TS function body with OpenAI.
 *
 * Accepts a {@link RequestBody} JSON payload. When `stream` is true, responds
 * with newline-delimited JSON objects ({ chunk, done } per token, then a final
 * { done: true, fullContent? } or { error, done: true }); otherwise responds
 * with a single JSON envelope ({ success, generatedContent } on success, or
 * { success: false, error } with an appropriate status on failure).
 */
export async function POST(req: NextRequest) {
  // Short correlation id shared by all log lines for this request.
  const requestId = crypto.randomUUID().slice(0, 8)
  logger.info(`[${requestId}] Received code generation request`)

  // The client is only constructed at module load when OPENAI_API_KEY exists.
  if (!openai) {
    logger.error(`[${requestId}] OpenAI client not initialized. Missing API key.`)
    return NextResponse.json(
      { success: false, error: 'Code generation service is not configured.' },
      { status: 503 }
    )
  }

  try {
    const body = (await req.json()) as RequestBody
    // Destructure history along with other fields
    const { prompt, generationType, context, stream = false, history = [] } = body

    if (!prompt || !generationType) {
      logger.warn(`[${requestId}] Invalid request: Missing prompt or generationType.`)
      return NextResponse.json(
        { success: false, error: 'Missing required fields: prompt and generationType.' },
        { status: 400 }
      )
    }

    if (!systemPrompts[generationType]) {
      logger.warn(`[${requestId}] Invalid generationType: ${generationType}`)
      return NextResponse.json(
        { success: false, error: `Invalid generationType: ${generationType}` },
        { status: 400 }
      )
    }

    const systemPrompt = systemPrompts[generationType]

    // Attach existing content only on the first turn; follow-up turns already
    // carry it via `history`.
    // FIX: use real newline escapes (\n) — the previous `\\n` emitted a
    // literal backslash-n sequence into the prompt instead of line breaks.
    const currentUserMessageContent = context
      ? `Prompt: ${prompt}\n\nExisting Content/Context:\n${context}`
      : `${prompt}`

    // Always prepend a fresh system prompt, and drop any system messages the
    // client may have echoed back inside `history` so it is never duplicated.
    const messages: ChatMessage[] = [{ role: 'system', content: systemPrompt }]
    messages.push(...history.filter((msg) => msg.role !== 'system'))
    messages.push({ role: 'user', content: currentUserMessageContent })

    logger.debug(`[${requestId}] Calling OpenAI API`, {
      generationType,
      stream,
      historyLength: history.length,
    })

    // --- Streaming path: forward chunks as newline-delimited JSON ---
    if (stream) {
      const encoder = new TextEncoder()
      const streamResponse = new TransformStream()
      const writer = streamResponse.writable.getWriter()

      const streamOpenAI = async () => {
        try {
          const streamCompletion = await openai!.chat.completions.create({
            // Non-null assertion is safe: openai was checked above.
            model: 'gpt-4o',
            messages,
            temperature: 0.2,
            max_tokens: 1500,
            stream: true,
          })

          // Accumulate the full output only when it must be validated as JSON.
          let fullContent = generationType === 'json-schema' ? '' : undefined

          for await (const chunk of streamCompletion) {
            const content = chunk.choices[0]?.delta?.content || ''
            if (content) {
              if (fullContent !== undefined) {
                fullContent += content
              }
              // Forward the chunk to the client immediately.
              const payload = encoder.encode(
                JSON.stringify({
                  chunk: content,
                  done: false,
                }) + '\n'
              )
              await writer.write(payload)
            }
          }

          // Validate the accumulated JSON once the stream has finished.
          if (generationType === 'json-schema') {
            try {
              JSON.parse(fullContent!)
            } catch (parseError: any) {
              logger.error(`[${requestId}] Generated JSON schema is invalid`, {
                error: parseError.message,
                content: fullContent,
              })
              const errorPayload = encoder.encode(
                JSON.stringify({
                  error: 'Generated JSON schema was invalid.',
                  done: true,
                }) + '\n'
              )
              await writer.write(errorPayload)
              await writer.close()
              return
            }
          }

          // Final "done" message; include fullContent when it was accumulated.
          const donePayload = encoder.encode(
            JSON.stringify({
              done: true,
              ...(fullContent !== undefined && { fullContent: fullContent }),
            }) + '\n'
          )
          await writer.write(donePayload)
          await writer.close()
          logger.info(`[${requestId}] Code generation streaming completed`, { generationType })
        } catch (error: any) {
          logger.error(`[${requestId}] Streaming error`, {
            error: error.message || 'Unknown error',
            stack: error.stack,
          })
          // Client-facing message stays generic; details remain in the logs.
          const clientErrorMessage = 'An error occurred during code generation streaming.'
          const errorPayload = encoder.encode(
            JSON.stringify({
              error: clientErrorMessage,
              done: true,
            }) + '\n'
          )
          await writer.write(errorPayload)
          await writer.close()
        }
      }

      // FIX: mark the fire-and-forget call explicitly with `void`. Rejections
      // are fully handled inside streamOpenAI, so no promise is left floating.
      void streamOpenAI()

      return new Response(streamResponse.readable, {
        headers: {
          'Content-Type': 'text/event-stream',
          'Cache-Control': 'no-cache',
          Connection: 'keep-alive',
        },
      })
    }

    // --- Non-streaming path ---
    const completion = await openai!.chat.completions.create({
      // Non-null assertion is safe: openai was checked above.
      model: 'gpt-4o',
      messages,
      temperature: 0.2,
      max_tokens: 1500,
      // Ask the API to guarantee syntactically valid JSON for schemas.
      response_format: generationType === 'json-schema' ? { type: 'json_object' } : undefined,
    })

    const generatedContent = completion.choices[0]?.message?.content?.trim()

    if (!generatedContent) {
      logger.error(`[${requestId}] OpenAI response was empty or invalid.`)
      return NextResponse.json(
        { success: false, error: 'Failed to generate content. OpenAI response was empty.' },
        { status: 500 }
      )
    }

    logger.info(`[${requestId}] Code generation successful`, { generationType })

    if (generationType === 'json-schema') {
      // Reject syntactically invalid schemas rather than passing them through.
      try {
        JSON.parse(generatedContent)
        return NextResponse.json({ success: true, generatedContent })
      } catch (parseError: any) {
        logger.error(`[${requestId}] Generated JSON schema is invalid`, {
          error: parseError.message,
          content: generatedContent,
        })
        return NextResponse.json(
          { success: false, error: 'Generated JSON schema was invalid.' },
          { status: 500 }
        )
      }
    } else {
      return NextResponse.json({ success: true, generatedContent })
    }
  } catch (error: any) {
    logger.error(`[${requestId}] Code generation failed`, {
      error: error.message || 'Unknown error',
      stack: error.stack,
    })

    // Clients receive a generic message; specifics are logged server-side.
    let clientErrorMessage = 'Code generation failed. Please try again later.'
    let serverErrorMessage = error.message || 'Unknown error'
    let status = 500

    if (error instanceof OpenAI.APIError) {
      status = error.status || 500
      serverErrorMessage = error.message // Specific API error for server logs
      logger.error(`[${requestId}] OpenAI API Error: ${status} - ${serverErrorMessage}`)
      // Tailor the (still generic) client message by status class.
      if (status === 401) {
        clientErrorMessage = 'Authentication failed. Please check your API key configuration.'
      } else if (status === 429) {
        clientErrorMessage = 'Rate limit exceeded. Please try again later.'
      } else if (status >= 500) {
        clientErrorMessage =
          'The code generation service is currently unavailable. Please try again later.'
      }
    }

    return NextResponse.json(
      {
        success: false,
        error: clientErrorMessage,
      },
      { status }
    )
  }
}

View File

@@ -165,6 +165,42 @@
.scrollbar-none::-webkit-scrollbar {
display: none;
}
/* Code Editor Streaming Effect */
@keyframes code-shimmer {
0% {
transform: translateX(-100%);
}
100% {
transform: translateX(100%);
}
}
.streaming-effect {
@apply relative overflow-hidden;
}
.streaming-effect::after {
content: '';
@apply pointer-events-none absolute left-0 top-0 h-full w-full;
background: linear-gradient(
90deg,
rgba(128, 128, 128, 0) 0%,
rgba(128, 128, 128, 0.1) 50%,
rgba(128, 128, 128, 0) 100%
);
animation: code-shimmer 1.5s infinite;
z-index: 10;
}
.dark .streaming-effect::after {
background: linear-gradient(
90deg,
rgba(180, 180, 180, 0) 0%,
rgba(180, 180, 180, 0.1) 50%,
rgba(180, 180, 180, 0) 100%
);
}
}
/* Dark mode error badge styling */
@@ -189,3 +225,23 @@ input[type='search']::-moz-search-cancel-button {
input[type='search']::-ms-clear {
display: none;
}
/* Code Prompt Bar Placeholder Animation */
@keyframes placeholder-pulse {
0%,
100% {
opacity: 0.5;
}
50% {
opacity: 0.8;
}
}
.loading-placeholder::placeholder {
animation: placeholder-pulse 1.5s ease-in-out infinite;
}
/* Dark mode error badge styling */
.dark .bg-red-500 {
@apply bg-red-700;
}

View File

@@ -0,0 +1,170 @@
import { SendIcon, XIcon } from 'lucide-react'
import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
import { cn } from '@/lib/utils'
/**
 * Props for the controlled CodePromptBar component. All state (prompt text,
 * loading/streaming flags) is owned by the parent.
 */
interface CodePromptBarProps {
  isVisible: boolean // Parent wants the bar shown
  isLoading: boolean // A generation request is in flight (pre-stream)
  isStreaming: boolean // Tokens are currently streaming into the editor
  promptValue: string // Current prompt input text (controlled)
  onSubmit: (prompt: string) => void // Called with the prompt on Enter / send click
  onCancel: () => void // Called on Escape / close (parent decides hide vs abort)
  onChange: (value: string) => void // Called as the user edits the prompt
  placeholder?: string
  className?: string
}
/**
 * Inline AI-prompt bar shown above a code editor.
 *
 * Collects a natural-language prompt and shows loading/streaming affordances
 * (pulsing status dot, shimmering input) while generation runs. This is a
 * fully controlled component: the parent owns all state via props.
 */
export function CodePromptBar({
  isVisible,
  isLoading,
  isStreaming,
  promptValue,
  onSubmit,
  onCancel,
  onChange,
  placeholder = 'Describe the JavaScript code to generate...',
  className,
}: CodePromptBarProps) {
  // Stay mounted while streaming even if the parent hides the bar, so the
  // progress indicator remains visible until generation finishes.
  if (!isVisible && !isStreaming) {
    return null
  }

  return (
    <div
      className={cn(
        'absolute -top-20 left-0 right-0',
        'bg-background rounded-xl shadow-lg border',
        // FIX: Tailwind arbitrary values require bracket syntax. The previous
        // `z-9999999` is not a recognized class and generated no CSS, so the
        // bar could render underneath neighboring elements.
        'transition-all duration-200 z-[9999999]',
        className
      )}
    >
      <div className="flex items-center gap-2 p-2">
        {/* Pulsing dot signaling idle vs streaming state (styled below). */}
        <div className={cn('status-indicator ml-1', isStreaming && 'streaming')} />
        <div className="flex-1 relative">
          <Input
            value={isStreaming ? 'Generating...' : promptValue}
            onChange={(e) => !isStreaming && onChange(e.target.value)}
            placeholder={placeholder}
            className={cn(
              'rounded-xl border-0 focus-visible:ring-0 focus-visible:ring-offset-0 text-sm text-foreground placeholder:text-muted-foreground/50',
              isStreaming && 'text-primary',
              (isLoading || isStreaming) && 'loading-placeholder'
            )}
            onKeyDown={(e) => {
              // Enter submits (only when idle and non-empty); Escape cancels.
              if (e.key === 'Enter' && !isLoading && !isStreaming && promptValue.trim()) {
                onSubmit(promptValue)
              } else if (e.key === 'Escape') {
                onCancel()
              }
            }}
            disabled={isLoading || isStreaming}
            autoFocus={!isStreaming}
          />
          {isStreaming && (
            <div className="absolute inset-0 w-full h-full overflow-hidden pointer-events-none">
              <div className="shimmer-effect" />
            </div>
          )}
        </div>
        {/* Cancel: parent hides the bar when idle, aborts when streaming. */}
        <Button
          variant="ghost"
          size="icon"
          onClick={onCancel}
          className="h-8 w-8 rounded-full text-muted-foreground hover:text-foreground hover:bg-accent/50"
        >
          <XIcon className="h-4 w-4" />
        </Button>
        {!isStreaming && (
          <Button
            variant="ghost"
            size="icon"
            onClick={() => onSubmit(promptValue)}
            className="h-8 w-8 rounded-full text-primary hover:text-primary hover:bg-primary/10"
            disabled={isLoading || isStreaming || !promptValue.trim()}
          >
            <SendIcon className="h-4 w-4" />
          </Button>
        )}
      </div>
      {/* Scoped animations for the status dot and input shimmer.
          NOTE(review): a similar shimmer keyframe also exists in the global
          stylesheet — consider consolidating to avoid drift. */}
      <style jsx global>{`
        @keyframes shimmer {
          0% {
            transform: translateX(-100%);
          }
          100% {
            transform: translateX(100%);
          }
        }
        @keyframes smoke-pulse {
          0%,
          100% {
            transform: scale(0.8);
            opacity: 0.4;
          }
          50% {
            transform: scale(1.1);
            opacity: 0.8;
          }
        }
        .status-indicator {
          position: relative;
          width: 16px;
          height: 16px;
          border-radius: 50%;
          overflow: hidden;
          background-color: hsl(var(--muted-foreground) / 0.5);
          transition: background-color 0.3s ease;
        }
        .status-indicator.streaming {
          background-color: transparent;
        }
        .status-indicator.streaming::before {
          content: '';
          position: absolute;
          inset: 0;
          border-radius: 50%;
          background: radial-gradient(
            circle,
            hsl(var(--primary) / 0.7) 0%,
            hsl(var(--primary) / 0.2) 60%,
            transparent 80%
          );
          animation: smoke-pulse 1.8s ease-in-out infinite;
        }
        .shimmer-effect {
          position: absolute;
          top: 0;
          left: 0;
          width: 100%;
          height: 100%;
          background: linear-gradient(
            90deg,
            rgba(255, 255, 255, 0) 0%,
            rgba(255, 255, 255, 0.4) 50%,
            rgba(255, 255, 255, 0) 100%
          );
          animation: shimmer 2s infinite;
        }
        .dark .shimmer-effect {
          background: linear-gradient(
            90deg,
            rgba(50, 50, 50, 0) 0%,
            rgba(80, 80, 80, 0.4) 50%,
            rgba(50, 50, 50, 0) 100%
          );
        }
      `}</style>
    </div>
  )
}

View File

@@ -19,7 +19,7 @@ export function Toolbar() {
const filteredBlocks = !searchQuery.trim() ? getBlocksByCategory(activeTab) : getAllBlocks()
return filteredBlocks.filter((block) => {
if (block.type === 'starter' || block.hiddenFromSidebar) return false
if (block.type === 'starter' || block.hideFromToolbar) return false
return (
!searchQuery.trim() ||

View File

@@ -1,13 +1,17 @@
import { useEffect, useRef, useState } from 'react'
import type { ReactElement } from 'react'
import { SparklesIcon } from 'lucide-react'
import { highlight, languages } from 'prismjs'
import 'prismjs/components/prism-javascript'
import 'prismjs/themes/prism.css'
import Editor from 'react-simple-code-editor'
import { Button } from '@/components/ui/button'
import { checkEnvVarTrigger, EnvVarDropdown } from '@/components/ui/env-var-dropdown'
import { checkTagTrigger, TagDropdown } from '@/components/ui/tag-dropdown'
import { createLogger } from '@/lib/logs/console-logger'
import { cn } from '@/lib/utils'
import { useCodeGeneration } from '@/hooks/use-code-generation'
import { CodePromptBar } from '../../../../code-prompt-bar/code-prompt-bar'
import { useSubBlockValue } from '../hooks/use-sub-block-value'
const logger = createLogger('Code')
@@ -17,6 +21,8 @@ interface CodeProps {
subBlockId: string
isConnecting: boolean
placeholder?: string
language?: 'javascript' | 'json'
generationType?: 'javascript-function-body' | 'json-schema'
}
if (typeof document !== 'undefined') {
@@ -42,10 +48,18 @@ export function Code({
subBlockId,
isConnecting,
placeholder = 'Write JavaScript...',
language = 'javascript',
generationType = 'javascript-function-body',
}: CodeProps) {
// Determine the AI prompt placeholder based on language
const aiPromptPlaceholder =
language === 'json'
? 'Describe the JSON schema to generate...'
: 'Describe the JavaScript code to generate...'
// State management
const [storeValue, setStoreValue] = useSubBlockValue(blockId, subBlockId)
const [code, setCode] = useState('')
const [code, setCode] = useState<string>('')
const [lineCount, setLineCount] = useState(1)
const [showTags, setShowTags] = useState(false)
const [showEnvVars, setShowEnvVars] = useState(false)
@@ -57,10 +71,51 @@ export function Code({
const editorRef = useRef<HTMLDivElement>(null)
// AI Code Generation Hook
const handleStreamStart = () => {
setCode('')
// Optionally clear the store value too, though handleStreamChunk will update it
// setStoreValue('')
}
const handleGeneratedContent = (generatedCode: string) => {
setCode(generatedCode)
setStoreValue(generatedCode)
}
// Handle streaming chunks directly into the editor
const handleStreamChunk = (chunk: string) => {
setCode((currentCode) => {
const newCode = currentCode + chunk
setStoreValue(newCode)
return newCode
})
}
const {
isLoading: isAiLoading,
isStreaming: isAiStreaming,
generate: generateCode,
generateStream: generateCodeStream,
cancelGeneration,
isPromptVisible,
showPromptInline,
hidePromptInline,
promptInputValue,
updatePromptValue,
} = useCodeGeneration({
generationType: generationType,
initialContext: code,
onGeneratedContent: handleGeneratedContent,
onStreamChunk: handleStreamChunk,
onStreamStart: handleStreamStart,
})
// Effects
useEffect(() => {
if (storeValue !== null) {
setCode(storeValue.toString())
const valueString = storeValue?.toString() ?? ''
if (valueString !== code) {
setCode(valueString)
}
}, [storeValue])
@@ -74,18 +129,21 @@ export function Code({
const lines = code.split('\n')
const newVisualLineHeights: number[] = []
const container = document.createElement('div')
container.style.cssText = `
const tempContainer = document.createElement('div')
tempContainer.style.cssText = `
position: absolute;
visibility: hidden;
height: auto;
width: ${preElement.clientWidth}px;
font-family: ${window.getComputedStyle(preElement).fontFamily};
font-size: ${window.getComputedStyle(preElement).fontSize};
line-height: 21px;
padding: 12px;
white-space: pre-wrap;
word-break: break-word;
box-sizing: border-box;
`
document.body.appendChild(container)
document.body.appendChild(tempContainer)
lines.forEach((line) => {
const lineDiv = document.createElement('div')
@@ -96,7 +154,6 @@ export function Code({
const span = document.createElement('span')
span.textContent = part
if (part.startsWith('<') && part.endsWith('>')) {
span.style.color = 'rgb(153, 0, 85)'
}
lineDiv.appendChild(span)
})
@@ -104,22 +161,29 @@ export function Code({
lineDiv.textContent = line || ' '
}
container.appendChild(lineDiv)
tempContainer.appendChild(lineDiv)
const actualHeight = lineDiv.getBoundingClientRect().height
const lineUnits = Math.ceil(actualHeight / 21)
const lineUnits = Math.max(1, Math.ceil(actualHeight / 21))
newVisualLineHeights.push(lineUnits)
container.removeChild(lineDiv)
tempContainer.removeChild(lineDiv)
})
document.body.removeChild(container)
document.body.removeChild(tempContainer)
setVisualLineHeights(newVisualLineHeights)
setLineCount(newVisualLineHeights.reduce((sum, height) => sum + height, 0))
}
const resizeObserver = new ResizeObserver(calculateVisualLines)
resizeObserver.observe(editorRef.current)
const timeoutId = setTimeout(calculateVisualLines, 50)
return () => resizeObserver.disconnect()
const resizeObserver = new ResizeObserver(calculateVisualLines)
if (editorRef.current) {
resizeObserver.observe(editorRef.current)
}
return () => {
clearTimeout(timeoutId)
resizeObserver.disconnect()
}
}, [code])
// Handlers
@@ -135,18 +199,19 @@ export function Code({
setCode(newValue)
setStoreValue(newValue)
setCursorPosition(dropPosition + 1)
setShowTags(true)
const newCursorPosition = dropPosition + 1
setCursorPosition(newCursorPosition)
setShowTags(true)
if (data.connectionData?.sourceBlockId) {
setActiveSourceBlockId(data.connectionData.sourceBlockId)
}
setTimeout(() => {
if (textarea) {
textarea.selectionStart = dropPosition + 1
textarea.selectionEnd = dropPosition + 1
textarea.focus()
textarea.selectionStart = newCursorPosition
textarea.selectionEnd = newCursorPosition
}
}, 0)
} catch (error) {
@@ -159,12 +224,20 @@ export function Code({
setStoreValue(newValue)
setShowTags(false)
setActiveSourceBlockId(null)
setTimeout(() => {
editorRef.current?.querySelector('textarea')?.focus()
}, 0)
}
const handleEnvVarSelect = (newValue: string) => {
setCode(newValue)
setStoreValue(newValue)
setShowEnvVars(false)
setTimeout(() => {
editorRef.current?.querySelector('textarea')?.focus()
}, 0)
}
// Render helpers
@@ -172,12 +245,17 @@ export function Code({
const numbers: ReactElement[] = []
let lineNumber = 1
visualLineHeights.forEach((height) => {
for (let i = 0; i < height; i++) {
visualLineHeights.forEach((height, index) => {
numbers.push(
<div key={`${lineNumber}-0`} className={cn('text-xs text-muted-foreground leading-[21px]')}>
{lineNumber}
</div>
)
for (let i = 1; i < height; i++) {
numbers.push(
<div
key={`${lineNumber}-${i}`}
className={cn('text-xs text-muted-foreground leading-[21px]', i > 0 && 'invisible')}
className={cn('text-xs text-muted-foreground leading-[21px] invisible')}
>
{lineNumber}
</div>
@@ -186,125 +264,171 @@ export function Code({
lineNumber++
})
if (numbers.length === 0) {
numbers.push(
<div key="1-0" className={cn('text-xs text-muted-foreground leading-[21px]')}>
1
</div>
)
}
return numbers
}
return (
<div
className={cn(
'relative min-h-[100px] rounded-md border bg-background font-mono text-sm group',
isConnecting && 'ring-2 ring-blue-500 ring-offset-2'
)}
onDragOver={(e) => e.preventDefault()}
onDrop={handleDrop}
>
{code.split('\n').length > 5 && (
<button
onClick={() => setIsCollapsed(!isCollapsed)}
className={cn(
'absolute right-2 top-2 z-10 p-1.5 rounded-md',
'bg-accent/50 hover:bg-accent text-muted-foreground hover:text-foreground',
'opacity-0 group-hover:opacity-100 transition-opacity',
'text-xs font-medium'
)}
>
{isCollapsed ? 'Expand' : 'Collapse'}
</button>
)}
<div
className="absolute left-0 top-0 bottom-0 w-[30px] bg-muted/30 flex flex-col items-end pr-3 pt-3 select-none overflow-hidden"
aria-hidden="true"
>
{renderLineNumbers()}
</div>
<>
<CodePromptBar
isVisible={isPromptVisible}
isLoading={isAiLoading}
isStreaming={isAiStreaming}
promptValue={promptInputValue}
onSubmit={(prompt: string) => generateCodeStream({ prompt, context: code })}
onCancel={isAiStreaming ? cancelGeneration : hidePromptInline}
onChange={updatePromptValue}
placeholder={aiPromptPlaceholder}
/>
<div
className={cn(
'pl-[30px] pt-0 mt-0 relative',
isCollapsed && 'max-h-[126px] overflow-hidden'
'relative min-h-[100px] rounded-md border bg-background font-mono text-sm group',
isConnecting && 'ring-2 ring-blue-500 ring-offset-2'
)}
ref={editorRef}
onDragOver={(e) => e.preventDefault()}
onDrop={handleDrop}
>
{code.length === 0 && (
<div className="absolute left-[42px] top-[12px] text-muted-foreground/50 select-none pointer-events-none">
{placeholder}
</div>
)}
<Editor
value={code}
onValueChange={(newCode) => {
if (!isCollapsed) {
setCode(newCode)
setStoreValue(newCode)
const textarea = editorRef.current?.querySelector('textarea')
if (textarea) {
const pos = textarea.selectionStart
setCursorPosition(pos)
const tagTrigger = checkTagTrigger(newCode, pos)
setShowTags(tagTrigger.show)
if (!tagTrigger.show) {
setActiveSourceBlockId(null)
}
const envVarTrigger = checkEnvVarTrigger(newCode, pos)
setShowEnvVars(envVarTrigger.show)
setSearchTerm(envVarTrigger.show ? envVarTrigger.searchTerm : '')
}
}
}}
onKeyDown={(e) => {
if (e.key === 'Escape') {
setShowTags(false)
setShowEnvVars(false)
}
}}
highlight={(code) => highlight(code, languages.javascript, 'javascript')}
padding={12}
style={{
fontFamily: 'inherit',
minHeight: '46px',
lineHeight: '21px',
}}
className={cn('focus:outline-none', isCollapsed && 'pointer-events-none select-none')}
textareaClassName={cn(
'focus:outline-none focus:ring-0 bg-transparent',
isCollapsed && 'pointer-events-none'
<div className="absolute right-3 top-2 z-10 flex items-center gap-1 opacity-0 group-hover:opacity-100 transition-opacity">
{!isCollapsed && !isAiStreaming && (
<Button
variant="ghost"
size="icon"
onClick={isPromptVisible ? hidePromptInline : showPromptInline}
disabled={isAiLoading || isAiStreaming}
aria-label="Generate code with AI"
className="h-8 w-8 text-muted-foreground hover:text-foreground"
>
<SparklesIcon className="h-4 w-4" />
</Button>
)}
/>
{showEnvVars && (
<EnvVarDropdown
visible={showEnvVars}
onSelect={handleEnvVarSelect}
searchTerm={searchTerm}
inputValue={code}
cursorPosition={cursorPosition}
onClose={() => {
setShowEnvVars(false)
setSearchTerm('')
}}
/>
)}
{code.split('\n').length > 5 && !isAiStreaming && (
<Button
variant="ghost"
size="sm"
onClick={() => setIsCollapsed(!isCollapsed)}
aria-label={isCollapsed ? 'Expand code' : 'Collapse code'}
className="h-8 px-2 text-muted-foreground hover:text-foreground"
>
<span className="text-xs">{isCollapsed ? 'Expand' : 'Collapse'}</span>
</Button>
)}
</div>
{showTags && (
<TagDropdown
visible={showTags}
onSelect={handleTagSelect}
blockId={blockId}
activeSourceBlockId={activeSourceBlockId}
inputValue={code}
cursorPosition={cursorPosition}
onClose={() => {
setShowTags(false)
setActiveSourceBlockId(null)
<div
className="absolute left-0 top-0 bottom-0 w-[30px] bg-muted/30 flex flex-col items-end pr-3 pt-3 select-none overflow-hidden"
aria-hidden="true"
>
{renderLineNumbers()}
</div>
<div
className={cn(
'pl-[30px] pt-0 mt-0 relative',
isCollapsed && 'max-h-[126px] overflow-hidden',
isAiStreaming && 'streaming-effect'
)}
ref={editorRef}
>
{code.length === 0 && !isCollapsed && (
<div className="absolute left-[42px] top-[12px] text-muted-foreground/50 select-none pointer-events-none">
{placeholder}
</div>
)}
<Editor
value={code}
onValueChange={(newCode) => {
if (!isCollapsed && !isAiStreaming) {
setCode(newCode)
setStoreValue(newCode)
const textarea = editorRef.current?.querySelector('textarea')
if (textarea) {
const pos = textarea.selectionStart
setCursorPosition(pos)
const tagTrigger = checkTagTrigger(newCode, pos)
setShowTags(tagTrigger.show)
if (!tagTrigger.show) {
setActiveSourceBlockId(null)
}
const envVarTrigger = checkEnvVarTrigger(newCode, pos)
setShowEnvVars(envVarTrigger.show)
setSearchTerm(envVarTrigger.show ? envVarTrigger.searchTerm : '')
}
}
}}
onKeyDown={(e) => {
if (e.key === 'Escape') {
setShowTags(false)
setShowEnvVars(false)
}
if (isAiStreaming) {
e.preventDefault()
}
}}
highlight={(codeToHighlight) =>
highlight(codeToHighlight, languages[language], language)
}
padding={12}
style={{
fontFamily: 'inherit',
fontSize: 'inherit',
minHeight: isCollapsed ? '0px' : '106px',
lineHeight: '21px',
outline: 'none',
}}
className={cn(
'code-editor-area caret-primary',
'bg-transparent focus:outline-none',
(isCollapsed || isAiStreaming) && 'opacity-50 cursor-not-allowed'
)}
textareaClassName={cn(
'focus:outline-none focus:ring-0 border-none bg-transparent resize-none',
(isCollapsed || isAiStreaming) && 'pointer-events-none'
)}
/>
)}
{showEnvVars && !isCollapsed && !isAiStreaming && (
<EnvVarDropdown
visible={showEnvVars}
onSelect={handleEnvVarSelect}
searchTerm={searchTerm}
inputValue={code}
cursorPosition={cursorPosition}
onClose={() => {
setShowEnvVars(false)
setSearchTerm('')
}}
/>
)}
{showTags && !isCollapsed && !isAiStreaming && (
<TagDropdown
visible={showTags}
onSelect={handleTagSelect}
blockId={blockId}
activeSourceBlockId={activeSourceBlockId}
inputValue={code}
cursorPosition={cursorPosition}
onClose={() => {
setShowTags(false)
setActiveSourceBlockId(null)
}}
/>
)}
</div>
</div>
</div>
</>
)
}

View File

@@ -16,6 +16,7 @@ interface CodeEditorProps {
minHeight?: string
highlightVariables?: boolean
onKeyDown?: (e: React.KeyboardEvent) => void
disabled?: boolean
}
export function CodeEditor({
@@ -27,6 +28,7 @@ export function CodeEditor({
minHeight = '360px',
highlightVariables = true,
onKeyDown,
disabled = false,
}: CodeEditorProps) {
const [code, setCode] = useState(value)
const [visualLineHeights, setVisualLineHeights] = useState<number[]>([])
@@ -198,6 +200,7 @@ export function CodeEditor({
onKeyDown={onKeyDown}
highlight={(code) => customHighlight(code)}
padding={12}
disabled={disabled}
style={{
fontFamily: 'inherit',
minHeight: '46px',
@@ -206,7 +209,7 @@ export function CodeEditor({
className={cn('focus:outline-none', isCollapsed && 'pointer-events-none select-none')}
textareaClassName={cn(
'focus:outline-none focus:ring-0 bg-transparent',
isCollapsed && 'pointer-events-none'
(isCollapsed || disabled) && 'pointer-events-none'
)}
/>
</div>

View File

@@ -1,5 +1,5 @@
import { useEffect, useMemo, useRef, useState } from 'react'
import { Code, FileJson, X } from 'lucide-react'
import { Code, FileJson, SparklesIcon, X } from 'lucide-react'
import { Button } from '@/components/ui/button'
import {
Dialog,
@@ -15,6 +15,8 @@ import { checkTagTrigger, TagDropdown } from '@/components/ui/tag-dropdown'
import { createLogger } from '@/lib/logs/console-logger'
import { cn } from '@/lib/utils'
import { useCustomToolsStore } from '@/stores/custom-tools/store'
import { useCodeGeneration } from '@/hooks/use-code-generation'
import { CodePromptBar } from '../../../../../../../code-prompt-bar/code-prompt-bar'
import { CodeEditor } from '../code-editor/code-editor'
const logger = createLogger('CustomToolModal')
@@ -57,6 +59,41 @@ export function CustomToolModal({
const [isEditing, setIsEditing] = useState(false)
const [toolId, setToolId] = useState<string | undefined>(undefined)
// AI Code Generation Hooks
const schemaGeneration = useCodeGeneration({
generationType: 'json-schema',
onGeneratedContent: (content) => {
handleJsonSchemaChange(content)
setSchemaError(null) // Clear error on successful generation
},
onStreamChunk: (chunk) => {
setJsonSchema((prev) => {
const newSchema = prev + chunk
// Clear error as soon as streaming starts
if (schemaError) setSchemaError(null)
return newSchema
})
},
})
const codeGeneration = useCodeGeneration({
generationType: 'javascript-function-body',
onGeneratedContent: (content) => {
handleFunctionCodeChange(content) // Use existing handler to also trigger dropdown checks
setCodeError(null) // Clear error on successful generation
},
onStreamChunk: (chunk) => {
setFunctionCode((prev) => {
const newCode = prev + chunk
// Use existing handler logic for consistency, though dropdowns might be disabled during streaming
handleFunctionCodeChange(newCode)
// Clear error as soon as streaming starts
if (codeError) setCodeError(null)
return newCode
})
},
})
// Environment variables and tags dropdown state
const [showEnvVars, setShowEnvVars] = useState(false)
const [showTags, setShowTags] = useState(false)
@@ -100,9 +137,17 @@ export function CustomToolModal({
setActiveSection('schema')
setIsEditing(false)
setToolId(undefined)
// Reset AI state as well
schemaGeneration.closePrompt()
schemaGeneration.hidePromptInline()
codeGeneration.closePrompt()
codeGeneration.hidePromptInline()
}
const handleClose = () => {
// Cancel any ongoing generation before closing
if (schemaGeneration.isStreaming) schemaGeneration.cancelGeneration()
if (codeGeneration.isStreaming) codeGeneration.cancelGeneration()
resetForm()
onOpenChange(false)
}
@@ -242,6 +287,8 @@ export function CustomToolModal({
}
const handleJsonSchemaChange = (value: string) => {
// Prevent updates during AI generation/streaming
if (schemaGeneration.isLoading || schemaGeneration.isStreaming) return
setJsonSchema(value)
if (schemaError) {
setSchemaError(null)
@@ -249,6 +296,16 @@ export function CustomToolModal({
}
const handleFunctionCodeChange = (value: string) => {
// Prevent updates during AI generation/streaming
if (codeGeneration.isLoading || codeGeneration.isStreaming) {
// We still need to update the state for streaming chunks, but skip dropdown logic
setFunctionCode(value)
if (codeError) {
setCodeError(null)
}
return
}
setFunctionCode(value)
if (codeError) {
setCodeError(null)
@@ -284,12 +341,12 @@ export function CustomToolModal({
// Check if we should show the environment variables dropdown
const envVarTrigger = checkEnvVarTrigger(value, pos)
setShowEnvVars(envVarTrigger.show)
setShowEnvVars(envVarTrigger.show && !codeGeneration.isStreaming) // Hide dropdown during streaming
setSearchTerm(envVarTrigger.show ? envVarTrigger.searchTerm : '')
// Check if we should show the tags dropdown
const tagTrigger = checkTagTrigger(value, pos)
setShowTags(tagTrigger.show)
setShowTags(tagTrigger.show && !codeGeneration.isStreaming) // Hide dropdown during streaming
if (!tagTrigger.show) {
setActiveSourceBlockId(null)
}
@@ -311,14 +368,42 @@ export function CustomToolModal({
// Handle key press events
const handleKeyDown = (e: React.KeyboardEvent) => {
// Only handle Escape directly if dropdowns aren't visible
// Otherwise, let the dropdowns handle their own keyboard events
if (e.key === 'Escape' && !showEnvVars && !showTags) {
setShowEnvVars(false)
setShowTags(false)
// Allow AI prompt interaction (e.g., Escape to close prompt bar)
// Check if AI prompt is visible for the current section
const isSchemaPromptVisible = activeSection === 'schema' && schemaGeneration.isPromptVisible
const isCodePromptVisible = activeSection === 'code' && codeGeneration.isPromptVisible
if (e.key === 'Escape') {
if (isSchemaPromptVisible) {
schemaGeneration.hidePromptInline()
e.preventDefault()
e.stopPropagation()
return
}
if (isCodePromptVisible) {
codeGeneration.hidePromptInline()
e.preventDefault()
e.stopPropagation()
return
}
// Close dropdowns only if AI prompt isn't active
if (!showEnvVars && !showTags) {
setShowEnvVars(false)
setShowTags(false)
}
}
// Don't handle other keys when dropdowns are visible
// Prevent regular input if streaming in the active section
if (activeSection === 'schema' && schemaGeneration.isStreaming) {
e.preventDefault()
return
}
if (activeSection === 'code' && codeGeneration.isStreaming) {
e.preventDefault()
return
}
// Let dropdowns handle their own keyboard events if visible
if (showEnvVars || showTags) {
if (['ArrowDown', 'ArrowUp', 'Enter'].includes(e.key)) {
e.preventDefault()
@@ -381,23 +466,89 @@ export function CustomToolModal({
))}
</div>
<div className="flex-1 px-6 pt-6 pb-12 overflow-auto">
<div className="relative flex-1 px-6 pt-6 pb-12 overflow-auto">
{/* Schema Section AI Prompt Bar */}
{activeSection === 'schema' && (
<>
<CodePromptBar
isVisible={schemaGeneration.isPromptVisible}
isLoading={schemaGeneration.isLoading}
isStreaming={schemaGeneration.isStreaming}
promptValue={schemaGeneration.promptInputValue}
onSubmit={(prompt: string) =>
schemaGeneration.generateStream({ prompt, context: jsonSchema })
}
onCancel={
schemaGeneration.isStreaming
? schemaGeneration.cancelGeneration
: schemaGeneration.hidePromptInline
}
onChange={schemaGeneration.updatePromptValue}
placeholder="Describe the JSON schema to generate..."
className="relative mb-2 !top-0"
/>
</>
)}
{/* Code Section AI Prompt Bar */}
{activeSection === 'code' && (
<>
<CodePromptBar
isVisible={codeGeneration.isPromptVisible}
isLoading={codeGeneration.isLoading}
isStreaming={codeGeneration.isStreaming}
promptValue={codeGeneration.promptInputValue}
onSubmit={(prompt: string) =>
codeGeneration.generateStream({ prompt, context: functionCode })
}
onCancel={
codeGeneration.isStreaming
? codeGeneration.cancelGeneration
: codeGeneration.hidePromptInline
}
onChange={codeGeneration.updatePromptValue}
placeholder="Describe the JavaScript code to generate..."
className="relative mb-2 !top-0"
/>
</>
)}
<div
className={cn(
'flex-1 flex flex-col h-full',
activeSection === 'schema' ? 'block' : 'hidden'
)}
>
<div className="flex items-center mb-1 min-h-6">
<div className="flex items-center justify-between mb-1 min-h-6">
<div className="flex items-center gap-2">
<FileJson className="h-4 w-4" />
<Label htmlFor="json-schema" className="font-medium">
JSON Schema
</Label>
<Button
variant="ghost"
size="icon"
className="h-5 w-5 p-0 text-muted-foreground hover:text-foreground"
onClick={() => {
logger.debug('Schema AI button clicked')
logger.debug(
'showPromptInline function exists:',
typeof schemaGeneration.showPromptInline === 'function'
)
schemaGeneration.isPromptVisible
? schemaGeneration.hidePromptInline()
: schemaGeneration.showPromptInline()
}}
disabled={schemaGeneration.isLoading || schemaGeneration.isStreaming}
aria-label="Generate schema with AI"
>
<SparklesIcon className="h-3 w-3" />
</Button>
</div>
{schemaError && (
<span className="text-sm text-red-600 ml-4 flex-shrink-0">{schemaError}</span>
)}
{schemaError &&
!schemaGeneration.isStreaming && ( // Hide schema error while streaming
<span className="text-sm text-red-600 ml-4 flex-shrink-0">{schemaError}</span>
)}
</div>
<CodeEditor
value={jsonSchema}
@@ -421,7 +572,13 @@ export function CustomToolModal({
}
}`}
minHeight="340px"
className={cn(schemaError ? 'border-red-500' : '')}
className={cn(
schemaError && !schemaGeneration.isStreaming ? 'border-red-500' : '',
(schemaGeneration.isLoading || schemaGeneration.isStreaming) &&
'opacity-50 cursor-not-allowed'
)}
disabled={schemaGeneration.isLoading || schemaGeneration.isStreaming} // Use disabled prop instead of readOnly
onKeyDown={handleKeyDown} // Pass keydown handler
/>
<div className="h-6"></div>
</div>
@@ -432,16 +589,36 @@ export function CustomToolModal({
activeSection === 'code' ? 'block' : 'hidden'
)}
>
<div className="flex items-center mb-1 min-h-6">
<div className="flex items-center justify-between mb-1 min-h-6">
<div className="flex items-center gap-2">
<Code className="h-4 w-4" />
<Label htmlFor="function-code" className="font-medium">
Code (optional)
</Label>
<Button
variant="ghost"
size="icon"
className="h-5 w-5 p-0 text-muted-foreground hover:text-foreground"
onClick={() => {
logger.debug('Code AI button clicked')
logger.debug(
'showPromptInline function exists:',
typeof codeGeneration.showPromptInline === 'function'
)
codeGeneration.isPromptVisible
? codeGeneration.hidePromptInline()
: codeGeneration.showPromptInline()
}}
disabled={codeGeneration.isLoading || codeGeneration.isStreaming}
aria-label="Generate code with AI"
>
<SparklesIcon className="h-3 w-3" />
</Button>
</div>
{codeError && (
<span className="text-sm text-red-600 ml-4 flex-shrink-0">{codeError}</span>
)}
{codeError &&
!codeGeneration.isStreaming && ( // Hide code error while streaming
<span className="text-sm text-red-600 ml-4 flex-shrink-0">{codeError}</span>
)}
</div>
<div ref={codeEditorRef} className="relative">
<CodeEditor
@@ -450,9 +627,14 @@ export function CustomToolModal({
language="javascript"
placeholder={`// This code will be executed when the tool is called. You can use environment variables with {{VARIABLE_NAME}}.`}
minHeight="340px"
className={cn(codeError ? 'border-red-500' : '')}
className={cn(
codeError && !codeGeneration.isStreaming ? 'border-red-500' : '',
(codeGeneration.isLoading || codeGeneration.isStreaming) &&
'opacity-50 cursor-not-allowed'
)}
highlightVariables={true}
onKeyDown={handleKeyDown}
disabled={codeGeneration.isLoading || codeGeneration.isStreaming} // Use disabled prop instead of readOnly
onKeyDown={handleKeyDown} // Pass keydown handler
/>
{/* Environment variables dropdown */}

View File

@@ -100,6 +100,8 @@ export function SubBlock({ blockId, config, isConnecting }: SubBlockProps) {
subBlockId={config.id}
isConnecting={isConnecting}
placeholder={config.placeholder}
language={config.language}
generationType={config.generationType}
/>
)
case 'switch':

View File

@@ -104,6 +104,8 @@ export const AgentBlock: BlockConfig<AgentResponse> = {
type: 'code',
layout: 'full',
placeholder: `Enter JSON schema...`,
language: 'json',
generationType: 'json-schema',
},
],
tools: {

View File

@@ -17,7 +17,7 @@ export const ThinkingBlock: BlockConfig<ThinkingToolResponse> = {
category: 'tools',
bgColor: '#181C1E',
icon: BrainIcon,
hiddenFromSidebar: true,
hideFromToolbar: true,
subBlocks: [
{

View File

@@ -106,6 +106,9 @@ export interface SubBlockConfig {
value: string | number | boolean | Array<string | number | boolean>
}
}
// Props specific to 'code' sub-block type
language?: 'javascript' | 'json'
generationType?: 'javascript-function-body' | 'json-schema'
// OAuth specific properties
provider?: string
serviceId?: string
@@ -152,7 +155,7 @@ export interface BlockConfig<T extends ToolResponse = ToolResponse> {
}
}
}
hiddenFromSidebar?: boolean
hideFromToolbar?: boolean
}
// Output configuration rules

View File

@@ -0,0 +1,304 @@
import { useCallback, useRef, useState } from 'react'
import { createLogger } from '@/lib/logs/console-logger'
import { useNotificationStore } from '@/stores/notifications/store'
/** One turn of the prompt/response conversation sent to the codegen API. */
interface ChatMessage {
  role: 'user' | 'assistant' | 'system'
  content: string
}
/** Kind of artifact the API is asked to produce. */
type GenerationType = 'json-schema' | 'javascript-function-body' | 'typescript-function-body'
/** Configuration accepted by the useCodeGeneration hook. */
interface UseCodeGenerationProps {
  generationType: GenerationType
  initialContext?: string // Optional initial code/schema used when a call supplies no context
  onGeneratedContent: (content: string) => void // Receives the final, complete generated text
  onStreamChunk?: (chunk: string) => void // Receives each incremental chunk while streaming
  onStreamStart?: () => void // Fired once before the first chunk arrives
  onGenerationComplete?: (prompt: string, generatedContent: string) => void // Fired after history is updated
}
/** Per-call options for generate / generateStream. */
interface GenerateOptions {
  prompt: string
  context?: string // Overrides initialContext if provided
}
const logger = createLogger('useCodeGeneration')
/**
 * React hook that drives AI code/schema generation against /api/codegen.
 *
 * Exposes both a non-streaming `generate` and a streaming `generateStream`
 * (newline-delimited JSON protocol), plus prompt-bar visibility state and a
 * conversation history that is replayed to the server on each request.
 */
export function useCodeGeneration({
  generationType,
  initialContext = '',
  onGeneratedContent,
  onStreamChunk,
  onStreamStart,
  onGenerationComplete,
}: UseCodeGenerationProps) {
  const [isLoading, setIsLoading] = useState(false)
  const [isPromptOpen, setIsPromptOpen] = useState(false)
  const [isPromptVisible, setIsPromptVisible] = useState(false)
  const [promptInputValue, setPromptInputValue] = useState('')
  const [error, setError] = useState<string | null>(null)
  const [isStreaming, setIsStreaming] = useState(false)
  const addNotification = useNotificationStore((state) => state.addNotification)
  // Prior user/assistant turns, replayed to the server for conversational context.
  const [conversationHistory, setConversationHistory] = useState<ChatMessage[]>([])
  // Controller for the in-flight streaming request so cancelGeneration can abort it.
  const abortControllerRef = useRef<AbortController | null>(null)

  /**
   * Non-streaming generation: POST the prompt and wait for the full result.
   */
  const generate = async ({ prompt, context }: GenerateOptions) => {
    if (!prompt) {
      const errorMessage = 'Prompt cannot be empty.'
      setError(errorMessage)
      addNotification('error', errorMessage, null)
      return
    }
    setIsLoading(true)
    setError(null)
    logger.debug('Starting code generation', { generationType, prompt })
    setPromptInputValue('')
    // Capture the prompt now; state may change before history is updated.
    const currentPrompt = prompt
    try {
      const response = await fetch('/api/codegen', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({
          prompt,
          generationType,
          context: context ?? initialContext, // Per-call context overrides the initial one
          history: conversationHistory,
        }),
      })
      const result = await response.json()
      if (!response.ok || !result.success) {
        throw new Error(result.error || `HTTP error! status: ${response.status}`)
      }
      logger.info('Code generation successful', { generationType })
      onGeneratedContent(result.generatedContent)
      addNotification('info', 'Content generated successfully!', null)
      setIsPromptOpen(false)
      setIsPromptVisible(false)
      // Record the exchange so follow-up prompts have conversational context.
      setConversationHistory((prevHistory) => [
        ...prevHistory,
        { role: 'user', content: currentPrompt },
        { role: 'assistant', content: result.generatedContent },
      ])
      onGenerationComplete?.(currentPrompt, result.generatedContent)
    } catch (err: any) {
      const errorMessage = err.message || 'An unknown error occurred during generation.'
      logger.error('Code generation failed', { error: errorMessage })
      setError(errorMessage)
      addNotification('error', `Generation failed: ${errorMessage}`, null)
    } finally {
      setIsLoading(false)
    }
  }

  /**
   * Streaming generation: consumes a newline-delimited JSON stream of
   * `{chunk}` / `{done, fullContent?}` / `{error}` messages.
   */
  const generateStream = async ({ prompt, context }: GenerateOptions) => {
    if (!prompt) {
      const errorMessage = 'Prompt cannot be empty.'
      setError(errorMessage)
      addNotification('error', errorMessage, null)
      return
    }
    setIsLoading(true)
    setIsStreaming(true)
    setError(null)
    setPromptInputValue('')
    const currentPrompt = prompt
    // Fresh controller per request so a previous abort cannot affect this one.
    abortControllerRef.current = new AbortController()
    logger.debug('Starting streaming code generation', { generationType, prompt })
    try {
      const response = await fetch('/api/codegen', {
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
        },
        body: JSON.stringify({
          prompt,
          generationType,
          context: context ?? initialContext,
          stream: true,
          history: conversationHistory,
        }),
        signal: abortControllerRef.current.signal,
      })
      if (!response.ok) {
        const errorText = await response.text()
        throw new Error(errorText || `HTTP error! status: ${response.status}`)
      }
      if (!response.body) {
        throw new Error('Response body is null')
      }
      const reader = response.body.getReader()
      const decoder = new TextDecoder()
      let fullContent = ''
      // Carry-over for a JSON line split across network reads.
      let buffered = ''
      // Notify listeners before the first chunk so they can clear stale content.
      onStreamStart?.()

      /**
       * Parses one NDJSON line and applies its effect.
       * Returns true when the server signalled completion.
       * Server-reported errors are thrown so they reach the outer catch
       * (and the user) instead of being swallowed as parse failures.
       */
      const processLine = (line: string): boolean => {
        if (line.trim() === '') return false
        let data: any
        try {
          data = JSON.parse(line)
        } catch (jsonError: any) {
          logger.error('Failed to parse streaming response', { error: jsonError.message, line })
          return false
        }
        if (data.error) {
          throw new Error(data.error)
        }
        if (data.chunk) {
          fullContent += data.chunk
          onStreamChunk?.(data.chunk)
        }
        if (data.done) {
          // Prefer the server's authoritative full content when provided.
          if (data.fullContent) {
            fullContent = data.fullContent
          }
          logger.info('Streaming code generation completed', { generationType })
          // Update history only after the stream fully completes.
          setConversationHistory((prevHistory) => [
            ...prevHistory,
            { role: 'user', content: currentPrompt },
            { role: 'assistant', content: fullContent },
          ])
          onGeneratedContent(fullContent)
          onGenerationComplete?.(currentPrompt, fullContent)
          addNotification('info', 'Content generated successfully!', null)
          return true
        }
        return false
      }

      let finished = false
      while (!finished) {
        const { done, value } = await reader.read()
        if (done) {
          // Flush decoder state and any final line lacking a trailing newline.
          buffered += decoder.decode()
          if (buffered.trim() !== '') {
            processLine(buffered)
          }
          break
        }
        // { stream: true } keeps multi-byte characters that straddle chunk
        // boundaries from being corrupted.
        buffered += decoder.decode(value, { stream: true })
        const lines = buffered.split('\n')
        // The last element may be a partial line; keep it for the next read.
        buffered = lines.pop() ?? ''
        for (const line of lines) {
          if (processLine(line)) {
            finished = true
            break
          }
        }
      }
    } catch (err: any) {
      // An abort is user-initiated; not an error condition.
      if (err.name === 'AbortError') {
        logger.info('Streaming code generation aborted', { generationType })
        return
      }
      const errorMessage = err.message || 'An unknown error occurred during streaming.'
      logger.error('Streaming code generation failed', { error: errorMessage })
      setError(errorMessage)
      addNotification('error', `Generation failed: ${errorMessage}`, null)
    } finally {
      setIsLoading(false)
      setIsStreaming(false)
      abortControllerRef.current = null
    }
  }

  /** Aborts the in-flight streaming request, if any. */
  const cancelGeneration = () => {
    if (abortControllerRef.current) {
      abortControllerRef.current.abort()
      abortControllerRef.current = null
      setIsLoading(false)
      setIsStreaming(false)
      logger.info('Code generation canceled', { generationType })
    }
  }

  /** Opens the modal-style prompt with a cleared input. */
  const openPrompt = () => {
    setIsPromptOpen(true)
    setPromptInputValue('')
  }

  /** Closes the modal-style prompt; ignored while a request is in flight. */
  const closePrompt = () => {
    if (isLoading) return
    setIsPromptOpen(false)
    setPromptInputValue('')
  }

  /** Shows the inline prompt bar with a cleared input. */
  const showPromptInline = () => {
    logger.debug('showPromptInline called', { generationType })
    setIsPromptVisible(true)
    setPromptInputValue('')
  }

  /** Hides the inline prompt bar; ignored while a request is in flight. */
  const hidePromptInline = () => {
    logger.debug('hidePromptInline called', { generationType })
    if (isLoading) return
    setIsPromptVisible(false)
    setPromptInputValue('')
  }

  /** Controlled-input setter for the prompt text. */
  const updatePromptValue = (value: string) => {
    setPromptInputValue(value)
  }

  /** Drops all accumulated conversation turns. */
  const clearHistory = useCallback(() => {
    setConversationHistory([])
    logger.info('Conversation history cleared', { generationType })
  }, [generationType])

  return {
    isLoading,
    isStreaming,
    error,
    generate,
    generateStream,
    cancelGeneration,
    isPromptOpen,
    openPrompt,
    closePrompt,
    isPromptVisible,
    showPromptInline,
    hidePromptInline,
    promptInputValue,
    updatePromptValue,
    conversationHistory,
    clearHistory,
  }
}

202
sim/package-lock.json generated
View File

@@ -36,6 +36,7 @@
"@radix-ui/react-tooltip": "^1.1.6",
"@react-email/components": "^0.0.34",
"@vercel/og": "^0.6.5",
"ai": "^4.3.2",
"better-auth": "^1.2.5-beta.5",
"browser-image-compression": "^2.0.2",
"class-variance-authority": "^0.7.1",
@@ -56,7 +57,7 @@
"mammoth": "^1.9.0",
"next": "^15.2.4",
"next-themes": "^0.4.6",
"openai": "^4.89.0",
"openai": "^4.91.1",
"pdf-parse": "^1.1.1",
"postgres": "^3.4.5",
"prismjs": "^1.30.0",
@@ -107,6 +108,76 @@
"dev": true,
"license": "MIT"
},
"node_modules/@ai-sdk/provider": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@ai-sdk/provider/-/provider-1.1.0.tgz",
"integrity": "sha512-0M+qjp+clUD0R1E5eWQFhxEvWLNaOtGQRUaBn8CUABnSKredagq92hUS9VjOzGsTm37xLfpaxl97AVtbeOsHew==",
"license": "Apache-2.0",
"dependencies": {
"json-schema": "^0.4.0"
},
"engines": {
"node": ">=18"
}
},
"node_modules/@ai-sdk/provider-utils": {
"version": "2.2.4",
"resolved": "https://registry.npmjs.org/@ai-sdk/provider-utils/-/provider-utils-2.2.4.tgz",
"integrity": "sha512-13sEGBxB6kgaMPGOgCLYibF6r8iv8mgjhuToFrOTU09bBxbFQd8ZoARarCfJN6VomCUbUvMKwjTBLb1vQnN+WA==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "1.1.0",
"nanoid": "^3.3.8",
"secure-json-parse": "^2.7.0"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.23.8"
}
},
"node_modules/@ai-sdk/react": {
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/@ai-sdk/react/-/react-1.2.6.tgz",
"integrity": "sha512-5BFChNbcYtcY9MBStcDev7WZRHf0NpTrk8yfSoedWctB3jfWkFd1HECBvdc8w3mUQshF2MumLHtAhRO7IFtGGQ==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider-utils": "2.2.4",
"@ai-sdk/ui-utils": "1.2.5",
"swr": "^2.2.5",
"throttleit": "2.1.0"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"react": "^18 || ^19 || ^19.0.0-rc",
"zod": "^3.23.8"
},
"peerDependenciesMeta": {
"zod": {
"optional": true
}
}
},
"node_modules/@ai-sdk/ui-utils": {
"version": "1.2.5",
"resolved": "https://registry.npmjs.org/@ai-sdk/ui-utils/-/ui-utils-1.2.5.tgz",
"integrity": "sha512-XDgqnJcaCkDez7qolvk+PDbs/ceJvgkNkxkOlc9uDWqxfDJxtvCZ+14MP/1qr4IBwGIgKVHzMDYDXvqVhSWLzg==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "1.1.0",
"@ai-sdk/provider-utils": "2.2.4",
"zod-to-json-schema": "^3.24.1"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"zod": "^3.23.8"
}
},
"node_modules/@alloc/quick-lru": {
"version": "5.2.0",
"resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz",
@@ -2100,6 +2171,15 @@
"node": ">= 8"
}
},
"node_modules/@opentelemetry/api": {
"version": "1.9.0",
"resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz",
"integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==",
"license": "Apache-2.0",
"engines": {
"node": ">=8.0.0"
}
},
"node_modules/@peculiar/asn1-android": {
"version": "2.3.16",
"resolved": "https://registry.npmjs.org/@peculiar/asn1-android/-/asn1-android-2.3.16.tgz",
@@ -4892,6 +4972,12 @@
"@types/d3-selection": "*"
}
},
"node_modules/@types/diff-match-patch": {
"version": "1.0.36",
"resolved": "https://registry.npmjs.org/@types/diff-match-patch/-/diff-match-patch-1.0.36.tgz",
"integrity": "sha512-xFdR6tkm0MWvBfO8xXCSsinYxHcqkQUlcHeSpMC2ukzOb6lwQAfDmW+Qt0AvlGd8HpsS28qKsB+oPeJn9I39jg==",
"license": "MIT"
},
"node_modules/@types/estree": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz",
@@ -5293,6 +5379,32 @@
"node": ">= 8.0.0"
}
},
"node_modules/ai": {
"version": "4.3.2",
"resolved": "https://registry.npmjs.org/ai/-/ai-4.3.2.tgz",
"integrity": "sha512-h643SfhKil0Pnxk2tVIazFDL1JevutUghvc3mOpWqJFMcudmgtwQYlvxCkwSfljrrq+qIfne8d6jCihMMhM7pw==",
"license": "Apache-2.0",
"dependencies": {
"@ai-sdk/provider": "1.1.0",
"@ai-sdk/provider-utils": "2.2.4",
"@ai-sdk/react": "1.2.6",
"@ai-sdk/ui-utils": "1.2.5",
"@opentelemetry/api": "1.9.0",
"jsondiffpatch": "0.6.0"
},
"engines": {
"node": ">=18"
},
"peerDependencies": {
"react": "^18 || ^19 || ^19.0.0-rc",
"zod": "^3.23.8"
},
"peerDependenciesMeta": {
"react": {
"optional": true
}
}
},
"node_modules/ansi-escapes": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.0.0.tgz",
@@ -6434,7 +6546,6 @@
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz",
"integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6"
@@ -6473,6 +6584,12 @@
"node": ">=0.3.1"
}
},
"node_modules/diff-match-patch": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/diff-match-patch/-/diff-match-patch-1.0.5.tgz",
"integrity": "sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw==",
"license": "Apache-2.0"
},
"node_modules/diff-sequences": {
"version": "26.6.2",
"resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-26.6.2.tgz",
@@ -8226,6 +8343,12 @@
"integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==",
"license": "MIT"
},
"node_modules/json-schema": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.4.0.tgz",
"integrity": "sha512-es94M3nTIfsEPisRafak+HDLfHXnKBhV3vU5eqPcS3flIWqcxJWgXHXiey3YrpaNsanY5ei1VoYEbOzijuq9BA==",
"license": "(AFL-2.1 OR BSD-3-Clause)"
},
"node_modules/json5": {
"version": "2.2.3",
"resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz",
@@ -8239,6 +8362,35 @@
"node": ">=6"
}
},
"node_modules/jsondiffpatch": {
"version": "0.6.0",
"resolved": "https://registry.npmjs.org/jsondiffpatch/-/jsondiffpatch-0.6.0.tgz",
"integrity": "sha512-3QItJOXp2AP1uv7waBkao5nCvhEv+QmJAd38Ybq7wNI74Q+BBmnLn4EDKz6yI9xGAIQoUF87qHt+kc1IVxB4zQ==",
"license": "MIT",
"dependencies": {
"@types/diff-match-patch": "^1.0.36",
"chalk": "^5.3.0",
"diff-match-patch": "^1.0.5"
},
"bin": {
"jsondiffpatch": "bin/jsondiffpatch.js"
},
"engines": {
"node": "^18.0.0 || >=20.0.0"
}
},
"node_modules/jsondiffpatch/node_modules/chalk": {
"version": "5.4.1",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-5.4.1.tgz",
"integrity": "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==",
"license": "MIT",
"engines": {
"node": "^12.17.0 || ^14.13 || >=16.0.0"
},
"funding": {
"url": "https://github.com/chalk/chalk?sponsor=1"
}
},
"node_modules/jszip": {
"version": "3.10.1",
"resolved": "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz",
@@ -9147,9 +9299,9 @@
}
},
"node_modules/openai": {
"version": "4.90.0",
"resolved": "https://registry.npmjs.org/openai/-/openai-4.90.0.tgz",
"integrity": "sha512-YCuHMMycqtCg1B8G9ezkOF0j8UnBWD3Al/zYaelpuXwU1yhCEv+Y4n9G20MnyGy6cH4GsFwOMrgstQ+bgG1PtA==",
"version": "4.91.1",
"resolved": "https://registry.npmjs.org/openai/-/openai-4.91.1.tgz",
"integrity": "sha512-DbjrR0hIMQFbxz8+3qBsfPJnh3+I/skPgoSlT7f9eiZuhGBUissPQULNgx6gHNkLoZ3uS0uYS6eXPUdtg4nHzw==",
"license": "Apache-2.0",
"dependencies": {
"@types/node": "^18.11.18",
@@ -10831,6 +10983,12 @@
"loose-envify": "^1.1.0"
}
},
"node_modules/secure-json-parse": {
"version": "2.7.0",
"resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-2.7.0.tgz",
"integrity": "sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==",
"license": "BSD-3-Clause"
},
"node_modules/selderee": {
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/selderee/-/selderee-0.11.0.tgz",
@@ -11493,6 +11651,19 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/swr": {
"version": "2.3.3",
"resolved": "https://registry.npmjs.org/swr/-/swr-2.3.3.tgz",
"integrity": "sha512-dshNvs3ExOqtZ6kJBaAsabhPdHyeY4P2cKwRCniDVifBMoG/SVI7tfLWqPXriVspf2Rg4tPzXJTnwaihIeFw2A==",
"license": "MIT",
"dependencies": {
"dequal": "^2.0.3",
"use-sync-external-store": "^1.4.0"
},
"peerDependencies": {
"react": "^16.11.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
}
},
"node_modules/symbol-tree": {
"version": "3.2.4",
"resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz",
@@ -11677,6 +11848,18 @@
"node": ">=0.8"
}
},
"node_modules/throttleit": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/throttleit/-/throttleit-2.1.0.tgz",
"integrity": "sha512-nt6AMGKW1p/70DF/hGBdJB57B8Tspmbp5gfJ8ilhLnt7kkr2ye7hzD6NVG8GGErk2HWF34igrL2CXmNIkzKqKw==",
"license": "MIT",
"engines": {
"node": ">=18"
},
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/tiny-inflate": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/tiny-inflate/-/tiny-inflate-1.0.3.tgz",
@@ -12701,6 +12884,15 @@
"url": "https://github.com/sponsors/colinhacks"
}
},
"node_modules/zod-to-json-schema": {
"version": "3.24.5",
"resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.5.tgz",
"integrity": "sha512-/AuWwMP+YqiPbsJx5D6TfgRTc4kTLjsh5SOcd4bLsfUg2RcEXrFMJl1DGgdHy2aCfsIA/cr/1JM0xcB2GZji8g==",
"license": "ISC",
"peerDependencies": {
"zod": "^3.24.1"
}
},
"node_modules/zustand": {
"version": "4.5.6",
"resolved": "https://registry.npmjs.org/zustand/-/zustand-4.5.6.tgz",

View File

@@ -50,6 +50,7 @@
"@radix-ui/react-tooltip": "^1.1.6",
"@react-email/components": "^0.0.34",
"@vercel/og": "^0.6.5",
"ai": "^4.3.2",
"better-auth": "^1.2.5-beta.5",
"browser-image-compression": "^2.0.2",
"class-variance-authority": "^0.7.1",
@@ -70,7 +71,7 @@
"mammoth": "^1.9.0",
"next": "^15.2.4",
"next-themes": "^0.4.6",
"openai": "^4.89.0",
"openai": "^4.91.1",
"pdf-parse": "^1.1.1",
"postgres": "^3.4.5",
"prismjs": "^1.30.0",