Mirror of https://github.com/simstudioai/sim.git (synced 2026-01-10 07:27:57 -05:00)
feat(import-export): improvements to export workspace, maintain file structure, include workflow variables (#1799)

* feat(import-export): improvements to export workspace, maintain file structure
* fix types
* import/export variables
* fix var ref id bug
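The commit adds a new `@/lib/workflows/import-export` module (full file in the diff below) exposing exportWorkspaceToZip, extractWorkflowsFromZip, extractWorkflowsFromFiles, and extractWorkflowName. The following is a minimal TypeScript sketch of the export/import round trip these changes enable, assuming the interfaces shown in the diff; the workspace name, IDs, and truncated workflow state are made-up sample data, the top-level await assumes an ES module context, and the snippet is illustrative rather than part of the commit:

import {
  exportWorkspaceToZip,
  extractWorkflowsFromZip,
  extractWorkflowName,
  type FolderExportData,
  type WorkflowExportData,
} from '@/lib/workflows/import-export'

// Hypothetical sample data; the real UI fetches this from /api/workflows and /api/folders.
const folders: FolderExportData[] = [{ id: 'folder-1', name: 'Drafts', parentId: null }]
const workflows: WorkflowExportData[] = [
  {
    workflow: { id: 'wf-1', name: 'My Workflow', description: 'Example', folderId: 'folder-1' },
    // Truncated state for illustration; a real WorkflowState carries full block data.
    state: { blocks: {}, edges: [], loops: {}, parallels: {} } as any,
    variables: [{ id: 'var-1', name: 'apiUrl', type: 'string', value: 'https://example.com' }],
  },
]

// Export: each workflow is written into the zip under its folder path
// (Drafts/My-Workflow-wf-1.json here), next to a _workspace.json metadata file.
const zipBlob = await exportWorkspaceToZip('My Workspace', workflows, folders)

// Import: reading the zip back yields each workflow's JSON content plus the folder
// path segments used to recreate the folder structure, and the workspace metadata.
const { workflows: imported, metadata } = await extractWorkflowsFromZip(
  new File([zipBlob], 'my-workspace-export.zip')
)
for (const wf of imported) {
  console.log(metadata?.workspaceName, wf.folderPath.join('/'), extractWorkflowName(wf.content))
}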
Committed by GitHub
parent fb445b166d
commit 2eea3caccd
@@ -8,6 +8,11 @@ import { Popover, PopoverContent, PopoverTrigger } from '@/components/ui/popover
import { createLogger } from '@/lib/logs/console/logger'
import { generateFolderName } from '@/lib/naming'
import { cn } from '@/lib/utils'
import {
  extractWorkflowName,
  extractWorkflowsFromFiles,
  extractWorkflowsFromZip,
} from '@/lib/workflows/import-export'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { useFolderStore } from '@/stores/folders/store'
import { useWorkflowDiffStore } from '@/stores/workflow-diff/store'
@@ -114,80 +119,6 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
    }
  }, [createFolder, workspaceId, isCreating])

  const handleDirectImport = useCallback(
    async (content: string, filename?: string) => {
      if (!content.trim()) {
        logger.error('JSON content is required')
        return
      }

      setIsImporting(true)

      try {
        // First validate the JSON without importing
        const { data: workflowData, errors: parseErrors } = parseWorkflowJson(content)

        if (!workflowData || parseErrors.length > 0) {
          logger.error('Failed to parse JSON:', { errors: parseErrors })
          return
        }

        // Generate workflow name from filename or fallback to time-based name
        const getWorkflowName = () => {
          if (filename) {
            // Remove file extension and use the filename
            const nameWithoutExtension = filename.replace(/\.json$/i, '')
            return (
              nameWithoutExtension.trim() || `Imported Workflow - ${new Date().toLocaleString()}`
            )
          }
          return `Imported Workflow - ${new Date().toLocaleString()}`
        }

        // Clear workflow diff store when creating a new workflow from import
        const { clearDiff } = useWorkflowDiffStore.getState()
        clearDiff()

        // Create a new workflow
        const newWorkflowId = await createWorkflow({
          name: getWorkflowName(),
          description: 'Workflow imported from JSON',
          workspaceId,
        })

        // Save workflow state to database first
        const response = await fetch(`/api/workflows/${newWorkflowId}/state`, {
          method: 'PUT',
          headers: {
            'Content-Type': 'application/json',
          },
          body: JSON.stringify(workflowData),
        })

        if (!response.ok) {
          logger.error('Failed to persist imported workflow to database')
          throw new Error('Failed to save workflow')
        }

        logger.info('Imported workflow persisted to database')

        // Pre-load the workflow state before navigating
        const { setActiveWorkflow } = useWorkflowRegistry.getState()
        await setActiveWorkflow(newWorkflowId)

        // Navigate to the new workflow (replace to avoid history entry)
        router.replace(`/workspace/${workspaceId}/w/${newWorkflowId}`)

        logger.info('Workflow imported successfully from JSON')
      } catch (error) {
        logger.error('Failed to import workflow:', { error })
      } finally {
        setIsImporting(false)
      }
    },
    [createWorkflow, workspaceId, router]
  )

  const handleImportWorkflow = useCallback(() => {
    setIsOpen(false)
    fileInputRef.current?.click()
@@ -195,24 +126,195 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre

  const handleFileChange = useCallback(
    async (event: React.ChangeEvent<HTMLInputElement>) => {
      const file = event.target.files?.[0]
      if (!file) return
      const files = event.target.files
      if (!files || files.length === 0) return

      setIsImporting(true)

      try {
        const content = await file.text()
        const fileArray = Array.from(files)
        const hasZip = fileArray.some((f) => f.name.toLowerCase().endsWith('.zip'))
        const jsonFiles = fileArray.filter((f) => f.name.toLowerCase().endsWith('.json'))

        // Import directly with filename
        await handleDirectImport(content, file.name)
        let importedWorkflows: Array<{ content: string; name: string; folderPath: string[] }> = []

        if (hasZip && fileArray.length === 1) {
          const zipFile = fileArray[0]
          const { workflows: extractedWorkflows, metadata } = await extractWorkflowsFromZip(zipFile)
          importedWorkflows = extractedWorkflows

          const { createFolder } = useFolderStore.getState()
          const folderName = metadata?.workspaceName || zipFile.name.replace(/\.zip$/i, '')
          const importFolder = await createFolder({
            name: folderName,
            workspaceId,
          })

          const folderMap = new Map<string, string>()

          for (const workflow of importedWorkflows) {
            try {
              const { data: workflowData, errors: parseErrors } = parseWorkflowJson(
                workflow.content
              )

              if (!workflowData || parseErrors.length > 0) {
                logger.warn(`Failed to parse ${workflow.name}:`, parseErrors)
                continue
              }

              let targetFolderId = importFolder.id

              if (workflow.folderPath.length > 0) {
                const folderPathKey = workflow.folderPath.join('/')

                if (!folderMap.has(folderPathKey)) {
                  let parentId = importFolder.id

                  for (let i = 0; i < workflow.folderPath.length; i++) {
                    const pathSegment = workflow.folderPath.slice(0, i + 1).join('/')

                    if (!folderMap.has(pathSegment)) {
                      const subFolder = await createFolder({
                        name: workflow.folderPath[i],
                        workspaceId,
                        parentId,
                      })
                      folderMap.set(pathSegment, subFolder.id)
                      parentId = subFolder.id
                    } else {
                      parentId = folderMap.get(pathSegment)!
                    }
                  }
                }

                targetFolderId = folderMap.get(folderPathKey)!
              }

              const workflowName = extractWorkflowName(workflow.content)
              const { clearDiff } = useWorkflowDiffStore.getState()
              clearDiff()

              const newWorkflowId = await createWorkflow({
                name: workflowName,
                description: 'Imported from workspace export',
                workspaceId,
                folderId: targetFolderId,
              })

              const response = await fetch(`/api/workflows/${newWorkflowId}/state`, {
                method: 'PUT',
                headers: {
                  'Content-Type': 'application/json',
                },
                body: JSON.stringify(workflowData),
              })

              if (!response.ok) {
                logger.error(`Failed to save imported workflow ${newWorkflowId}`)
                continue
              }

              if (workflowData.variables && workflowData.variables.length > 0) {
                const variablesPayload = workflowData.variables.map((v: any) => ({
                  id: typeof v.id === 'string' && v.id.trim() ? v.id : crypto.randomUUID(),
                  workflowId: newWorkflowId,
                  name: v.name,
                  type: v.type,
                  value: v.value,
                }))

                await fetch(`/api/workflows/${newWorkflowId}/variables`, {
                  method: 'POST',
                  headers: {
                    'Content-Type': 'application/json',
                  },
                  body: JSON.stringify({ variables: variablesPayload }),
                })
              }

              logger.info(`Imported workflow: ${workflowName}`)
            } catch (error) {
              logger.error(`Failed to import ${workflow.name}:`, error)
            }
          }
        } else if (jsonFiles.length > 0) {
          importedWorkflows = await extractWorkflowsFromFiles(jsonFiles)

          for (const workflow of importedWorkflows) {
            try {
              const { data: workflowData, errors: parseErrors } = parseWorkflowJson(
                workflow.content
              )

              if (!workflowData || parseErrors.length > 0) {
                logger.warn(`Failed to parse ${workflow.name}:`, parseErrors)
                continue
              }

              const workflowName = extractWorkflowName(workflow.content)
              const { clearDiff } = useWorkflowDiffStore.getState()
              clearDiff()

              const newWorkflowId = await createWorkflow({
                name: workflowName,
                description: 'Imported from JSON',
                workspaceId,
              })

              const response = await fetch(`/api/workflows/${newWorkflowId}/state`, {
                method: 'PUT',
                headers: {
                  'Content-Type': 'application/json',
                },
                body: JSON.stringify(workflowData),
              })

              if (!response.ok) {
                logger.error(`Failed to save imported workflow ${newWorkflowId}`)
                continue
              }

              if (workflowData.variables && workflowData.variables.length > 0) {
                const variablesPayload = workflowData.variables.map((v: any) => ({
                  id: typeof v.id === 'string' && v.id.trim() ? v.id : crypto.randomUUID(),
                  workflowId: newWorkflowId,
                  name: v.name,
                  type: v.type,
                  value: v.value,
                }))

                await fetch(`/api/workflows/${newWorkflowId}/variables`, {
                  method: 'POST',
                  headers: {
                    'Content-Type': 'application/json',
                  },
                  body: JSON.stringify({ variables: variablesPayload }),
                })
              }

              logger.info(`Imported workflow: ${workflowName}`)
            } catch (error) {
              logger.error(`Failed to import ${workflow.name}:`, error)
            }
          }
        }

        const { loadWorkflows } = useWorkflowRegistry.getState()
        await loadWorkflows(workspaceId)

        const { fetchFolders } = useFolderStore.getState()
        await fetchFolders(workspaceId)
      } catch (error) {
        logger.error('Failed to read file:', { error })
      }

      // Reset file input
      if (fileInputRef.current) {
        fileInputRef.current.value = ''
        logger.error('Failed to import workflows:', error)
      } finally {
        setIsImporting(false)
        if (fileInputRef.current) {
          fileInputRef.current.value = ''
        }
      }
    },
    [handleDirectImport]
    [workspaceId, createWorkflow]
  )

  // Button event handlers
@@ -360,7 +462,7 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
          >
            <Download className={iconClassName} />
            <span className={textClassName}>
              {isImporting ? 'Importing...' : 'Import workflow'}
              {isImporting ? 'Importing...' : 'Import Workflows'}
            </span>
          </button>
        </PopoverContent>
@@ -369,7 +471,8 @@ export function CreateMenu({ onCreateWorkflow, isCreatingWorkflow = false }: Cre
      <input
        ref={fileInputRef}
        type='file'
        accept='.json'
        accept='.json,.zip'
        multiple
        style={{ display: 'none' }}
        onChange={handleFileChange}
      />

@@ -1,7 +1,7 @@
'use client'

import { useCallback, useEffect, useRef, useState } from 'react'
import { LogOut, Pencil, Plus, Send, Trash2 } from 'lucide-react'
import { Download, LogOut, Pencil, Plus, Send, Trash2 } from 'lucide-react'
import {
  AlertDialog,
  AlertDialogAction,
@@ -84,6 +84,7 @@ export function WorkspaceSelector({
  } | null>(null)
  const [isDeleteDialogOpen, setIsDeleteDialogOpen] = useState(false)
  const [workspaceToDelete, setWorkspaceToDelete] = useState<Workspace | null>(null)
  const [isExporting, setIsExporting] = useState(false)

  // Refs
  const scrollAreaRef = useRef<HTMLDivElement>(null)
@@ -300,6 +301,128 @@ export function WorkspaceSelector({
    [onLeaveWorkspace]
  )

  /**
   * Handle export workspace
   */
  const handleExportWorkspace = useCallback(async () => {
    if (!activeWorkspace || isExporting) return

    setIsExporting(true)
    try {
      const { exportWorkspaceToZip } = await import('@/lib/workflows/import-export')
      const { useFolderStore } = await import('@/stores/folders/store')

      const workflowsUrl = new URL('/api/workflows', window.location.origin)
      workflowsUrl.searchParams.append('workspaceId', activeWorkspace.id)

      const workflowsResponse = await fetch(workflowsUrl.toString())
      if (!workflowsResponse.ok) {
        throw new Error('Failed to fetch workflows')
      }

      const { data: workflows } = await workflowsResponse.json()

      if (!workflows || workflows.length === 0) {
        logger.warn('No workflows found to export')
        return
      }

      const foldersUrl = new URL('/api/folders', window.location.origin)
      foldersUrl.searchParams.append('workspaceId', activeWorkspace.id)

      const foldersResponse = await fetch(foldersUrl.toString())
      const foldersData = foldersResponse.ok ? await foldersResponse.json() : { folders: [] }

      const workflowsToExport: Array<{
        workflow: {
          id: string
          name: string
          description?: string
          folderId?: string | null
        }
        state: any
        variables?: Array<{
          id: string
          name: string
          type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'plain'
          value: any
        }>
      }> = []

      for (const workflow of workflows) {
        try {
          const workflowResponse = await fetch(`/api/workflows/${workflow.id}`)
          if (!workflowResponse.ok) {
            logger.warn(`Failed to fetch workflow ${workflow.id}`)
            continue
          }

          const { data: workflowData } = await workflowResponse.json()
          if (!workflowData?.state) {
            logger.warn(`Workflow ${workflow.id} has no state`)
            continue
          }

          const variablesResponse = await fetch(`/api/workflows/${workflow.id}/variables`)
          let workflowVariables: any[] = []
          if (variablesResponse.ok) {
            const variablesData = await variablesResponse.json()
            workflowVariables = Object.values(variablesData?.data || {}).map((v: any) => ({
              id: v.id,
              name: v.name,
              type: v.type,
              value: v.value,
            }))
          }

          workflowsToExport.push({
            workflow: {
              id: workflow.id,
              name: workflow.name,
              description: workflow.description,
              folderId: workflow.folderId,
            },
            state: workflowData.state,
            variables: workflowVariables,
          })
        } catch (error) {
          logger.error(`Failed to export workflow ${workflow.id}:`, error)
        }
      }

      const foldersToExport: Array<{
        id: string
        name: string
        parentId: string | null
      }> = (foldersData.folders || []).map((folder: any) => ({
        id: folder.id,
        name: folder.name,
        parentId: folder.parentId,
      }))

      const zipBlob = await exportWorkspaceToZip(
        activeWorkspace.name,
        workflowsToExport,
        foldersToExport
      )

      const blobUrl = URL.createObjectURL(zipBlob)
      const a = document.createElement('a')
      a.href = blobUrl
      a.download = `${activeWorkspace.name.replace(/[^a-z0-9]/gi, '-')}-workspace-export.zip`
      document.body.appendChild(a)
      a.click()
      document.body.removeChild(a)
      URL.revokeObjectURL(blobUrl)

      logger.info(`Exported ${workflowsToExport.length} workflows from workspace`)
    } catch (error) {
      logger.error('Failed to export workspace:', error)
    } finally {
      setIsExporting(false)
    }
  }, [activeWorkspace, isExporting])

  // Render workspace list
  const renderWorkspaceList = () => {
    if (isWorkspacesLoading) {
@@ -379,6 +502,26 @@ export function WorkspaceSelector({
            className='flex h-full flex-shrink-0 items-center justify-center gap-1'
            onClick={(e) => e.stopPropagation()}
          >
            {/* Export button - show on hover for admin users */}
            {!isEditing &&
              isHovered &&
              workspace.permissions === 'admin' &&
              activeWorkspace?.id === workspace.id && (
                <Button
                  variant='ghost'
                  size='icon'
                  onClick={(e) => {
                    e.stopPropagation()
                    handleExportWorkspace()
                  }}
                  disabled={isExporting}
                  className='h-4 w-4 p-0 text-muted-foreground transition-colors hover:bg-transparent hover:text-foreground'
                  title='Export workspace'
                >
                  <Download className='!h-3.5 !w-3.5' />
                </Button>
              )}

            {/* Edit button - show on hover for admin users */}
            {!isEditing && isHovered && workspace.permissions === 'admin' && (
              <Button
apps/sim/lib/workflows/import-export.ts (new file, 192 lines)
@@ -0,0 +1,192 @@
import JSZip from 'jszip'
import { createLogger } from '@/lib/logs/console/logger'
import type { WorkflowState } from '@/stores/workflows/workflow/types'
import { sanitizeForExport } from './json-sanitizer'

const logger = createLogger('WorkflowImportExport')

export interface WorkflowExportData {
  workflow: {
    id: string
    name: string
    description?: string
    folderId?: string | null
  }
  state: WorkflowState
  variables?: Array<{
    id: string
    name: string
    type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'plain'
    value: any
  }>
}

export interface FolderExportData {
  id: string
  name: string
  parentId: string | null
}

export interface WorkspaceExportStructure {
  workspace: {
    name: string
    exportedAt: string
  }
  workflows: WorkflowExportData[]
  folders: FolderExportData[]
}

function sanitizePathSegment(name: string): string {
  return name.replace(/[^a-z0-9-_]/gi, '-')
}

function buildFolderPath(
  folderId: string | null | undefined,
  foldersMap: Map<string, FolderExportData>
): string {
  if (!folderId) return ''

  const path: string[] = []
  let currentId: string | null = folderId

  while (currentId && foldersMap.has(currentId)) {
    const folder: FolderExportData = foldersMap.get(currentId)!
    path.unshift(sanitizePathSegment(folder.name))
    currentId = folder.parentId
  }

  return path.join('/')
}

export async function exportWorkspaceToZip(
  workspaceName: string,
  workflows: WorkflowExportData[],
  folders: FolderExportData[]
): Promise<Blob> {
  const zip = new JSZip()
  const foldersMap = new Map(folders.map((f) => [f.id, f]))

  const metadata = {
    workspace: {
      name: workspaceName,
      exportedAt: new Date().toISOString(),
    },
    folders: folders.map((f) => ({ id: f.id, name: f.name, parentId: f.parentId })),
  }

  zip.file('_workspace.json', JSON.stringify(metadata, null, 2))

  for (const workflow of workflows) {
    try {
      const workflowState = {
        ...workflow.state,
        metadata: {
          name: workflow.workflow.name,
          description: workflow.workflow.description,
          exportedAt: new Date().toISOString(),
        },
        variables: workflow.variables,
      }

      const exportState = sanitizeForExport(workflowState)
      const sanitizedName = sanitizePathSegment(workflow.workflow.name)
      const filename = `${sanitizedName}-${workflow.workflow.id}.json`

      const folderPath = buildFolderPath(workflow.workflow.folderId, foldersMap)
      const fullPath = folderPath ? `${folderPath}/${filename}` : filename

      zip.file(fullPath, JSON.stringify(exportState, null, 2))
    } catch (error) {
      logger.error(`Failed to export workflow ${workflow.workflow.id}:`, error)
    }
  }

  return await zip.generateAsync({ type: 'blob' })
}

export interface ImportedWorkflow {
  content: string
  name: string
  folderPath: string[]
}

export interface WorkspaceImportMetadata {
  workspaceName: string
  exportedAt?: string
}

export async function extractWorkflowsFromZip(
  zipFile: File
): Promise<{ workflows: ImportedWorkflow[]; metadata?: WorkspaceImportMetadata }> {
  const zip = await JSZip.loadAsync(await zipFile.arrayBuffer())
  const workflows: ImportedWorkflow[] = []
  let metadata: WorkspaceImportMetadata | undefined

  for (const [path, file] of Object.entries(zip.files)) {
    if (file.dir) continue

    if (path === '_workspace.json') {
      try {
        const content = await file.async('string')
        const parsed = JSON.parse(content)
        metadata = {
          workspaceName: parsed.workspace?.name || 'Imported Workspace',
          exportedAt: parsed.workspace?.exportedAt,
        }
      } catch (error) {
        logger.error('Failed to parse workspace metadata:', error)
      }
      continue
    }

    if (!path.toLowerCase().endsWith('.json')) continue

    try {
      const content = await file.async('string')
      const pathParts = path.split('/').filter((p) => p.length > 0)
      const filename = pathParts.pop() || path

      workflows.push({
        content,
        name: filename,
        folderPath: pathParts,
      })
    } catch (error) {
      logger.error(`Failed to extract ${path}:`, error)
    }
  }

  return { workflows, metadata }
}

export async function extractWorkflowsFromFiles(files: File[]): Promise<ImportedWorkflow[]> {
  const workflows: ImportedWorkflow[] = []

  for (const file of files) {
    if (!file.name.toLowerCase().endsWith('.json')) continue

    try {
      const content = await file.text()
      workflows.push({
        content,
        name: file.name,
        folderPath: [],
      })
    } catch (error) {
      logger.error(`Failed to read ${file.name}:`, error)
    }
  }

  return workflows
}

export function extractWorkflowName(content: string): string {
  try {
    const parsed = JSON.parse(content)
    if (parsed.state?.metadata?.name && typeof parsed.state.metadata.name === 'string') {
      return parsed.state.metadata.name.trim()
    }
  } catch {}

  return `Imported Workflow ${new Date().toLocaleString()}`
}
@@ -49,6 +49,17 @@ export interface ExportWorkflowState {
    edges: Edge[]
    loops: Record<string, Loop>
    parallels: Record<string, Parallel>
    metadata?: {
      name?: string
      description?: string
      exportedAt?: string
    }
    variables?: Array<{
      id: string
      name: string
      type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'plain'
      value: any
    }>
  }
}

@@ -369,35 +380,32 @@ export function sanitizeForCopilot(state: WorkflowState): CopilotWorkflowState {
 * Users need positions to restore the visual layout when importing
 */
export function sanitizeForExport(state: WorkflowState): ExportWorkflowState {
  // Deep clone to avoid mutating original state
  const clonedState = JSON.parse(
    JSON.stringify({
      blocks: state.blocks,
      edges: state.edges,
      loops: state.loops || {},
      parallels: state.parallels || {},
      metadata: state.metadata,
      variables: state.variables,
    })
  )

  // Remove sensitive data from subblocks
  Object.values(clonedState.blocks).forEach((block: any) => {
    if (block.subBlocks) {
      Object.entries(block.subBlocks).forEach(([key, subBlock]: [string, any]) => {
        // Clear OAuth credentials and API keys based on field name only
        if (
          /credential|oauth|api[_-]?key|token|secret|auth|password|bearer/i.test(key) ||
          subBlock.type === 'oauth-input'
        ) {
          subBlock.value = ''
        }
        // Remove knowledge base tag filters and document tags (workspace-specific data)
        if (key === 'tagFilters' || key === 'documentTags') {
          subBlock.value = ''
        }
      })
    }

    // Also clear from data field if present
    if (block.data) {
      Object.entries(block.data).forEach(([key, value]: [string, any]) => {
        if (/credential|oauth|api[_-]?key|token|secret|auth|password|bearer/i.test(key)) {

@@ -84,6 +84,7 @@
    "jose": "6.0.11",
    "js-tiktoken": "1.0.21",
    "js-yaml": "4.1.0",
    "jszip": "3.10.1",
    "jwt-decode": "^4.0.0",
    "lucide-react": "^0.479.0",
    "mammoth": "^1.9.0",

@@ -8,6 +8,7 @@ const logger = createLogger('WorkflowJsonImporter')
 * Generate new IDs for all blocks and edges to avoid conflicts
 */
function regenerateIds(workflowState: WorkflowState): WorkflowState {
  const { metadata, variables } = workflowState
  const blockIdMap = new Map<string, string>()
  const newBlocks: WorkflowState['blocks'] = {}

@@ -99,6 +100,8 @@ function regenerateIds(workflowState: WorkflowState): WorkflowState {
    edges: newEdges,
    loops: newLoops,
    parallels: newParallels,
    metadata,
    variables,
  }
}

@@ -206,11 +209,18 @@ export function parseWorkflowJson(
    edges: workflowData.edges || [],
    loops: workflowData.loops || {},
    parallels: workflowData.parallels || {},
    metadata: workflowData.metadata,
    variables: Array.isArray(workflowData.variables) ? workflowData.variables : undefined,
  }

  // Regenerate IDs if requested (default: true)
  if (regenerateIdsFlag) {
    workflowState = regenerateIds(workflowState)
    const regeneratedState = regenerateIds(workflowState)
    workflowState = {
      ...regeneratedState,
      metadata: workflowState.metadata,
      variables: workflowState.variables,
    }
    logger.info('Regenerated IDs for imported workflow to avoid conflicts')
  }

@@ -23,8 +23,7 @@ export const useWorkflowJsonStore = create<WorkflowJsonStore>()(
    lastGenerated: undefined,

    generateJson: () => {
      // Get the active workflow ID from registry
      const { activeWorkflowId } = useWorkflowRegistry.getState()
      const { activeWorkflowId, workflows } = useWorkflowRegistry.getState()

      if (!activeWorkflowId) {
        logger.warn('No active workflow to generate JSON for')
@@ -32,7 +31,6 @@ export const useWorkflowJsonStore = create<WorkflowJsonStore>()(
      }

      try {
        // Get the workflow state with merged subblock values
        const workflow = getWorkflowWithValues(activeWorkflowId)

        if (!workflow || !workflow.state) {
@@ -40,9 +38,27 @@ export const useWorkflowJsonStore = create<WorkflowJsonStore>()(
          return
        }

        const workflowState = workflow.state
        const workflowMetadata = workflows[activeWorkflowId]
        const { useVariablesStore } = require('@/stores/panel/variables/store')
        const workflowVariables = useVariablesStore
          .getState()
          .getVariablesByWorkflowId(activeWorkflowId)

        const workflowState = {
          ...workflow.state,
          metadata: {
            name: workflowMetadata?.name,
            description: workflowMetadata?.description,
            exportedAt: new Date().toISOString(),
          },
          variables: workflowVariables.map((v: any) => ({
            id: v.id,
            name: v.name,
            type: v.type,
            value: v.value,
          })),
        }

        // Sanitize for export (keeps positions, removes secrets, adds version)
        const exportState: ExportWorkflowState = sanitizeForExport(workflowState)

        // Convert to formatted JSON

@@ -153,13 +153,21 @@ export interface WorkflowState {
  loops: Record<string, Loop>
  parallels: Record<string, Parallel>
  lastUpdate?: number
  // Legacy deployment fields (keeping for compatibility)
  metadata?: {
    name?: string
    description?: string
    exportedAt?: string
  }
  variables?: Array<{
    id: string
    name: string
    type: 'string' | 'number' | 'boolean' | 'object' | 'array' | 'plain'
    value: any
  }>
  isDeployed?: boolean
  deployedAt?: Date
  // New field for per-workflow deployment status
  deploymentStatuses?: Record<string, DeploymentStatus>
  needsRedeployment?: boolean
  // Drag state for undo/redo
  dragStartPosition?: DragStartPosition | null
}

bun.lock (25 lines changed)
@@ -120,6 +120,7 @@
"jose": "6.0.11",
"js-tiktoken": "1.0.21",
"js-yaml": "4.1.0",
"jszip": "3.10.1",
"jwt-decode": "^4.0.0",
"lucide-react": "^0.479.0",
"mammoth": "^1.9.0",
@@ -2737,7 +2738,7 @@

"rxjs": ["rxjs@7.8.2", "", { "dependencies": { "tslib": "^2.1.0" } }, "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA=="],

"safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],
"safe-buffer": ["safe-buffer@5.1.2", "", {}, "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="],

"safe-stable-stringify": ["safe-stable-stringify@2.5.0", "", {}, "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA=="],

@@ -3383,8 +3384,6 @@

"anymatch/picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],

"basic-auth/safe-buffer": ["safe-buffer@5.1.2", "", {}, "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="],

"better-auth/jose": ["jose@6.1.0", "", {}, "sha512-TTQJyoEoKcC1lscpVDCSsVgYzUDg/0Bt3WE//WiTPK6uOCQC2KZS4MpugbMWt/zyjkopgZoXhZuCi00gLudfUA=="],

"better-auth/zod": ["zod@4.1.11", "", {}, "sha512-WPsqwxITS2tzx1bzhIKsEs19ABD5vmCVa4xBo2tq/SrV4RNZtfws1EnCWQXM6yh8bD08a1idvkB5MZSBiZsjwg=="],
@@ -3401,8 +3400,12 @@

"concat-stream/readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="],

"content-disposition/safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],

"dom-serializer/entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="],

"ecdsa-sig-formatter/safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],

"engine.io/@types/node": ["@types/node@24.2.1", "", { "dependencies": { "undici-types": "~7.10.0" } }, "sha512-DRh5K+ka5eJic8CjH7td8QpYEV6Zo10gfRkjHCO3weqZHWDtAaSTFtl4+VMqOJ4N5jcuhZ9/l+yy8rVgw7BQeQ=="],

"engine.io/debug": ["debug@4.3.7", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ=="],
@@ -3461,6 +3464,10 @@

"jsondiffpatch/chalk": ["chalk@5.6.2", "", {}, "sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA=="],

"jwa/safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],

"jws/safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],

"linebreak/base64-js": ["base64-js@0.0.8", "", {}, "sha512-3XSA2cR/h/73EzlXXdU6YNycmYI7+kicTxks4eJg2g39biHR84slg2+des+p7iHYhbRg/udIS4TD53WabcOUkw=="],

"lint-staged/chalk": ["chalk@5.6.2", "", {}, "sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA=="],
@@ -3533,8 +3540,6 @@

"react-promise-suspense/fast-deep-equal": ["fast-deep-equal@2.0.1", "", {}, "sha512-bCK/2Z4zLidyB4ReuIsvALH6w31YfAQDmXMqMx6FyfHqvBxtjC0eRumeSu4Bs3XtXwpyIywtSTrVT99BxY1f9w=="],

"readable-stream/safe-buffer": ["safe-buffer@5.1.2", "", {}, "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="],

"readable-web-to-node-stream/readable-stream": ["readable-stream@4.7.0", "", { "dependencies": { "abort-controller": "^3.0.0", "buffer": "^6.0.3", "events": "^3.3.0", "process": "^0.11.10", "string_decoder": "^1.3.0" } }, "sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg=="],

"resend/@react-email/render": ["@react-email/render@1.1.2", "", { "dependencies": { "html-to-text": "^9.0.5", "prettier": "^3.5.3", "react-promise-suspense": "^0.3.4" }, "peerDependencies": { "react": "^18.0 || ^19.0 || ^19.0.0-rc", "react-dom": "^18.0 || ^19.0 || ^19.0.0-rc" } }, "sha512-RnRehYN3v9gVlNMehHPHhyp2RQo7+pSkHDtXPvg3s0GbzM9SQMW4Qrf8GRNvtpLC4gsI+Wt0VatNRUFqjvevbw=="],
@@ -3577,8 +3582,6 @@

"string-width-cjs/emoji-regex": ["emoji-regex@8.0.0", "", {}, "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="],

"string_decoder/safe-buffer": ["safe-buffer@5.1.2", "", {}, "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="],

"sucrase/commander": ["commander@4.1.1", "", {}, "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA=="],

"sucrase/glob": ["glob@10.4.5", "", { "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^3.1.2", "minimatch": "^9.0.4", "minipass": "^7.1.2", "package-json-from-dist": "^1.0.0", "path-scurry": "^1.11.1" }, "bin": { "glob": "dist/esm/bin.mjs" } }, "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg=="],
@@ -3727,12 +3730,16 @@

"google-auth-library/jws/jwa": ["jwa@2.0.1", "", { "dependencies": { "buffer-equal-constant-time": "^1.0.1", "ecdsa-sig-formatter": "1.0.11", "safe-buffer": "^5.0.1" } }, "sha512-hRF04fqJIP8Abbkq5NKGN0Bbr3JxlQ+qhZufXVr0DvujKy93ZCbXZMHDL4EOtodSbCWxOqR8MS1tXA5hwqCXDg=="],

"google-auth-library/jws/safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],

"groq-sdk/@types/node/undici-types": ["undici-types@5.26.5", "", {}, "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="],

"groq-sdk/node-fetch/whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="],

"gtoken/jws/jwa": ["jwa@2.0.1", "", { "dependencies": { "buffer-equal-constant-time": "^1.0.1", "ecdsa-sig-formatter": "1.0.11", "safe-buffer": "^5.0.1" } }, "sha512-hRF04fqJIP8Abbkq5NKGN0Bbr3JxlQ+qhZufXVr0DvujKy93ZCbXZMHDL4EOtodSbCWxOqR8MS1tXA5hwqCXDg=="],

"gtoken/jws/safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],

"inquirer/ora/is-interactive": ["is-interactive@1.0.0", "", {}, "sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w=="],

"inquirer/ora/is-unicode-supported": ["is-unicode-supported@0.1.0", "", {}, "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw=="],
@@ -3787,6 +3794,8 @@

"oauth2-mock-server/express/qs": ["qs@6.13.0", "", { "dependencies": { "side-channel": "^1.0.6" } }, "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg=="],

"oauth2-mock-server/express/safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],

"oauth2-mock-server/express/send": ["send@0.19.0", "", { "dependencies": { "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", "encodeurl": "~1.0.2", "escape-html": "~1.0.3", "etag": "~1.8.1", "fresh": "0.5.2", "http-errors": "2.0.0", "mime": "1.6.0", "ms": "2.1.3", "on-finished": "2.4.1", "range-parser": "~1.2.1", "statuses": "2.0.1" } }, "sha512-dW41u5VfLXu8SJh5bwRmyYUbAoSB3c9uQh6L8h/KtsFREPWpbX1lrljJo186Jc4nmci/sGUZ9a0a0J2zgfq2hw=="],

"oauth2-mock-server/express/serve-static": ["serve-static@1.16.2", "", { "dependencies": { "encodeurl": "~2.0.0", "escape-html": "~1.0.3", "parseurl": "~1.3.3", "send": "0.19.0" } }, "sha512-VqpjJZKadQB/PEbEwvFdO43Ax5dFBZ2UECszz8bQ7pi7wt//PWe1P6MN7eCnjsatYtBT6EuiClbjSWP2WrIoTw=="],
@@ -3919,6 +3928,8 @@

"ora/cli-cursor/restore-cursor/onetime": ["onetime@7.0.0", "", { "dependencies": { "mimic-function": "^5.0.0" } }, "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ=="],

"readable-web-to-node-stream/readable-stream/string_decoder/safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],

"sim/tailwindcss/chokidar/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],

"sim/tailwindcss/chokidar/readdirp": ["readdirp@3.6.0", "", { "dependencies": { "picomatch": "^2.2.1" } }, "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA=="],