Compare commits

..

7 Commits

Author SHA1 Message Date
Vikhyath Mondreti
bff1852a85 v0.3.56: i18n, sharepoint operations & logs search improvements 2025-09-18 14:21:14 -07:00
Adam Gough
7327b448e5 Improvement(sharepoint): added ability to create list items, different from create list (#1379)
* added add list items

(cherry picked from commit df6ea35d5bb975c03c7ec0c787bd915f34890ac0)

* bun run lint

* minor changes

---------

Co-authored-by: Adam Gough <adamgough@Mac.attlocal.net>
Co-authored-by: Adam Gough <adamgough@Adams-MacBook-Pro.local>
2025-09-18 14:18:58 -07:00
Waleed
eb1e90bb7f improvement(search): added more granular logs search, added logs export, improved overall search experience (#1378)
* improvement(search): added more granular logs search, added logs export, improved overall search experience

* updated tests
2025-09-18 13:58:44 -07:00
Vikhyath Mondreti
3905d1cb81 fix(selectors): gdrive and slack selectors inf loops (#1376)
* fix(selectors): gdrive and slack selectors inf loops

* remove comment
2025-09-18 11:40:36 -07:00
Waleed
cd084e8236 fix(actions): updated i18n gh action to use PAT instead of default token (#1377) 2025-09-18 11:29:02 -07:00
Waleed
5d96484501 fix(variables): remove quote stripping from short & long inputs (#1375)
* fix(variables): remove quote stripping from short & long inputs

* restore env

* remove quote stripping everywhere

* remove unused file
2025-09-18 11:04:22 -07:00
Waleed
6747a497fc fix(migrations): upgrade drizzle-kit in migrations container (#1374)
* fix(migrations): upgrade drizzle-kit in migrations container

* fix comments

* rm unused file
2025-09-18 11:04:06 -07:00
33 changed files with 845 additions and 199 deletions

View File

@@ -21,7 +21,7 @@ jobs:
- name: Checkout repository
uses: actions/checkout@v4
with:
token: ${{ secrets.GITHUB_TOKEN }}
token: ${{ secrets.GH_PAT }}
fetch-depth: 0
- name: Setup Bun
@@ -53,7 +53,7 @@ jobs:
if: steps.changes.outputs.changes == 'true'
uses: peter-evans/create-pull-request@v5
with:
token: ${{ secrets.GITHUB_TOKEN }}
token: ${{ secrets.GH_PAT }}
commit-message: "feat(i18n): update translations"
title: "🌐 Auto-update translations"
body: |

View File

@@ -17,6 +17,7 @@
"google_calendar",
"google_docs",
"google_drive",
"google_forms",
"google_search",
"google_sheets",
"huggingface",

View File

@@ -183,6 +183,25 @@ Update the properties (fields) on a SharePoint list item
| --------- | ---- | ----------- |
| `item` | object | Updated SharePoint list item |
### `sharepoint_add_list_items`
Add a new item to a SharePoint list
#### Input
| Parameter | Type | Required | Description |
| --------- | ---- | -------- | ----------- |
| `siteSelector` | string | No | Select the SharePoint site |
| `siteId` | string | No | The ID of the SharePoint site (internal use) |
| `listId` | string | Yes | The ID of the list to add the item to |
| `listItemFields` | object | Yes | Field values for the new list item |
#### Output
| Parameter | Type | Description |
| --------- | ---- | ----------- |
| `item` | object | Created SharePoint list item |
## Notes

View File

@@ -0,0 +1,200 @@
import { db } from '@sim/db'
import { permissions, workflow, workflowExecutionLogs } from '@sim/db/schema'
import { and, desc, eq, gte, inArray, lte, type SQL, sql } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { createLogger } from '@/lib/logs/console/logger'
// Scoped logger used for error reporting in this route.
const logger = createLogger('LogsExportAPI')

// Disable route-level caching: an export must always reflect the latest logs.
export const revalidate = 0

// Query-string parameters accepted by GET /api/logs/export.
// Only workspaceId is required; every filter is optional. The comma-separated
// list params (workflowIds, folderIds, triggers) are split downstream in GET.
const ExportParamsSchema = z.object({
  level: z.string().optional(),
  workflowIds: z.string().optional(),
  folderIds: z.string().optional(),
  triggers: z.string().optional(),
  // startDate/endDate are ISO date strings parsed with `new Date(...)` in GET.
  startDate: z.string().optional(),
  endDate: z.string().optional(),
  search: z.string().optional(),
  workflowName: z.string().optional(),
  folderName: z.string().optional(),
  workspaceId: z.string(),
})
/**
 * Escape a single value for use as a CSV cell (RFC 4180).
 *
 * - `null`/`undefined` become the empty string.
 * - Values containing a double quote, comma, or line break are wrapped in
 *   double quotes with embedded quotes doubled.
 * - All other values are stringified and returned verbatim.
 */
function escapeCsv(value: unknown): string {
  if (value === null || value === undefined) return ''
  const str = String(value)
  // Fix: also quote on \r — a bare carriage return splits the row in strict
  // RFC 4180 parsers (the previous pattern only checked for `"` `,` `\n`).
  if (/[",\r\n]/.test(str)) {
    return `"${str.replace(/"/g, '""')}"`
  }
  return str
}
/**
 * GET /api/logs/export
 *
 * Streams workflow execution logs for a workspace as a CSV attachment.
 * Filters are taken from the query string (validated by ExportParamsSchema).
 * Rows are fetched in pages of 1000 and written incrementally to a
 * ReadableStream so large exports do not buffer fully in memory.
 *
 * Responses: 200 with `text/csv` on success, 401 when unauthenticated,
 * 500 on any validation or database error.
 */
export async function GET(request: NextRequest) {
  try {
    const session = await getSession()
    if (!session?.user?.id) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }
    const userId = session.user.id

    const { searchParams } = new URL(request.url)
    const params = ExportParamsSchema.parse(Object.fromEntries(searchParams.entries()))

    // Columns projected for each CSV row: log fields plus the owning workflow's name.
    const selectColumns = {
      id: workflowExecutionLogs.id,
      workflowId: workflowExecutionLogs.workflowId,
      executionId: workflowExecutionLogs.executionId,
      level: workflowExecutionLogs.level,
      trigger: workflowExecutionLogs.trigger,
      startedAt: workflowExecutionLogs.startedAt,
      endedAt: workflowExecutionLogs.endedAt,
      totalDurationMs: workflowExecutionLogs.totalDurationMs,
      cost: workflowExecutionLogs.cost,
      executionData: workflowExecutionLogs.executionData,
      workflowName: workflow.name,
    }

    // Build the WHERE clause incrementally from the optional filters.
    let conditions: SQL | undefined = eq(workflow.workspaceId, params.workspaceId)

    if (params.level && params.level !== 'all') {
      conditions = and(conditions, eq(workflowExecutionLogs.level, params.level))
    }

    if (params.workflowIds) {
      const workflowIds = params.workflowIds.split(',').filter(Boolean)
      if (workflowIds.length > 0) conditions = and(conditions, inArray(workflow.id, workflowIds))
    }

    if (params.folderIds) {
      const folderIds = params.folderIds.split(',').filter(Boolean)
      if (folderIds.length > 0) conditions = and(conditions, inArray(workflow.folderId, folderIds))
    }

    if (params.triggers) {
      const triggers = params.triggers.split(',').filter(Boolean)
      // 'all' in the list means "no trigger filter".
      if (triggers.length > 0 && !triggers.includes('all')) {
        conditions = and(conditions, inArray(workflowExecutionLogs.trigger, triggers))
      }
    }

    if (params.startDate) {
      conditions = and(conditions, gte(workflowExecutionLogs.startedAt, new Date(params.startDate)))
    }

    if (params.endDate) {
      conditions = and(conditions, lte(workflowExecutionLogs.startedAt, new Date(params.endDate)))
    }

    // Free-text search matches against the execution id only.
    if (params.search) {
      const term = `%${params.search}%`
      conditions = and(conditions, sql`${workflowExecutionLogs.executionId} ILIKE ${term}`)
    }

    if (params.workflowName) {
      const nameTerm = `%${params.workflowName}%`
      conditions = and(conditions, sql`${workflow.name} ILIKE ${nameTerm}`)
    }

    if (params.folderName) {
      // NOTE(review): this matches against workflow.name, not a folder name —
      // no folder table is joined in this query. It looks like a copy of the
      // workflowName branch; confirm intended behavior before relying on it.
      const folderTerm = `%${params.folderName}%`
      conditions = and(conditions, sql`${workflow.name} ILIKE ${folderTerm}`)
    }

    const header = [
      'startedAt',
      'level',
      'workflow',
      'trigger',
      'durationMs',
      'costTotal',
      'workflowId',
      'executionId',
      'message',
      'traceSpans',
    ].join(',')

    const encoder = new TextEncoder()
    const stream = new ReadableStream<Uint8Array>({
      start: async (controller) => {
        controller.enqueue(encoder.encode(`${header}\n`))

        // Page through results so the export never loads everything at once.
        // NOTE(review): offset pagination over a live table can skip or repeat
        // rows if logs arrive mid-export; acceptable for a best-effort export.
        const pageSize = 1000
        let offset = 0
        try {
          while (true) {
            const rows = await db
              .select(selectColumns)
              .from(workflowExecutionLogs)
              .innerJoin(workflow, eq(workflowExecutionLogs.workflowId, workflow.id))
              .innerJoin(
                permissions,
                and(
                  // Restrict to workspaces the caller has a permission row for.
                  // NOTE(review): no permission-type filter here — any
                  // permission level grants export access; confirm intended.
                  eq(permissions.entityType, 'workspace'),
                  eq(permissions.entityId, workflow.workspaceId),
                  eq(permissions.userId, userId)
                )
              )
              .where(conditions)
              .orderBy(desc(workflowExecutionLogs.startedAt))
              .limit(pageSize)
              .offset(offset)

            if (!rows.length) break

            for (const r of rows as any[]) {
              // Best-effort extraction of message/trace data from the stored
              // execution payload; malformed payloads are silently skipped.
              let message = ''
              let traces: any = null
              try {
                const ed = (r as any).executionData
                if (ed) {
                  if (ed.finalOutput)
                    message =
                      typeof ed.finalOutput === 'string'
                        ? ed.finalOutput
                        : JSON.stringify(ed.finalOutput)
                  // An explicit message takes precedence over finalOutput.
                  if (ed.message) message = ed.message
                  if (ed.traceSpans) traces = ed.traceSpans
                }
              } catch {}

              const line = [
                escapeCsv(r.startedAt?.toISOString?.() || r.startedAt),
                escapeCsv(r.level),
                escapeCsv(r.workflowName),
                escapeCsv(r.trigger),
                escapeCsv(r.totalDurationMs ?? ''),
                // Cost may be stored flat ({ total }) or nested ({ value: { total } }).
                escapeCsv(r.cost?.total ?? r.cost?.value?.total ?? ''),
                escapeCsv(r.workflowId ?? ''),
                escapeCsv(r.executionId ?? ''),
                escapeCsv(message),
                escapeCsv(traces ? JSON.stringify(traces) : ''),
              ].join(',')
              controller.enqueue(encoder.encode(`${line}\n`))
            }

            offset += pageSize
          }
          controller.close()
        } catch (e: any) {
          logger.error('Export stream error', { error: e?.message })
          try {
            controller.error(e)
          } catch {}
        }
      },
    })

    // Timestamped filename, with ':' and '.' replaced for filesystem safety.
    const ts = new Date().toISOString().replace(/[:.]/g, '-')
    const filename = `logs-${ts}.csv`
    return new NextResponse(stream as any, {
      status: 200,
      headers: {
        'Content-Type': 'text/csv; charset=utf-8',
        // Bug fix: the computed filename was previously dropped — the header
        // contained a literal placeholder instead of `${filename}`.
        'Content-Disposition': `attachment; filename="${filename}"`,
        'Cache-Control': 'no-cache',
      },
    })
  } catch (error: any) {
    logger.error('Export error', { error: error?.message })
    return NextResponse.json({ error: 'Internal server error' }, { status: 500 })
  }
}

View File

@@ -22,6 +22,8 @@ const QueryParamsSchema = z.object({
startDate: z.string().optional(),
endDate: z.string().optional(),
search: z.string().optional(),
workflowName: z.string().optional(),
folderName: z.string().optional(),
workspaceId: z.string(),
})
@@ -155,6 +157,18 @@ export async function GET(request: NextRequest) {
conditions = and(conditions, sql`${workflowExecutionLogs.executionId} ILIKE ${searchTerm}`)
}
// Filter by workflow name (from advanced search input)
if (params.workflowName) {
const nameTerm = `%${params.workflowName}%`
conditions = and(conditions, sql`${workflow.name} ILIKE ${nameTerm}`)
}
// Filter by folder name (best-effort text match when present on workflows)
if (params.folderName) {
const folderTerm = `%${params.folderName}%`
conditions = and(conditions, sql`${workflow.name} ILIKE ${folderTerm}`)
}
// Execute the query using the optimized join
const logs = await baseQuery
.where(conditions)

View File

@@ -1,5 +1,6 @@
import { useEffect, useMemo, useState } from 'react'
import { Check, ChevronDown } from 'lucide-react'
import { useParams } from 'next/navigation'
import { Button } from '@/components/ui/button'
import {
Command,
@@ -26,20 +27,27 @@ interface WorkflowOption {
}
export default function Workflow() {
const { workflowIds, toggleWorkflowId, setWorkflowIds } = useFilterStore()
const { workflowIds, toggleWorkflowId, setWorkflowIds, folderIds } = useFilterStore()
const params = useParams()
const workspaceId = params?.workspaceId as string | undefined
const [workflows, setWorkflows] = useState<WorkflowOption[]>([])
const [loading, setLoading] = useState(true)
const [search, setSearch] = useState('')
// Fetch all available workflows from the API
useEffect(() => {
const fetchWorkflows = async () => {
try {
setLoading(true)
const response = await fetch('/api/workflows')
const query = workspaceId ? `?workspaceId=${encodeURIComponent(workspaceId)}` : ''
const response = await fetch(`/api/workflows${query}`)
if (response.ok) {
const { data } = await response.json()
const workflowOptions: WorkflowOption[] = data.map((workflow: any) => ({
const scoped = Array.isArray(data)
? folderIds.length > 0
? data.filter((w: any) => (w.folderId ? folderIds.includes(w.folderId) : false))
: data
: []
const workflowOptions: WorkflowOption[] = scoped.map((workflow: any) => ({
id: workflow.id,
name: workflow.name,
color: workflow.color || '#3972F6',
@@ -54,7 +62,7 @@ export default function Workflow() {
}
fetchWorkflows()
}, [])
}, [workspaceId, folderIds])
const getSelectedWorkflowsText = () => {
if (workflowIds.length === 0) return 'All workflows'

View File

@@ -1,7 +1,7 @@
'use client'
import { useMemo } from 'react'
import { Search, X } from 'lucide-react'
import { useEffect, useMemo } from 'react'
import { Loader2, Search, X } from 'lucide-react'
import { Badge } from '@/components/ui/badge'
import { Button } from '@/components/ui/button'
import { Input } from '@/components/ui/input'
@@ -17,6 +17,7 @@ interface AutocompleteSearchProps {
availableWorkflows?: string[]
availableFolders?: string[]
className?: string
onOpenChange?: (open: boolean) => void
}
export function AutocompleteSearch({
@@ -26,6 +27,7 @@ export function AutocompleteSearch({
availableWorkflows = [],
availableFolders = [],
className,
onOpenChange,
}: AutocompleteSearchProps) {
const suggestionEngine = useMemo(() => {
return new SearchSuggestions(availableWorkflows, availableFolders)
@@ -42,6 +44,8 @@ export function AutocompleteSearch({
handleKeyDown,
handleFocus,
handleBlur,
reset: resetAutocomplete,
closeDropdown,
} = useAutocomplete({
getSuggestions: (inputValue, cursorPos) =>
suggestionEngine.getSuggestions(inputValue, cursorPos),
@@ -52,10 +56,39 @@ export function AutocompleteSearch({
debounceMs: 100,
})
const clearAll = () => {
resetAutocomplete()
closeDropdown()
onChange('')
if (inputRef.current) {
inputRef.current.focus()
}
}
const parsedQuery = parseQuery(value)
const hasFilters = parsedQuery.filters.length > 0
const hasTextSearch = parsedQuery.textSearch.length > 0
const listboxId = 'logs-search-listbox'
const inputId = 'logs-search-input'
useEffect(() => {
onOpenChange?.(state.isOpen)
}, [state.isOpen, onOpenChange])
useEffect(() => {
if (!state.isOpen || state.highlightedIndex < 0) return
const container = dropdownRef.current
const optionEl = document.getElementById(`${listboxId}-option-${state.highlightedIndex}`)
if (container && optionEl) {
try {
optionEl.scrollIntoView({ block: 'nearest', behavior: 'smooth' })
} catch {
optionEl.scrollIntoView({ block: 'nearest' })
}
}
}, [state.isOpen, state.highlightedIndex])
const onInputChange = (e: React.ChangeEvent<HTMLInputElement>) => {
const newValue = e.target.value
const cursorPos = e.target.selectionStart || 0
@@ -77,8 +110,10 @@ export function AutocompleteSearch({
)
const newQuery = [...filterStrings, parsedQuery.textSearch].filter(Boolean).join(' ')
onChange(newQuery)
handleInputChange(newQuery, newQuery.length)
if (inputRef.current) {
inputRef.current.focus()
}
}
return (
@@ -91,24 +126,37 @@ export function AutocompleteSearch({
state.isOpen && 'ring-1 ring-ring'
)}
>
<Search className='h-4 w-4 flex-shrink-0 text-muted-foreground' strokeWidth={2} />
{state.pendingQuery ? (
<Loader2 className='h-4 w-4 flex-shrink-0 animate-spin text-muted-foreground' />
) : (
<Search className='h-4 w-4 flex-shrink-0 text-muted-foreground' strokeWidth={2} />
)}
{/* Text display with ghost text */}
<div className='relative flex-1 font-[380] font-sans text-base leading-none'>
{/* Invisible input for cursor and interactions */}
<Input
ref={inputRef}
id={inputId}
placeholder={state.inputValue ? '' : placeholder}
value={state.inputValue}
onChange={onInputChange}
onFocus={handleFocus}
onBlur={handleBlur}
onClick={(e) => updateCursorPosition(e.currentTarget)}
onKeyUp={(e) => updateCursorPosition(e.currentTarget)}
onKeyDown={handleKeyDown}
onSelect={(e) => updateCursorPosition(e.currentTarget)}
className='relative z-10 w-full border-0 bg-transparent p-0 font-[380] font-sans text-base text-transparent leading-none placeholder:text-muted-foreground focus-visible:ring-0 focus-visible:ring-offset-0'
style={{ background: 'transparent' }}
role='combobox'
aria-expanded={state.isOpen}
aria-controls={state.isOpen ? listboxId : undefined}
aria-autocomplete='list'
aria-activedescendant={
state.isOpen && state.highlightedIndex >= 0
? `${listboxId}-option-${state.highlightedIndex}`
: undefined
}
/>
{/* Always-visible text overlay */}
@@ -134,7 +182,10 @@ export function AutocompleteSearch({
variant='ghost'
size='sm'
className='h-6 w-6 p-0 hover:bg-muted/50'
onClick={() => onChange('')}
onMouseDown={(e) => {
e.preventDefault()
clearAll()
}}
>
<X className='h-3 w-3' />
</Button>
@@ -145,7 +196,10 @@ export function AutocompleteSearch({
{state.isOpen && state.suggestions.length > 0 && (
<div
ref={dropdownRef}
className='absolute z-[9999] mt-1 w-full min-w-[500px] overflow-hidden rounded-md border bg-popover shadow-md'
className='min-w[500px] absolute z-[9999] mt-1 w-full overflow-hidden rounded-md border bg-popover shadow-md'
id={listboxId}
role='listbox'
aria-labelledby={inputId}
>
<div className='max-h-96 overflow-y-auto py-1'>
{state.suggestionType === 'filter-keys' && (
@@ -168,12 +222,20 @@ export function AutocompleteSearch({
'transition-colors hover:bg-accent hover:text-accent-foreground',
index === state.highlightedIndex && 'bg-accent text-accent-foreground'
)}
onMouseEnter={() => handleSuggestionHover(index)}
onMouseEnter={() => {
if (typeof window !== 'undefined' && (window as any).__logsKeyboardNavActive) {
return
}
handleSuggestionHover(index)
}}
onMouseDown={(e) => {
e.preventDefault()
e.stopPropagation()
handleSuggestionSelect(suggestion)
}}
id={`${listboxId}-option-${index}`}
role='option'
aria-selected={index === state.highlightedIndex}
>
<div className='flex items-center justify-between'>
<div className='flex-1'>
@@ -226,7 +288,14 @@ export function AutocompleteSearch({
variant='ghost'
size='sm'
className='h-6 text-muted-foreground text-xs hover:text-foreground'
onClick={() => onChange(parsedQuery.textSearch)}
onMouseDown={(e) => {
e.preventDefault()
const newQuery = parsedQuery.textSearch
handleInputChange(newQuery, newQuery.length)
if (inputRef.current) {
inputRef.current.focus()
}
}}
>
Clear all
</Button>

View File

@@ -1,11 +1,21 @@
import { useCallback, useMemo, useReducer, useRef } from 'react'
import { useCallback, useEffect, useMemo, useReducer, useRef } from 'react'
export interface Suggestion {
id: string
value: string
label: string
description?: string
category?: string
category?:
| 'filters'
| 'level'
| 'trigger'
| 'cost'
| 'date'
| 'duration'
| 'workflow'
| 'folder'
| 'workflowId'
| 'executionId'
}
export interface SuggestionGroup {
@@ -43,6 +53,7 @@ type AutocompleteAction =
| { type: 'SET_PREVIEW'; payload: { value: string; show: boolean } }
| { type: 'CLEAR_PREVIEW' }
| { type: 'SET_QUERY_VALIDITY'; payload: boolean }
| { type: 'SET_PENDING'; payload: string | null }
| { type: 'RESET' }
const initialState: AutocompleteState = {
@@ -126,6 +137,12 @@ function autocompleteReducer(
isValidQuery: action.payload,
}
case 'SET_PENDING':
return {
...state,
pendingQuery: action.payload,
}
case 'RESET':
return initialState
@@ -153,6 +170,16 @@ export function useAutocomplete({
const inputRef = useRef<HTMLInputElement>(null)
const dropdownRef = useRef<HTMLDivElement>(null)
const debounceRef = useRef<NodeJS.Timeout | null>(null)
const pointerDownInDropdownRef = useRef<boolean>(false)
const latestRef = useRef<{ inputValue: string; cursorPosition: number }>({
inputValue: '',
cursorPosition: 0,
})
useEffect(() => {
latestRef.current.inputValue = state.inputValue
latestRef.current.cursorPosition = state.cursorPosition
}, [state.inputValue, state.cursorPosition])
const currentSuggestion = useMemo(() => {
if (state.highlightedIndex >= 0 && state.suggestions[state.highlightedIndex]) {
@@ -162,13 +189,14 @@ export function useAutocomplete({
}, [state.highlightedIndex, state.suggestions])
const updateSuggestions = useCallback(() => {
const suggestionGroup = getSuggestions(state.inputValue, state.cursorPosition)
const { inputValue, cursorPosition } = latestRef.current
const suggestionGroup = getSuggestions(inputValue, cursorPosition)
if (suggestionGroup && suggestionGroup.suggestions.length > 0) {
dispatch({ type: 'OPEN_DROPDOWN', payload: suggestionGroup })
const firstSuggestion = suggestionGroup.suggestions[0]
const preview = generatePreview(firstSuggestion, state.inputValue, state.cursorPosition)
const preview = generatePreview(firstSuggestion, inputValue, cursorPosition)
dispatch({
type: 'HIGHLIGHT_SUGGESTION',
payload: { index: 0, preview },
@@ -176,7 +204,7 @@ export function useAutocomplete({
} else {
dispatch({ type: 'CLOSE_DROPDOWN' })
}
}, [state.inputValue, state.cursorPosition, getSuggestions, generatePreview])
}, [getSuggestions, generatePreview])
const handleInputChange = useCallback(
(value: string, cursorPosition: number) => {
@@ -193,7 +221,11 @@ export function useAutocomplete({
clearTimeout(debounceRef.current)
}
debounceRef.current = setTimeout(updateSuggestions, debounceMs)
dispatch({ type: 'SET_PENDING', payload: value })
debounceRef.current = setTimeout(() => {
dispatch({ type: 'SET_PENDING', payload: null })
updateSuggestions()
}, debounceMs)
},
[updateSuggestions, onQueryChange, validateQuery, debounceMs]
)
@@ -257,6 +289,11 @@ export function useAutocomplete({
})
}
if (debounceRef.current) {
clearTimeout(debounceRef.current)
debounceRef.current = null
}
dispatch({ type: 'SET_PENDING', payload: null })
setTimeout(updateSuggestions, 0)
},
[
@@ -273,6 +310,16 @@ export function useAutocomplete({
const handleKeyDown = useCallback(
(event: React.KeyboardEvent) => {
if (event.key === 'Enter') {
event.preventDefault()
if (state.isOpen) {
handleSuggestionSelect()
} else if (state.isValidQuery) {
updateSuggestions()
}
return
}
if (!state.isOpen) return
switch (event.key) {
@@ -290,11 +337,6 @@ export function useAutocomplete({
break
}
case 'Enter':
event.preventDefault()
handleSuggestionSelect()
break
case 'Escape':
event.preventDefault()
dispatch({ type: 'CLOSE_DROPDOWN' })
@@ -324,12 +366,37 @@ export function useAutocomplete({
updateSuggestions()
}, [updateSuggestions])
const handleBlur = useCallback(() => {
const handleBlur = useCallback((e?: React.FocusEvent) => {
const related = (e?.relatedTarget as Node) || document.activeElement
const isInsideDropdown = related && dropdownRef.current?.contains(related)
const isInsideInput = related && inputRef.current === related
if (pointerDownInDropdownRef.current || isInsideDropdown || isInsideInput) {
return
}
setTimeout(() => {
dispatch({ type: 'CLOSE_DROPDOWN' })
}, 150)
}, [])
useEffect(() => {
const dropdownEl = dropdownRef.current
if (!dropdownEl) return
const onPointerDown = () => {
pointerDownInDropdownRef.current = true
}
const onPointerUp = () => {
setTimeout(() => {
pointerDownInDropdownRef.current = false
}, 0)
}
dropdownEl.addEventListener('pointerdown', onPointerDown)
window.addEventListener('pointerup', onPointerUp)
return () => {
dropdownEl.removeEventListener('pointerdown', onPointerDown)
window.removeEventListener('pointerup', onPointerUp)
}
}, [])
return {
// State
state,

View File

@@ -12,6 +12,7 @@ import { AutocompleteSearch } from '@/app/workspace/[workspaceId]/logs/component
import { Sidebar } from '@/app/workspace/[workspaceId]/logs/components/sidebar/sidebar'
import { formatDate } from '@/app/workspace/[workspaceId]/logs/utils/format-date'
import { useDebounce } from '@/hooks/use-debounce'
import { useFolderStore } from '@/stores/folders/store'
import { useFilterStore } from '@/stores/logs/filters/store'
import type { LogsResponse, WorkflowLog } from '@/stores/logs/filters/types'
@@ -77,7 +78,6 @@ export default function Logs() {
triggers,
} = useFilterStore()
// Set workspace ID in store when component mounts or workspaceId changes
useEffect(() => {
setWorkspaceId(workspaceId)
}, [workspaceId])
@@ -94,11 +94,9 @@ export default function Logs() {
const scrollContainerRef = useRef<HTMLDivElement>(null)
const isInitialized = useRef<boolean>(false)
// Local search state with debouncing for the header
const [searchQuery, setSearchQuery] = useState(storeSearchQuery)
const debouncedSearchQuery = useDebounce(searchQuery, 300)
// Available data for suggestions
const [availableWorkflows, setAvailableWorkflows] = useState<string[]>([])
const [availableFolders, setAvailableFolders] = useState<string[]>([])
@@ -106,29 +104,63 @@ export default function Logs() {
const [isLive, setIsLive] = useState(false)
const [isRefreshing, setIsRefreshing] = useState(false)
const liveIntervalRef = useRef<NodeJS.Timeout | null>(null)
const isSearchOpenRef = useRef<boolean>(false)
// Sync local search query with store search query
useEffect(() => {
setSearchQuery(storeSearchQuery)
}, [storeSearchQuery])
const { fetchFolders, getFolderTree } = useFolderStore()
useEffect(() => {
const workflowNames = new Set<string>()
const folderNames = new Set<string>()
let cancelled = false
logs.forEach((log) => {
if (log.workflow?.name) {
workflowNames.add(log.workflow.name)
const fetchSuggestions = async () => {
try {
const res = await fetch(`/api/workflows?workspaceId=${encodeURIComponent(workspaceId)}`)
if (res.ok) {
const body = await res.json()
const names: string[] = Array.isArray(body?.data)
? body.data.map((w: any) => w?.name).filter(Boolean)
: []
if (!cancelled) setAvailableWorkflows(names)
} else {
if (!cancelled) setAvailableWorkflows([])
}
await fetchFolders(workspaceId)
const tree = getFolderTree(workspaceId)
const flatten = (nodes: any[], parentPath = ''): string[] => {
const out: string[] = []
for (const n of nodes) {
const path = parentPath ? `${parentPath} / ${n.name}` : n.name
out.push(path)
if (n.children?.length) out.push(...flatten(n.children, path))
}
return out
}
const folderPaths: string[] = Array.isArray(tree) ? flatten(tree) : []
if (!cancelled) setAvailableFolders(folderPaths)
} catch {
if (!cancelled) {
setAvailableWorkflows([])
setAvailableFolders([])
}
}
// Note: folder info would need to be added to the logs response
// For now, we'll leave folders empty
})
}
setAvailableWorkflows(Array.from(workflowNames).slice(0, 10)) // Limit to top 10
setAvailableFolders([]) // TODO: Add folder data to logs response
}, [logs])
if (workspaceId) {
fetchSuggestions()
}
return () => {
cancelled = true
}
}, [workspaceId, fetchFolders, getFolderTree])
// Update store when debounced search query changes
useEffect(() => {
if (isInitialized.current && debouncedSearchQuery !== storeSearchQuery) {
setStoreSearchQuery(debouncedSearchQuery)
@@ -142,12 +174,10 @@ export default function Logs() {
setIsSidebarOpen(true)
setIsDetailsLoading(true)
// Fetch details for current, previous, and next concurrently with cache
const currentId = log.id
const prevId = index > 0 ? logs[index - 1]?.id : undefined
const nextId = index < logs.length - 1 ? logs[index + 1]?.id : undefined
// Abort any previous details fetch batch
if (detailsAbortRef.current) {
try {
detailsAbortRef.current.abort()
@@ -167,7 +197,6 @@ export default function Logs() {
if (nextId && !detailsCacheRef.current.has(nextId))
idsToFetch.push({ id: nextId, merge: false })
// Merge cached current immediately
if (cachedCurrent) {
setSelectedLog((prev) =>
prev && prev.id === currentId
@@ -207,7 +236,6 @@ export default function Logs() {
setSelectedLogIndex(nextIndex)
const nextLog = logs[nextIndex]
setSelectedLog(nextLog)
// Abort any previous details fetch batch
if (detailsAbortRef.current) {
try {
detailsAbortRef.current.abort()
@@ -265,7 +293,6 @@ export default function Logs() {
setSelectedLogIndex(prevIndex)
const prevLog = logs[prevIndex]
setSelectedLog(prevLog)
// Abort any previous details fetch batch
if (detailsAbortRef.current) {
try {
detailsAbortRef.current.abort()
@@ -340,19 +367,16 @@ export default function Logs() {
setIsFetchingMore(true)
}
// Get fresh query params by calling buildQueryParams from store
const { buildQueryParams: getCurrentQueryParams } = useFilterStore.getState()
const queryParams = getCurrentQueryParams(pageNum, LOGS_PER_PAGE)
// Parse the current search query for enhanced filtering
const parsedQuery = parseQuery(searchQuery)
const { searchQuery: currentSearchQuery } = useFilterStore.getState()
const parsedQuery = parseQuery(currentSearchQuery)
const enhancedParams = queryToApiParams(parsedQuery)
// Add enhanced search parameters to the query string
const allParams = new URLSearchParams(queryParams)
Object.entries(enhancedParams).forEach(([key, value]) => {
if (key === 'triggers' && allParams.has('triggers')) {
// Combine triggers from both sources
const existingTriggers = allParams.get('triggers')?.split(',') || []
const searchTriggers = value.split(',')
const combined = [...new Set([...existingTriggers, ...searchTriggers])]
@@ -429,7 +453,27 @@ export default function Logs() {
setIsLive(!isLive)
}
// Initialize filters from URL on mount
const handleExport = async () => {
const params = new URLSearchParams()
params.set('workspaceId', workspaceId)
if (level !== 'all') params.set('level', level)
if (triggers.length > 0) params.set('triggers', triggers.join(','))
if (workflowIds.length > 0) params.set('workflowIds', workflowIds.join(','))
if (folderIds.length > 0) params.set('folderIds', folderIds.join(','))
const parsed = parseQuery(debouncedSearchQuery)
const extra = queryToApiParams(parsed)
Object.entries(extra).forEach(([k, v]) => params.set(k, v))
const url = `/api/logs/export?${params.toString()}`
const a = document.createElement('a')
a.href = url
a.download = 'logs_export.csv'
document.body.appendChild(a)
a.click()
a.remove()
}
useEffect(() => {
if (!isInitialized.current) {
isInitialized.current = true
@@ -437,7 +481,6 @@ export default function Logs() {
}
}, [initializeFromURL])
// Handle browser navigation events (back/forward)
useEffect(() => {
const handlePopState = () => {
initializeFromURL()
@@ -447,43 +490,34 @@ export default function Logs() {
return () => window.removeEventListener('popstate', handlePopState)
}, [initializeFromURL])
// Single useEffect to handle both initial load and filter changes
useEffect(() => {
// Only fetch logs after initialization
if (!isInitialized.current) {
return
}
// Reset pagination and fetch from beginning
setPage(1)
setHasMore(true)
// Inline fetch logic to avoid circular dependency
const fetchWithFilters = async () => {
try {
setLoading(true)
// Build query params inline to avoid dependency issues
const params = new URLSearchParams()
params.set('details', 'basic')
params.set('limit', LOGS_PER_PAGE.toString())
params.set('offset', '0') // Always start from page 1
params.set('workspaceId', workspaceId)
// Parse the search query for enhanced filtering
const parsedQuery = parseQuery(searchQuery)
const parsedQuery = parseQuery(debouncedSearchQuery)
const enhancedParams = queryToApiParams(parsedQuery)
// Add filters from store
if (level !== 'all') params.set('level', level)
if (triggers.length > 0) params.set('triggers', triggers.join(','))
if (workflowIds.length > 0) params.set('workflowIds', workflowIds.join(','))
if (folderIds.length > 0) params.set('folderIds', folderIds.join(','))
// Add enhanced search parameters (these may override some store filters)
Object.entries(enhancedParams).forEach(([key, value]) => {
if (key === 'triggers' && params.has('triggers')) {
// Combine triggers from both sources
const storeTriggers = params.get('triggers')?.split(',') || []
const searchTriggers = value.split(',')
const combined = [...new Set([...storeTriggers, ...searchTriggers])]
@@ -493,7 +527,6 @@ export default function Logs() {
}
})
// Add time range filter
if (timeRange !== 'All time') {
const now = new Date()
let startDate: Date
@@ -532,7 +565,7 @@ export default function Logs() {
}
fetchWithFilters()
}, [workspaceId, timeRange, level, workflowIds, folderIds, searchQuery, triggers])
}, [workspaceId, timeRange, level, workflowIds, folderIds, debouncedSearchQuery, triggers])
const loadMoreLogs = useCallback(() => {
if (!isFetchingMore && hasMore) {
@@ -598,6 +631,7 @@ export default function Logs() {
useEffect(() => {
const handleKeyDown = (e: KeyboardEvent) => {
if (isSearchOpenRef.current) return
if (logs.length === 0) return
if (selectedLogIndex === -1 && (e.key === 'ArrowUp' || e.key === 'ArrowDown')) {
@@ -651,9 +685,12 @@ export default function Logs() {
placeholder='Search logs...'
availableWorkflows={availableWorkflows}
availableFolders={availableFolders}
onOpenChange={(open) => {
isSearchOpenRef.current = open
}}
/>
<div className='flex flex-shrink-0 items-center gap-3'>
<div className='ml-auto flex flex-shrink-0 items-center gap-3'>
<Tooltip>
<TooltipTrigger asChild>
<Button
@@ -674,6 +711,34 @@ export default function Logs() {
<TooltipContent>{isRefreshing ? 'Refreshing...' : 'Refresh'}</TooltipContent>
</Tooltip>
<Tooltip>
<TooltipTrigger asChild>
<Button
variant='ghost'
size='icon'
onClick={handleExport}
className='h-9 rounded-[11px] hover:bg-secondary'
aria-label='Export CSV'
>
{/* Download icon */}
<svg
xmlns='http://www.w3.org/2000/svg'
viewBox='0 0 24 24'
fill='none'
stroke='currentColor'
strokeWidth='2'
className='h-5 w-5'
>
<path d='M21 15v4a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2v-4' />
<polyline points='7 10 12 15 17 10' />
<line x1='12' y1='15' x2='12' y2='3' />
</svg>
<span className='sr-only'>Export CSV</span>
</Button>
</TooltipTrigger>
<TooltipContent>Export CSV</TooltipContent>
</Tooltip>
<Button
className={`group h-9 gap-2 rounded-[11px] border bg-card text-card-foreground shadow-xs transition-all duration-200 hover:border-[var(--brand-primary-hex)] hover:bg-[var(--brand-primary-hex)] hover:text-white ${
isLive

View File

@@ -58,12 +58,21 @@ export function SlackChannelSelector({
body: JSON.stringify({ credential, workflowId }),
})
if (!res.ok) throw new Error(`HTTP error! status: ${res.status}`)
if (!res.ok) {
const errorData = await res
.json()
.catch(() => ({ error: `HTTP error! status: ${res.status}` }))
setError(errorData.error || `HTTP error! status: ${res.status}`)
setChannels([])
setInitialFetchDone(true)
return
}
const data = await res.json()
if (data.error) {
setError(data.error)
setChannels([])
setInitialFetchDone(true)
} else {
setChannels(data.channels)
setInitialFetchDone(true)
@@ -72,6 +81,7 @@ export function SlackChannelSelector({
if ((err as Error).name === 'AbortError') return
setError((err as Error).message)
setChannels([])
setInitialFetchDone(true)
} finally {
setLoading(false)
}

View File

@@ -432,7 +432,7 @@ export function ComboBox({
style={{ right: '42px' }}
>
<div className='w-full truncate text-foreground' style={{ scrollbarWidth: 'none' }}>
{formatDisplayText(displayValue, true)}
{formatDisplayText(displayValue)}
</div>
</div>
{/* Chevron button */}

View File

@@ -100,7 +100,9 @@ export function GoogleDrivePicker({
if (response.ok) {
const data = await response.json()
setCredentials(data.credentials)
// Do not auto-select. Respect persisted credential via prop when provided.
if (credentialId && !data.credentials.some((c: any) => c.id === credentialId)) {
setSelectedCredentialId('')
}
}
} catch (error) {
logger.error('Error fetching credentials:', { error })
@@ -151,6 +153,14 @@ export function GoogleDrivePicker({
onChange('')
onFileInfoChange?.(null)
}
if (response.status === 401) {
logger.info('Credential unauthorized (401), clearing selection and prompting re-auth')
setSelectedFileId('')
onChange('')
onFileInfoChange?.(null)
setShowOAuthModal(true)
}
}
return null
} catch (error) {

View File

@@ -406,7 +406,7 @@ export function LongInput({
overflow: 'hidden',
}}
>
{formatDisplayText(value?.toString() ?? '', true)}
{formatDisplayText(value?.toString() ?? '')}
</div>
{/* Wand Button */}

View File

@@ -417,7 +417,7 @@ export function ShortInput({
>
{password && !isFocused
? '•'.repeat(value?.toString().length ?? 0)
: formatDisplayText(value?.toString() ?? '', true)}
: formatDisplayText(value?.toString() ?? '')}
</div>
</div>

View File

@@ -471,8 +471,7 @@ export function FieldFormat({
style={{ scrollbarWidth: 'none', minWidth: 'fit-content' }}
>
{formatDisplayText(
(localValues[field.id] ?? field.value ?? '')?.toString(),
true
(localValues[field.id] ?? field.value ?? '')?.toString()
)}
</div>
</div>

View File

@@ -337,7 +337,7 @@ export function McpServerModal({ open, onOpenChange, onServerCreated }: McpServe
className='whitespace-nowrap'
style={{ transform: `translateX(-${urlScrollLeft}px)` }}
>
{formatDisplayText(formData.url || '', true)}
{formatDisplayText(formData.url || '')}
</div>
</div>
</div>
@@ -389,7 +389,7 @@ export function McpServerModal({ open, onOpenChange, onServerCreated }: McpServe
transform: `translateX(-${headerScrollLeft[`key-${index}`] || 0}px)`,
}}
>
{formatDisplayText(key || '', true)}
{formatDisplayText(key || '')}
</div>
</div>
</div>
@@ -417,7 +417,7 @@ export function McpServerModal({ open, onOpenChange, onServerCreated }: McpServe
transform: `translateX(-${headerScrollLeft[`value-${index}`] || 0}px)`,
}}
>
{formatDisplayText(value || '', true)}
{formatDisplayText(value || '')}
</div>
</div>
</div>

View File

@@ -399,7 +399,7 @@ export function MCP() {
className='whitespace-nowrap'
style={{ transform: `translateX(-${urlScrollLeft}px)` }}
>
{formatDisplayText(formData.url || '', true)}
{formatDisplayText(formData.url || '')}
</div>
</div>
@@ -464,7 +464,7 @@ export function MCP() {
transform: `translateX(-${headerScrollLeft[`key-${index}`] || 0}px)`,
}}
>
{formatDisplayText(key || '', true)}
{formatDisplayText(key || '')}
</div>
</div>
</div>
@@ -500,7 +500,7 @@ export function MCP() {
transform: `translateX(-${headerScrollLeft[`value-${index}`] || 0}px)`,
}}
>
{formatDisplayText(value || '', true)}
{formatDisplayText(value || '')}
</div>
</div>
</div>
@@ -778,7 +778,7 @@ export function MCP() {
className='whitespace-nowrap'
style={{ transform: `translateX(-${urlScrollLeft}px)` }}
>
{formatDisplayText(formData.url || '', true)}
{formatDisplayText(formData.url || '')}
</div>
</div>
@@ -845,7 +845,7 @@ export function MCP() {
transform: `translateX(-${headerScrollLeft[`key-${index}`] || 0}px)`,
}}
>
{formatDisplayText(key || '', true)}
{formatDisplayText(key || '')}
</div>
</div>
</div>
@@ -881,7 +881,7 @@ export function MCP() {
transform: `translateX(-${headerScrollLeft[`value-${index}`] || 0}px)`,
}}
>
{formatDisplayText(value || '', true)}
{formatDisplayText(value || '')}
</div>
</div>
</div>

View File

@@ -16,7 +16,6 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
bgColor: '#E0E0E0',
icon: MicrosoftSharepointIcon,
subBlocks: [
// Operation selector
{
id: 'operation',
title: 'Operation',
@@ -29,9 +28,9 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
{ label: 'Create List', id: 'create_list' },
{ label: 'Read List', id: 'read_list' },
{ label: 'Update List', id: 'update_list' },
{ label: 'Add List Items', id: 'add_list_items' },
],
},
// Sharepoint Credentials
{
id: 'credential',
title: 'Microsoft Account',
@@ -81,6 +80,7 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
'create_list',
'read_list',
'update_list',
'add_list_items',
],
},
},
@@ -111,7 +111,7 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
layout: 'full',
placeholder: 'Enter list ID (GUID). Required for Update; optional for Read.',
canonicalParamId: 'listId',
condition: { field: 'operation', value: ['read_list', 'update_list'] },
condition: { field: 'operation', value: ['read_list', 'update_list', 'add_list_items'] },
},
{
@@ -178,7 +178,7 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
layout: 'full',
placeholder: 'Enter list item fields',
canonicalParamId: 'listItemFields',
condition: { field: 'operation', value: 'update_list' },
condition: { field: 'operation', value: ['update_list', 'add_list_items'] },
},
],
tools: {
@@ -189,6 +189,7 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
'sharepoint_create_list',
'sharepoint_get_list',
'sharepoint_update_list',
'sharepoint_add_list_items',
],
config: {
tool: (params) => {
@@ -205,6 +206,8 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
return 'sharepoint_get_list'
case 'update_list':
return 'sharepoint_update_list'
case 'add_list_items':
return 'sharepoint_add_list_items'
default:
throw new Error(`Invalid Sharepoint operation: ${params.operation}`)
}
@@ -212,7 +215,6 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
params: (params) => {
const { credential, siteSelector, manualSiteId, mimeType, ...rest } = params
// Use siteSelector if provided, otherwise use manualSiteId
const effectiveSiteId = (siteSelector || manualSiteId || '').trim()
const {
@@ -234,12 +236,10 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
})
}
}
// Ensure listItemFields is an object for the tool schema
if (typeof parsedItemFields !== 'object' || parsedItemFields === null) {
parsedItemFields = undefined
}
// Sanitize item ID (required by tool)
const rawItemId = providedItemId ?? listItemId
const sanitizedItemId =
rawItemId === undefined || rawItemId === null
@@ -252,10 +252,9 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
return undefined
}
// Debug logging for update_list param mapping
if (others.operation === 'update_list') {
if (others.operation === 'update_list' || others.operation === 'add_list_items') {
try {
logger.info('SharepointBlock update_list param check', {
logger.info('SharepointBlock list item param check', {
siteId: effectiveSiteId || undefined,
listId: (others as any)?.listId,
listTitle: (others as any)?.listTitle,
@@ -275,7 +274,6 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
pageSize: others.pageSize ? Number.parseInt(others.pageSize as string, 10) : undefined,
mimeType: mimeType,
...others,
// Map to tool param names
itemId: sanitizedItemId,
listItemFields: parsedItemFields,
includeColumns: coerceBoolean(includeColumns),
@@ -287,26 +285,20 @@ export const SharepointBlock: BlockConfig<SharepointResponse> = {
inputs: {
operation: { type: 'string', description: 'Operation to perform' },
credential: { type: 'string', description: 'Microsoft account credential' },
// Create Page operation inputs
pageName: { type: 'string', description: 'Page name' },
pageContent: { type: 'string', description: 'Page content' },
pageTitle: { type: 'string', description: 'Page title' },
// Read Page operation inputs
pageId: { type: 'string', description: 'Page ID' },
// List operation inputs
siteSelector: { type: 'string', description: 'Site selector' },
manualSiteId: { type: 'string', description: 'Manual site ID' },
pageSize: { type: 'number', description: 'Results per page' },
// Create List operation inputs
listDisplayName: { type: 'string', description: 'List display name' },
listDescription: { type: 'string', description: 'List description' },
listTemplate: { type: 'string', description: 'List template' },
// Read List operation inputs
listId: { type: 'string', description: 'List ID' },
listTitle: { type: 'string', description: 'List title' },
includeColumns: { type: 'boolean', description: 'Include columns in response' },
includeItems: { type: 'boolean', description: 'Include items in response' },
// Update List Item operation inputs
listItemId: { type: 'string', description: 'List item ID' },
listItemFields: { type: 'string', description: 'List item fields' },
},

View File

@@ -1,33 +1,19 @@
'use client'
import type { ReactNode } from 'react'
import { VariableManager } from '@/lib/variables/variable-manager'
/**
* Formats text by highlighting block references (<...>) and environment variables ({{...}})
* Used in code editor, long inputs, and short inputs for consistent syntax highlighting
*
* @param text The text to format
* @param stripQuotes Whether to strip unnecessary quotes from the text (for plain text variables)
*/
export function formatDisplayText(text: string, stripQuotes = false): ReactNode[] {
export function formatDisplayText(text: string): ReactNode[] {
if (!text) return []
// If stripQuotes is true, remove surrounding quotes that might have been added
// This is needed when displaying plain type variables in inputs
let processedText = text
if (stripQuotes && typeof text === 'string') {
// Use VariableManager to determine if quotes should be stripped
if (VariableManager.shouldStripQuotesForDisplay(text)) {
processedText = text.slice(1, -1)
}
}
// Split the text by both tag patterns <something.something> and {{ENV_VAR}}
const parts = processedText.split(/(<[^>]+>|\{\{[^}]+\}\})/g)
const parts = text.split(/(<[^>]+>|\{\{[^}]+\}\})/g)
return parts.map((part, index) => {
// Handle block references
if (part.startsWith('<') && part.endsWith('>')) {
return (
<span key={index} className='text-blue-500'>
@@ -36,7 +22,6 @@ export function formatDisplayText(text: string, stripQuotes = false): ReactNode[
)
}
// Handle environment variables
if (part.match(/^\{\{[^}]+\}\}$/)) {
return (
<span key={index} className='text-blue-500'>

View File

@@ -169,6 +169,12 @@ export function queryToApiParams(parsedQuery: ParsedQuery): Record<string, strin
}
break
case 'folder':
if (filter.operator === '=') {
params.folderName = filter.value as string
}
break
case 'execution':
if (filter.operator === '=' && parsedQuery.textSearch) {
params.search = `${parsedQuery.textSearch} ${filter.value}`.trim()
@@ -177,6 +183,18 @@ export function queryToApiParams(parsedQuery: ParsedQuery): Record<string, strin
}
break
case 'workflowId':
if (filter.operator === '=') {
params.workflowIds = String(filter.value)
}
break
case 'executionId':
if (filter.operator === '=') {
params.executionId = String(filter.value)
}
break
case 'date':
if (filter.operator === '=' && filter.value === 'today') {
const today = new Date()

View File

@@ -43,10 +43,10 @@ describe('SearchSuggestions', () => {
expect(result?.suggestions.some((s) => s.value === 'level:')).toBe(true)
})
it.concurrent('should return filter key suggestions for partial matches', () => {
it.concurrent('should return value suggestions for uniquely identified partial keys', () => {
const result = engine.getSuggestions('lev', 3)
expect(result?.type).toBe('filter-keys')
expect(result?.suggestions.some((s) => s.value === 'level:')).toBe(true)
expect(result?.type).toBe('filter-values')
expect(result?.suggestions.some((s) => s.value === 'error' || s.value === 'info')).toBe(true)
})
it.concurrent('should return filter value suggestions after colon', () => {
@@ -87,11 +87,16 @@ describe('SearchSuggestions', () => {
expect(result?.suggestions.length).toBeGreaterThan(0)
})
it.concurrent('should handle partial filter keys after existing filters', () => {
const result = engine.getSuggestions('level:error lev', 15)
expect(result?.type).toBe('filter-keys')
expect(result?.suggestions.some((s) => s.value === 'level:')).toBe(true)
})
it.concurrent(
'should surface value suggestions for uniquely matched partial keys after existing filters',
() => {
const result = engine.getSuggestions('level:error lev', 15)
expect(result?.type).toBe('filter-values')
expect(result?.suggestions.some((s) => s.value === 'error' || s.value === 'info')).toBe(
true
)
}
)
it.concurrent('should handle filter values after existing filters', () => {
const result = engine.getSuggestions('level:error level:', 18)

View File

@@ -62,6 +62,10 @@ export const FILTER_DEFINITIONS: FilterDefinition[] = [
{ value: 'this-week', label: 'This week', description: "This week's logs" },
{ value: 'last-week', label: 'Last week', description: "Last week's logs" },
{ value: 'this-month', label: 'This month', description: "This month's logs" },
// Friendly relative range shortcuts like Stripe
{ value: '"> 2 days ago"', label: '> 2 days ago', description: 'Newer than 2 days' },
{ value: '"> last week"', label: '> last week', description: 'Newer than last week' },
{ value: '">=2025/08/31"', label: '>= YYYY/MM/DD', description: 'Start date (YYYY/MM/DD)' },
],
},
{
@@ -228,6 +232,27 @@ export class SearchSuggestions {
}
}
// Always include id-based keys (workflowId, executionId)
const idKeys: Array<{ key: string; label: string; description: string }> = [
{ key: 'workflowId', label: 'Workflow ID', description: 'Filter by workflowId' },
{ key: 'executionId', label: 'Execution ID', description: 'Filter by executionId' },
]
for (const idDef of idKeys) {
const matchesIdKey =
!partialInput ||
idDef.key.toLowerCase().startsWith(partialInput.toLowerCase()) ||
idDef.label.toLowerCase().startsWith(partialInput.toLowerCase())
if (matchesIdKey) {
suggestions.push({
id: `filter-key-${idDef.key}`,
value: `${idDef.key}:`,
label: idDef.label,
description: idDef.description,
category: 'filters',
})
}
}
return suggestions
}
@@ -251,7 +276,7 @@ export class SearchSuggestions {
value: option.value,
label: option.label,
description: option.description,
category: filterKey,
category: filterKey as any,
})
}
}
@@ -294,6 +319,18 @@ export class SearchSuggestions {
return suggestions.slice(0, 8)
}
if (filterKey === 'workflowId' || filterKey === 'executionId') {
const example = partialInput || '"1234..."'
suggestions.push({
id: `filter-value-${filterKey}-example`,
value: example,
label: 'Enter exact ID',
description: 'Use quotes for the full ID',
category: filterKey,
})
return suggestions
}
return suggestions
}
@@ -321,6 +358,26 @@ export class SearchSuggestions {
switch (context.type) {
case 'initial':
case 'filter-key-partial': {
if (context.type === 'filter-key-partial' && context.partialInput) {
const matches = FILTER_DEFINITIONS.filter(
(f) =>
f.key.toLowerCase().startsWith(context.partialInput!.toLowerCase()) ||
f.label.toLowerCase().startsWith(context.partialInput!.toLowerCase())
)
if (matches.length === 1) {
const key = matches[0].key
const filterValueSuggestions = this.getFilterValueSuggestions(key, '')
if (filterValueSuggestions.length > 0) {
return {
type: 'filter-values',
filterKey: key,
suggestions: filterValueSuggestions,
}
}
}
}
const filterKeySuggestions = this.getFilterKeySuggestions(context.partialInput)
return filterKeySuggestions.length > 0
? {
@@ -367,9 +424,13 @@ export class SearchSuggestions {
context.startPosition !== undefined &&
context.endPosition !== undefined
) {
// Replace partial text: "lev" -> "level:"
const before = currentValue.slice(0, context.startPosition)
const after = currentValue.slice(context.endPosition)
const isFilterValue =
!!suggestion.category && FILTER_DEFINITIONS.some((f) => f.key === suggestion.category)
if (isFilterValue) {
return `${before}${suggestion.category}:${suggestion.value}${after}`
}
return `${before}${suggestion.value}${after}`
}
@@ -378,23 +439,18 @@ export class SearchSuggestions {
context.startPosition !== undefined &&
context.endPosition !== undefined
) {
// Replace partial filter value: "level:err" -> "level:error"
const before = currentValue.slice(0, context.startPosition)
const after = currentValue.slice(context.endPosition)
return `${before}${suggestion.value}${after}`
}
// For all other cases, append at the end with smart spacing:
let result = currentValue
if (currentValue.endsWith(':')) {
// Direct append for filter values: "level:" + "error" = "level:error"
result += suggestion.value
} else if (currentValue.endsWith(' ')) {
// Already has space, direct append: "level:error " + "trigger:" = "level:error trigger:"
result += suggestion.value
} else {
// Need space: "level:error" + " " + "trigger:" = "level:error trigger:"
result += ` ${suggestion.value}`
}

View File

@@ -215,21 +215,4 @@ describe('VariableManager', () => {
expect(VariableManager.formatForCodeContext(undefined, 'number')).toBe('undefined')
})
})
describe('shouldStripQuotesForDisplay', () => {
it.concurrent('should identify strings that need quotes stripped', () => {
expect(VariableManager.shouldStripQuotesForDisplay('"hello world"')).toBe(true)
expect(VariableManager.shouldStripQuotesForDisplay("'hello world'")).toBe(true)
expect(VariableManager.shouldStripQuotesForDisplay('hello world')).toBe(false)
expect(VariableManager.shouldStripQuotesForDisplay('""')).toBe(false) // Too short
expect(VariableManager.shouldStripQuotesForDisplay("''")).toBe(false) // Too short
})
it.concurrent('should handle edge cases', () => {
expect(VariableManager.shouldStripQuotesForDisplay('')).toBe(false)
expect(VariableManager.shouldStripQuotesForDisplay(null as any)).toBe(false)
expect(VariableManager.shouldStripQuotesForDisplay(undefined as any)).toBe(false)
expect(VariableManager.shouldStripQuotesForDisplay(42 as any)).toBe(false)
})
})
})

View File

@@ -225,7 +225,6 @@ export class VariableManager {
return typeof value === 'string' ? value : String(value)
}
if (type === 'string') {
// For backwards compatibility, add quotes only for string type in code context
return typeof value === 'string'
? JSON.stringify(value)
: VariableManager.formatValue(value, type, 'code')
@@ -233,16 +232,4 @@ export class VariableManager {
return VariableManager.formatValue(value, type, 'code')
}
/**
* Determines whether quotes should be stripped for display.
*/
static shouldStripQuotesForDisplay(value: string): boolean {
if (!value || typeof value !== 'string') return false
return (
(value.startsWith('"') && value.endsWith('"') && value.length > 2) ||
(value.startsWith("'") && value.endsWith("'") && value.length > 2)
)
}
}

View File

@@ -1,22 +0,0 @@
// One-off manual check: verifies createApiKey behaves correctly for
// self-hosted deployments where API_ENCRYPTION_KEY is not configured.
import { createApiKey } from './lib/api-key/auth'

console.log('=== Testing self-hosting scenario (no API_ENCRYPTION_KEY) ===')

// Check environment
console.log('ENCRYPTION_KEY:', `${process.env.ENCRYPTION_KEY?.slice(0, 10)}...`)
console.log('API_ENCRYPTION_KEY:', process.env.API_ENCRYPTION_KEY)

// Ensure API_ENCRYPTION_KEY is not set.
// BUG FIX: assigning `undefined` to a process.env property coerces it to the
// string "undefined" in Node, leaving the variable *set*. `delete` is the
// only way to actually unset it.
delete process.env.API_ENCRYPTION_KEY

console.log('API_ENCRYPTION_KEY after delete:', process.env.API_ENCRYPTION_KEY)

try {
  const result = await createApiKey(true)
  console.log('Key generated:', !!result.key)
  console.log('Encrypted key generated:', !!result.encryptedKey)
  console.log('Encrypted key value:', result.encryptedKey)
  console.log('Are they the same?', result.key === result.encryptedKey)
  console.log('Would validation pass?', !!result.encryptedKey)
} catch (error) {
  console.error('Error in createApiKey:', error)
}

View File

@@ -145,6 +145,7 @@ import { redditGetCommentsTool, redditGetPostsTool, redditHotPostsTool } from '@
import { s3GetObjectTool } from '@/tools/s3'
import { searchTool as serperSearch } from '@/tools/serper'
import {
sharepointAddListItemTool,
sharepointCreateListTool,
sharepointCreatePageTool,
sharepointGetListTool,
@@ -370,6 +371,7 @@ export const tools: Record<string, ToolConfig> = {
sharepoint_get_list: sharepointGetListTool,
sharepoint_create_list: sharepointCreateListTool,
sharepoint_update_list: sharepointUpdateListItemTool,
sharepoint_add_list_items: sharepointAddListItemTool,
// Provider chat tools
// Provider chat tools - handled separately in agent blocks
}

View File

@@ -0,0 +1,167 @@
import { createLogger } from '@/lib/logs/console/logger'
import type { SharepointAddListItemResponse, SharepointToolParams } from '@/tools/sharepoint/types'
import type { ToolConfig } from '@/tools/types'
const logger = createLogger('SharePointAddListItem')
/**
 * Tool config for creating a single item in a SharePoint list via the
 * Microsoft Graph API (POST /sites/{siteId}/lists/{listId}/items).
 *
 * NOTE(review): the id is plural ('add_list_items') but each invocation
 * creates exactly one item.
 */
export const addListItemTool: ToolConfig<SharepointToolParams, SharepointAddListItemResponse> = {
  id: 'sharepoint_add_list_items',
  name: 'Add SharePoint List Item',
  description: 'Add a new item to a SharePoint list',
  version: '1.0',
  oauth: {
    required: true,
    provider: 'sharepoint',
    // Sites.ReadWrite.All is required for item creation; offline_access
    // enables refresh tokens so the credential keeps working.
    additionalScopes: ['openid', 'profile', 'email', 'Sites.ReadWrite.All', 'offline_access'],
  },
  params: {
    accessToken: {
      type: 'string',
      required: true,
      visibility: 'hidden',
      description: 'The access token for the SharePoint API',
    },
    siteSelector: {
      type: 'string',
      required: false,
      visibility: 'user-only',
      description: 'Select the SharePoint site',
    },
    siteId: {
      type: 'string',
      required: false,
      visibility: 'hidden',
      description: 'The ID of the SharePoint site (internal use)',
    },
    listId: {
      type: 'string',
      required: true,
      visibility: 'user-only',
      description: 'The ID of the list to add the item to',
    },
    listItemFields: {
      type: 'object',
      required: true,
      visibility: 'user-only',
      description: 'Field values for the new list item',
    },
  },
  request: {
    // Builds the Graph "items" endpoint. Falls back to the 'root' site when
    // neither an explicit siteId nor a siteSelector value is provided.
    url: (params) => {
      const siteId = params.siteId || params.siteSelector || 'root'
      if (!params.listId) {
        throw new Error('listId must be provided')
      }
      const listSegment = params.listId
      return `https://graph.microsoft.com/v1.0/sites/${siteId}/lists/${listSegment}/items`
    },
    method: 'POST',
    headers: (params) => ({
      Authorization: `Bearer ${params.accessToken}`,
      'Content-Type': 'application/json',
      Accept: 'application/json',
    }),
    // Normalizes the user-supplied field map into the Graph request body
    // shape ({ fields: {...} }), dropping read-only system columns.
    body: (params) => {
      if (!params.listItemFields || Object.keys(params.listItemFields).length === 0) {
        throw new Error('listItemFields must not be empty')
      }
      // Accept either a bare field map or a single-key { fields: {...} }
      // wrapper (callers sometimes pass the Graph body shape directly).
      const providedFields =
        typeof params.listItemFields === 'object' &&
        params.listItemFields !== null &&
        'fields' in (params.listItemFields as Record<string, unknown>) &&
        Object.keys(params.listItemFields as Record<string, unknown>).length === 1
          ? ((params.listItemFields as any).fields as Record<string, unknown>)
          : (params.listItemFields as Record<string, unknown>)
      if (!providedFields || Object.keys(providedFields).length === 0) {
        throw new Error('No fields provided to create the SharePoint list item')
      }
      // SharePoint system columns that cannot be set on create; silently
      // dropped below (with a warning) rather than failing the whole call.
      const readOnlyFields = new Set<string>([
        'Id',
        'id',
        'UniqueId',
        'GUID',
        'ContentTypeId',
        'Created',
        'Modified',
        'Author',
        'Editor',
        'CreatedBy',
        'ModifiedBy',
        'AuthorId',
        'EditorId',
        '_UIVersionString',
        'Attachments',
        'FileRef',
        'FileDirRef',
        'FileLeafRef',
      ])
      const entries = Object.entries(providedFields)
      const creatableEntries = entries.filter(([key]) => !readOnlyFields.has(key))
      // Warn when fields were removed so users can see why a column was not set.
      if (creatableEntries.length !== entries.length) {
        const removed = entries.filter(([key]) => readOnlyFields.has(key)).map(([key]) => key)
        logger.warn('Removed read-only SharePoint fields from create', {
          removed,
        })
      }
      // If everything was filtered out, fail loudly instead of creating an
      // empty item.
      if (creatableEntries.length === 0) {
        const requestedKeys = Object.keys(providedFields)
        throw new Error(
          `All provided fields are read-only and cannot be set: ${requestedKeys.join(', ')}`
        )
      }
      const sanitizedFields = Object.fromEntries(creatableEntries)
      logger.info('Creating SharePoint list item', {
        listId: params.listId,
        fieldsKeys: Object.keys(sanitizedFields),
      })
      return {
        fields: sanitizedFields,
      }
    },
  },
  // Shapes the Graph response into the tool's output contract.
  // NOTE(review): always returns success: true and never checks response.ok —
  // presumably non-2xx responses are surfaced by the shared tool executor
  // before this runs; confirm before relying on `success` here.
  transformResponse: async (response: Response, params) => {
    let data: any
    try {
      data = await response.json()
    } catch {
      // Graph may return a non-JSON or empty body; proceed with undefined.
      data = undefined
    }
    const itemId: string | undefined = data?.id
    // Fall back to the fields the caller requested when the response omits them.
    const fields: Record<string, unknown> | undefined = data?.fields || params?.listItemFields
    return {
      success: true,
      output: {
        item: {
          // 'unknown' marks a response that did not echo back an item id.
          id: itemId || 'unknown',
          fields,
        },
      },
    }
  },
  outputs: {
    item: {
      type: 'object',
      description: 'Created SharePoint list item',
      properties: {
        id: { type: 'string', description: 'Item ID' },
        fields: { type: 'object', description: 'Field values for the new item' },
      },
    },
  },
}

View File

@@ -1,3 +1,4 @@
import { addListItemTool } from '@/tools/sharepoint/add_list_items'
import { createListTool } from '@/tools/sharepoint/create_list'
import { createPageTool } from '@/tools/sharepoint/create_page'
import { getListTool } from '@/tools/sharepoint/get_list'
@@ -11,3 +12,4 @@ export const sharepointGetListTool = getListTool
export const sharepointListSitesTool = listSitesTool
export const sharepointReadPageTool = readPageTool
export const sharepointUpdateListItemTool = updateListItemTool
export const sharepointAddListItemTool = addListItemTool

View File

@@ -259,6 +259,7 @@ export type SharepointResponse =
| SharepointGetListResponse
| SharepointCreateListResponse
| SharepointUpdateListItemResponse
| SharepointAddListItemResponse
export interface SharepointGetListResponse extends ToolResponse {
output: {
@@ -282,3 +283,12 @@ export interface SharepointUpdateListItemResponse extends ToolResponse {
}
}
}
export interface SharepointAddListItemResponse extends ToolResponse {
output: {
item: {
id: string
fields?: Record<string, unknown>
}
}
}

View File

@@ -8,7 +8,7 @@
"@t3-oss/env-nextjs": "0.13.4",
"@vercel/analytics": "1.5.0",
"bcryptjs": "3.0.2",
"drizzle-orm": "^0.41.0",
"drizzle-orm": "^0.44.5",
"geist": "^1.4.2",
"mongodb": "6.19.0",
"postgres": "^3.4.5",
@@ -21,7 +21,7 @@
"@biomejs/biome": "2.0.0-beta.5",
"@next/env": "15.4.1",
"@types/bcryptjs": "3.0.0",
"drizzle-kit": "^0.31.1",
"drizzle-kit": "^0.31.4",
"husky": "9.1.7",
"lint-staged": "16.0.0",
"turbo": "2.5.6",
@@ -224,7 +224,7 @@
"typescript": "^5.7.3",
},
"peerDependencies": {
"drizzle-orm": "^0.41.0",
"drizzle-orm": "^0.44.5",
"postgres": "^3.4.5",
},
},
@@ -247,7 +247,7 @@
],
"overrides": {
"@next/env": "15.4.1",
"drizzle-orm": "^0.41.0",
"drizzle-orm": "^0.44.5",
"next": "15.4.1",
"postgres": "^3.4.5",
"react": "19.1.0",
@@ -1980,7 +1980,7 @@
"drizzle-kit": ["drizzle-kit@0.31.4", "", { "dependencies": { "@drizzle-team/brocli": "^0.10.2", "@esbuild-kit/esm-loader": "^2.5.5", "esbuild": "^0.25.4", "esbuild-register": "^3.5.0" }, "bin": { "drizzle-kit": "bin.cjs" } }, "sha512-tCPWVZWZqWVx2XUsVpJRnH9Mx0ClVOf5YUHerZ5so1OKSlqww4zy1R5ksEdGRcO3tM3zj0PYN6V48TbQCL1RfA=="],
"drizzle-orm": ["drizzle-orm@0.41.0", "", { "peerDependencies": { "@aws-sdk/client-rds-data": ">=3", "@cloudflare/workers-types": ">=4", "@electric-sql/pglite": ">=0.2.0", "@libsql/client": ">=0.10.0", "@libsql/client-wasm": ">=0.10.0", "@neondatabase/serverless": ">=0.10.0", "@op-engineering/op-sqlite": ">=2", "@opentelemetry/api": "^1.4.1", "@planetscale/database": ">=1", "@prisma/client": "*", "@tidbcloud/serverless": "*", "@types/better-sqlite3": "*", "@types/pg": "*", "@types/sql.js": "*", "@vercel/postgres": ">=0.8.0", "@xata.io/client": "*", "better-sqlite3": ">=7", "bun-types": "*", "expo-sqlite": ">=14.0.0", "gel": ">=2", "knex": "*", "kysely": "*", "mysql2": ">=2", "pg": ">=8", "postgres": ">=3", "sql.js": ">=1", "sqlite3": ">=5" }, "optionalPeers": ["@aws-sdk/client-rds-data", "@cloudflare/workers-types", "@electric-sql/pglite", "@libsql/client", "@libsql/client-wasm", "@neondatabase/serverless", "@op-engineering/op-sqlite", "@opentelemetry/api", "@planetscale/database", "@prisma/client", "@tidbcloud/serverless", "@types/better-sqlite3", "@types/pg", "@types/sql.js", "@vercel/postgres", "@xata.io/client", "better-sqlite3", "bun-types", "expo-sqlite", "gel", "knex", "kysely", "mysql2", "pg", "postgres", "sql.js", "sqlite3"] }, "sha512-7A4ZxhHk9gdlXmTdPj/lREtP+3u8KvZ4yEN6MYVxBzZGex5Wtdc+CWSbu7btgF6TB0N+MNPrvW7RKBbxJchs/Q=="],
"drizzle-orm": ["drizzle-orm@0.44.5", "", { "peerDependencies": { "@aws-sdk/client-rds-data": ">=3", "@cloudflare/workers-types": ">=4", "@electric-sql/pglite": ">=0.2.0", "@libsql/client": ">=0.10.0", "@libsql/client-wasm": ">=0.10.0", "@neondatabase/serverless": ">=0.10.0", "@op-engineering/op-sqlite": ">=2", "@opentelemetry/api": "^1.4.1", "@planetscale/database": ">=1.13", "@prisma/client": "*", "@tidbcloud/serverless": "*", "@types/better-sqlite3": "*", "@types/pg": "*", "@types/sql.js": "*", "@upstash/redis": ">=1.34.7", "@vercel/postgres": ">=0.8.0", "@xata.io/client": "*", "better-sqlite3": ">=7", "bun-types": "*", "expo-sqlite": ">=14.0.0", "gel": ">=2", "knex": "*", "kysely": "*", "mysql2": ">=2", "pg": ">=8", "postgres": ">=3", "sql.js": ">=1", "sqlite3": ">=5" }, "optionalPeers": ["@aws-sdk/client-rds-data", "@cloudflare/workers-types", "@electric-sql/pglite", "@libsql/client", "@libsql/client-wasm", "@neondatabase/serverless", "@op-engineering/op-sqlite", "@opentelemetry/api", "@planetscale/database", "@prisma/client", "@tidbcloud/serverless", "@types/better-sqlite3", "@types/pg", "@types/sql.js", "@upstash/redis", "@vercel/postgres", "@xata.io/client", "better-sqlite3", "bun-types", "expo-sqlite", "gel", "knex", "kysely", "mysql2", "pg", "postgres", "sql.js", "sqlite3"] }, "sha512-jBe37K7d8ZSKptdKfakQFdeljtu3P2Cbo7tJoJSVZADzIKOBo9IAJPOmMsH2bZl90bZgh8FQlD8BjxXA/zuBkQ=="],
"duck": ["duck@0.1.12", "", { "dependencies": { "underscore": "^1.13.1" } }, "sha512-wkctla1O6VfP89gQ+J/yDesM0S7B7XLXjKGzXxMDVFg7uEn706niAtyYovKbyq1oT9YwDcly721/iUWoc8MVRg=="],

View File

@@ -8,9 +8,8 @@ WORKDIR /app
COPY package.json bun.lock turbo.json ./
COPY packages/db/package.json ./packages/db/package.json
# Install minimal dependencies in one layer
RUN bun install --omit dev --ignore-scripts && \
bun install --omit dev --ignore-scripts drizzle-kit drizzle-orm postgres
# Install dependencies
RUN bun install --ignore-scripts
# ========================================
# Runner Stage: Production Environment

View File

@@ -30,7 +30,7 @@
"react-dom": "19.1.0",
"next": "15.4.1",
"@next/env": "15.4.1",
"drizzle-orm": "^0.41.0",
"drizzle-orm": "^0.44.5",
"postgres": "^3.4.5"
},
"dependencies": {
@@ -38,7 +38,7 @@
"@t3-oss/env-nextjs": "0.13.4",
"@vercel/analytics": "1.5.0",
"bcryptjs": "3.0.2",
"drizzle-orm": "^0.41.0",
"drizzle-orm": "^0.44.5",
"geist": "^1.4.2",
"mongodb": "6.19.0",
"postgres": "^3.4.5",
@@ -51,7 +51,7 @@
"@biomejs/biome": "2.0.0-beta.5",
"@next/env": "15.4.1",
"@types/bcryptjs": "3.0.0",
"drizzle-kit": "^0.31.1",
"drizzle-kit": "^0.31.4",
"husky": "9.1.7",
"lint-staged": "16.0.0",
"turbo": "2.5.6"

View File

@@ -25,14 +25,14 @@
"type-check": "tsc --noEmit"
},
"peerDependencies": {
"drizzle-orm": "^0.41.0",
"drizzle-orm": "^0.44.5",
"postgres": "^3.4.5"
},
"devDependencies": {
"typescript": "^5.7.3"
},
"overrides": {
"drizzle-orm": "^0.41.0",
"drizzle-orm": "^0.44.5",
"postgres": "^3.4.5"
}
}