Compare commits

...

8 Commits

Author SHA1 Message Date
Zamil Majdy
2fc2e3bbd8 fix(migration): only backfill endedAt for terminal executions
- Only set endedAt for COMPLETED, FAILED, TERMINATED statuses
- Leave endedAt as NULL for QUEUED, RUNNING, INCOMPLETE, REVIEW
- Add proper schema prefix "platform"
- Prevents incorrectly marking in-progress executions as ended
2026-01-13 12:34:19 -06:00
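
A minimal sketch of the backfill this commit describes, assuming the "platform" schema prefix mentioned above (the migration file shown further down uses the unqualified table name); column names and statuses are taken from that diff:

    -- Backfill sketch: only terminal executions get endedAt; in-progress rows stay NULL.
    ALTER TABLE "platform"."AgentGraphExecution" ADD COLUMN IF NOT EXISTS "endedAt" TIMESTAMP(3);

    UPDATE "platform"."AgentGraphExecution"
    SET "endedAt" = "updatedAt"
    WHERE "endedAt" IS NULL
      AND "executionStatus" IN ('COMPLETED', 'FAILED', 'TERMINATED');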
Zamil Majdy
3b6dc48033 update migration 2026-01-13 12:30:25 -06:00
Zamil Majdy
3cab0c1240 fix(frontend): make started_at and ended_at nullable in types
- Fixed manually maintained types.ts to match reality: started_at and ended_at are nullable
- Updated all usages to handle null values properly with defensive checks
- Fixed sorting/filtering code to handle null timestamps
- This exposes and fixes real bugs where code assumed timestamps always exist
- Executions in QUEUED status don't have started_at yet
- Executions in QUEUED/RUNNING don't have ended_at yet
2026-01-13 12:23:03 -06:00
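
A minimal sketch of the defensive pattern these frontend changes apply when sorting by nullable timestamps (the RunLike type and helper name are illustrative; types.ts in the diff below declares started_at/ended_at as Date | null):

    // Sketch: treat a missing started_at as epoch 0 so unstarted runs sort last in a
    // newest-first ordering, mirroring the `?.getTime() ?? 0` pattern in the diffs below.
    type RunLike = { started_at: Date | null; ended_at: Date | null };

    function sortByStartDesc(runs: RunLike[]): RunLike[] {
      return runs.toSorted((a, b) => {
        const aTime = a.started_at?.getTime() ?? 0;
        const bTime = b.started_at?.getTime() ?? 0;
        return bTime - aTime;
      });
    }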
Zamil Majdy
2416975c30 docs(backend): add descriptions for started_at and ended_at fields
- Document that started_at is null when execution hasn't started (QUEUED)
- Document that ended_at is null when execution hasn't finished (QUEUED, RUNNING, INCOMPLETE, REVIEW)
- These descriptions are now visible in OpenAPI spec
2026-01-13 11:37:44 -06:00
Zamil Majdy
bb8aab7bd4 feat(backend): add endedAt field to track execution completion time
- Added endedAt field to AgentGraphExecution schema
- Set endedAt when execution reaches terminal status (COMPLETED, FAILED, TERMINATED)
- Updated from_db() to use endedAt instead of updatedAt for ended_at
- Migration backfills endedAt with updatedAt for existing records
- This fixes the issue where updatedAt changed when adding correctness scores
- Chart grouping uses createdAt (when queued), endedAt tracks when execution actually finished
2026-01-13 11:13:42 -06:00
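
A minimal sketch of the terminal-status check this commit adds (the helper name and plain-string statuses are illustrative; the actual change uses the ExecutionStatus enum inside update_graph_execution_stats, shown in the diff below):

    from datetime import datetime, timezone

    TERMINAL_STATUSES = {"COMPLETED", "FAILED", "TERMINATED"}

    def build_update_data(status: str | None) -> dict:
        """Sketch: only terminal statuses stamp endedAt; QUEUED/RUNNING rows keep NULL."""
        update_data: dict = {}
        if status:
            update_data["executionStatus"] = status
            if status in TERMINAL_STATUSES:
                update_data["endedAt"] = datetime.now(tz=timezone.utc)
        return update_data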
Zamil Majdy
a04b891e1c update openapi.json 2026-01-13 10:57:34 -06:00
Zamil Majdy
a304332bea refactor(platform): simplify execution timestamps and fix analytics
- Removed created_at/updated_at from GraphExecutionMeta as they're DB metadata, not execution runtime info
- Made started_at and ended_at optional (fulfilling TODO) since executions may not have started yet
- Fixed late_execution_monitor.py to handle optional started_at with proper fallback logic
- Updated frontend AnalyticsResultsTable to show only execution runtime timestamps (started_at/ended_at)
- Updated CSV export to exclude created_at/updated_at columns
- Moved OpenAI API key validation to _process_batch (only checked when LLM is actually needed)
- Made settings global in execution_analytics_routes.py to avoid recreation
- Removed debug logging from analytics.py and ExecutionAnalyticsForm.tsx
2026-01-13 10:52:47 -06:00
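
A minimal sketch of the optional started_at fallback used when sorting executions (mirrors the late_execution_monitor.py change in the diff below; the function name is illustrative):

    from datetime import datetime, timezone

    def sort_oldest_first(executions: list) -> None:
        """Sketch: executions without started_at (not yet started) sort before everything else."""
        executions.sort(
            key=lambda x: x.started_at or datetime.min.replace(tzinfo=timezone.utc)
        )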
Zamil Majdy
01cfac9d5a fix(platform): add timestamps to execution analytics and fix chart aggregation
## Changes

### Backend
- Add created_at and updated_at fields to GraphExecutionMeta model
- Update from_db() to properly populate timestamp fields from Prisma
- Remove duck-typing (getattr) in execution_analytics_routes.py
- Fix aggregation threshold from 3→1 executions per day
- Add comprehensive logging with [ACCURACY TRENDS] prefix using logger.info()

### Frontend
- Move timestamp display from table columns to expandable details section
- Add 4-column grid showing Created/Updated/Started/Ended timestamps
- Update CSV export to include all 4 timestamps
- Add blue disclaimer box explaining chart filters match monitoring system
- Add console logging for debugging chart issues

## Fixes
- Timestamps now properly typed and accessible in execution results
- Chart aggregation more inclusive (≥1 execution vs ≥3)
- Table no longer cluttered with timestamp columns
- Chart behavior matches scheduled monitoring system

## Testing
- Backend logs show query details with [ACCURACY TRENDS] prefix
- Frontend console logs params and response data
- Disclaimer clarifies chart shows scored executions only from last 30 days
2026-01-13 10:17:32 -06:00
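
The [ACCURACY TRENDS] logging mentioned above is a plain prefix convention for grepping backend logs; a minimal sketch (the message text is illustrative):

    import logging

    logger = logging.getLogger(__name__)

    # Sketch: every trends-related log line carries the same greppable prefix.
    logger.info("[ACCURACY TRENDS] querying scored executions for the last 30 days")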
17 changed files with 221 additions and 93 deletions

View File

@@ -28,6 +28,7 @@ from backend.executor.manager import get_db_async_client
 from backend.util.settings import Settings

 logger = logging.getLogger(__name__)
+settings = Settings()

 class ExecutionAnalyticsRequest(BaseModel):
@@ -63,6 +64,8 @@ class ExecutionAnalyticsResult(BaseModel):
     score: Optional[float]
     status: str  # "success", "failed", "skipped"
     error_message: Optional[str] = None
+    started_at: Optional[datetime] = None
+    ended_at: Optional[datetime] = None

 class ExecutionAnalyticsResponse(BaseModel):
@@ -224,11 +227,6 @@ async def generate_execution_analytics(
     )

     try:
-        # Validate model configuration
-        settings = Settings()
-        if not settings.secrets.openai_internal_api_key:
-            raise HTTPException(status_code=500, detail="OpenAI API key not configured")
-
         # Get database client
         db_client = get_db_async_client()
@@ -320,6 +318,8 @@ async def generate_execution_analytics(
                     ),
                     status="skipped",
                     error_message=None,  # Not an error - just already processed
+                    started_at=execution.started_at,
+                    ended_at=execution.ended_at,
                 )
             )
@@ -349,6 +349,9 @@ async def _process_batch(
 ) -> list[ExecutionAnalyticsResult]:
     """Process a batch of executions concurrently."""
+    if not settings.secrets.openai_internal_api_key:
+        raise HTTPException(status_code=500, detail="OpenAI API key not configured")
+
     async def process_single_execution(execution) -> ExecutionAnalyticsResult:
         try:
             # Generate activity status and score using the specified model
@@ -387,6 +390,8 @@ async def _process_batch(
                 score=None,
                 status="skipped",
                 error_message="Activity generation returned None",
+                started_at=execution.started_at,
+                ended_at=execution.ended_at,
             )

         # Update the execution stats
@@ -416,6 +421,8 @@ async def _process_batch(
                 summary_text=activity_response["activity_status"],
                 score=activity_response["correctness_score"],
                 status="success",
+                started_at=execution.started_at,
+                ended_at=execution.ended_at,
             )
         except Exception as e:
@@ -429,6 +436,8 @@ async def _process_batch(
                 score=None,
                 status="failed",
                 error_message=str(e),
+                started_at=execution.started_at,
+                ended_at=execution.ended_at,
             )

     # Process all executions in the batch concurrently

View File

@@ -104,7 +104,7 @@ async def get_accuracy_trends_and_alerts(
                 AND e."executionStatus" IN ('COMPLETED', 'FAILED', 'TERMINATED')
                 {user_filter}
             GROUP BY DATE(e."createdAt")
-            HAVING COUNT(*) >= 3  -- Need at least 3 executions per day
+            HAVING COUNT(*) >= 1  -- Include all days with at least 1 execution
         ),
         trends AS (
             SELECT

View File

@@ -153,8 +153,14 @@ class GraphExecutionMeta(BaseDbModel):
     nodes_input_masks: Optional[dict[str, BlockInput]]
     preset_id: Optional[str]
     status: ExecutionStatus
-    started_at: datetime
-    ended_at: datetime
+    started_at: Optional[datetime] = Field(
+        None,
+        description="When execution started running. Null if not yet started (QUEUED).",
+    )
+    ended_at: Optional[datetime] = Field(
+        None,
+        description="When execution finished. Null if not yet completed (QUEUED, RUNNING, INCOMPLETE, REVIEW).",
+    )
     is_shared: bool = False
     share_token: Optional[str] = None
@@ -229,10 +235,8 @@ class GraphExecutionMeta(BaseDbModel):
     @staticmethod
     def from_db(_graph_exec: AgentGraphExecution):
-        now = datetime.now(timezone.utc)
-        # TODO: make started_at and ended_at optional
-        start_time = _graph_exec.startedAt or _graph_exec.createdAt
-        end_time = _graph_exec.updatedAt or now
+        start_time = _graph_exec.startedAt
+        end_time = _graph_exec.endedAt

        try:
            stats = GraphExecutionStats.model_validate(_graph_exec.stats)
@@ -900,6 +904,14 @@ async def update_graph_execution_stats(
     if status:
         update_data["executionStatus"] = status
+        # Set endedAt when execution reaches a terminal status
+        terminal_statuses = [
+            ExecutionStatus.COMPLETED,
+            ExecutionStatus.FAILED,
+            ExecutionStatus.TERMINATED,
+        ]
+        if status in terminal_statuses:
+            update_data["endedAt"] = datetime.now(tz=timezone.utc)

     where_clause: AgentGraphExecutionWhereInput = {"id": graph_exec_id}

View File

@@ -60,8 +60,10 @@ class LateExecutionMonitor:
         if not all_late_executions:
             return "No late executions detected."

-        # Sort by created time (oldest first)
-        all_late_executions.sort(key=lambda x: x.started_at)
+        # Sort by started time (oldest first), with None values (unstarted) first
+        all_late_executions.sort(
+            key=lambda x: x.started_at or datetime.min.replace(tzinfo=timezone.utc)
+        )

         num_total_late = len(all_late_executions)
         num_queued = len(queued_late_executions)
@@ -74,7 +76,7 @@ class LateExecutionMonitor:
         was_truncated = num_total_late > tuncate_size

         late_execution_details = [
-            f"* `Execution ID: {exec.id}, Graph ID: {exec.graph_id}v{exec.graph_version}, User ID: {exec.user_id}, Status: {exec.status}, Created At: {exec.started_at.isoformat()}`"
+            f"* `Execution ID: {exec.id}, Graph ID: {exec.graph_id}v{exec.graph_version}, User ID: {exec.user_id}, Status: {exec.status}, Started At: {exec.started_at.isoformat() if exec.started_at else 'Not started'}`"
            for exec in truncated_executions
        ]

View File

@@ -0,0 +1,8 @@
+-- AlterTable
+ALTER TABLE "AgentGraphExecution" ADD COLUMN "endedAt" TIMESTAMP(3);
+
+-- Set endedAt to updatedAt for existing records with terminal status only
+UPDATE "AgentGraphExecution"
+SET "endedAt" = "updatedAt"
+WHERE "endedAt" IS NULL
+  AND "executionStatus" IN ('COMPLETED', 'FAILED', 'TERMINATED');

View File

@@ -383,6 +383,7 @@ model AgentGraphExecution {
   createdAt DateTime @default(now())
   updatedAt DateTime? @updatedAt
   startedAt DateTime?
+  endedAt DateTime?
   isDeleted Boolean @default(false)

View File

@@ -51,6 +51,8 @@ export function AnalyticsResultsTable({ results }: Props) {
       "Execution ID",
       "Status",
       "Score",
+      "Started At",
+      "Ended At",
       "Summary Text",
       "Error Message",
     ];
@@ -62,6 +64,8 @@ export function AnalyticsResultsTable({ results }: Props) {
       result.exec_id,
       result.status,
       result.score?.toString() || "",
+      result.started_at ? new Date(result.started_at).toLocaleString() : "",
+      result.ended_at ? new Date(result.ended_at).toLocaleString() : "",
      `"${(result.summary_text || "").replace(/"/g, '""')}"`, // Escape quotes in summary
      `"${(result.error_message || "").replace(/"/g, '""')}"`, // Escape quotes in error
    ]);
@@ -248,15 +252,13 @@ export function AnalyticsResultsTable({ results }: Props) {
                     )}
                   </td>
                   <td className="px-4 py-3">
-                    {(result.summary_text || result.error_message) && (
-                      <Button
-                        variant="ghost"
-                        size="small"
-                        onClick={() => toggleRowExpansion(result.exec_id)}
-                      >
-                        <EyeIcon size={16} />
-                      </Button>
-                    )}
+                    <Button
+                      variant="ghost"
+                      size="small"
+                      onClick={() => toggleRowExpansion(result.exec_id)}
+                    >
+                      <EyeIcon size={16} />
+                    </Button>
                   </td>
                 </tr>
@@ -264,6 +266,44 @@ export function AnalyticsResultsTable({ results }: Props) {
                 <tr>
                   <td colSpan={7} className="bg-gray-50 px-4 py-3">
                     <div className="space-y-3">
+                      {/* Timestamps section */}
+                      <div className="grid grid-cols-2 gap-4 border-b border-gray-200 pb-3">
+                        <div>
+                          <Text
+                            variant="body"
+                            className="text-xs font-medium text-gray-600"
+                          >
+                            Started At:
+                          </Text>
+                          <Text
+                            variant="body"
+                            className="text-sm text-gray-700"
+                          >
+                            {result.started_at
+                              ? new Date(
+                                  result.started_at,
+                                ).toLocaleString()
+                              : "—"}
+                          </Text>
+                        </div>
+                        <div>
+                          <Text
+                            variant="body"
+                            className="text-xs font-medium text-gray-600"
+                          >
+                            Ended At:
+                          </Text>
+                          <Text
+                            variant="body"
+                            className="text-sm text-gray-700"
+                          >
+                            {result.ended_at
+                              ? new Date(result.ended_at).toLocaleString()
+                              : "—"}
+                          </Text>
+                        </div>
+                      </div>
                       {result.summary_text && (
                         <div>
                           <Text

View File

@@ -541,7 +541,19 @@ export function ExecutionAnalyticsForm() {
       {/* Accuracy Trends Display */}
       {trendsData && (
         <div className="space-y-4">
-          <h3 className="text-lg font-semibold">Execution Accuracy Trends</h3>
+          <div className="flex items-start justify-between">
+            <h3 className="text-lg font-semibold">Execution Accuracy Trends</h3>
+            <div className="rounded-md bg-blue-50 px-3 py-2 text-xs text-blue-700">
+              <p className="font-medium">
+                Chart Filters (matches monitoring system):
+              </p>
+              <ul className="mt-1 list-inside list-disc space-y-1">
+                <li>Only days with 1 execution with correctness score</li>
+                <li>Last 30 days</li>
+                <li>Averages calculated from scored executions only</li>
+              </ul>
+            </div>
+          </div>

           {/* Alert Section */}
           {trendsData.alert && (

View File

@@ -173,8 +173,9 @@ export function OldAgentLibraryView() {
     if (agentRuns.length > 0) {
       // select latest run
       const latestRun = agentRuns.reduce((latest, current) => {
-        if (latest.started_at && !current.started_at) return current;
-        else if (!latest.started_at) return latest;
+        if (!latest.started_at && !current.started_at) return latest;
+        if (!latest.started_at) return current;
+        if (!current.started_at) return latest;
         return latest.started_at > current.started_at ? latest : current;
       }, agentRuns[0]);
       selectRun(latestRun.id as GraphExecutionID);

View File

@@ -184,9 +184,11 @@ export function AgentRunsSelectorList({
           ))}
           {agentPresets.length > 0 && <Separator className="my-1" />}
           {agentRuns
-            .toSorted(
-              (a, b) => b.started_at.getTime() - a.started_at.getTime(),
-            )
+            .toSorted((a, b) => {
+              const aTime = a.started_at?.getTime() ?? 0;
+              const bTime = b.started_at?.getTime() ?? 0;
+              return bTime - aTime;
+            })
             .map((run) => (
               <AgentRunSummaryCard
                 className={listItemClasses}
@@ -199,7 +201,7 @@ export function AgentRunsSelectorList({
                     ?.name
                   : null) ?? agent.name
               }
-              timestamp={run.started_at}
+              timestamp={run.started_at ?? undefined}
               selected={selectedView.id === run.id}
               onClick={() => onSelectRun(run.id)}
               onDelete={() => doDeleteRun(run as GraphExecutionMeta)}

View File

@@ -120,9 +120,11 @@ export const AgentFlowList = ({
        lastRun =
          runCount == 0
            ? null
-            : _flowRuns.reduce((a, c) =>
-                a.started_at > c.started_at ? a : c,
-              );
+            : _flowRuns.reduce((a, c) => {
+                const aTime = a.started_at?.getTime() ?? 0;
+                const cTime = c.started_at?.getTime() ?? 0;
+                return aTime > cTime ? a : c;
+              });
      }
      return { flow, runCount, lastRun };
    })
@@ -130,10 +132,9 @@ export const AgentFlowList = ({
      if (!a.lastRun && !b.lastRun) return 0;
      if (!a.lastRun) return 1;
      if (!b.lastRun) return -1;
-      return (
-        b.lastRun.started_at.getTime() -
-        a.lastRun.started_at.getTime()
-      );
+      const bTime = b.lastRun.started_at?.getTime() ?? 0;
+      const aTime = a.lastRun.started_at?.getTime() ?? 0;
+      return bTime - aTime;
    })
    .map(({ flow, runCount, lastRun }) => (
      <TableRow

View File

@@ -29,7 +29,10 @@ export const FlowRunsStatus: React.FC<{
      : statsSince;
  const filteredFlowRuns =
    statsSinceTimestamp != null
-      ? executions.filter((fr) => fr.started_at.getTime() > statsSinceTimestamp)
+      ? executions.filter(
+          (fr) =>
+            fr.started_at && fr.started_at.getTime() > statsSinceTimestamp,
+        )
      : executions;

  return (

View File

@@ -98,40 +98,43 @@ export const FlowRunsTimeline = ({
          <Scatter
            key={flow.id}
            data={executions
-              .filter((e) => e.graph_id == flow.graph_id)
+              .filter((e) => e.graph_id == flow.graph_id && e.started_at)
              .map((e) => ({
                ...e,
                time:
-                  e.started_at.getTime() + (e.stats?.node_exec_time ?? 0) * 1000,
+                  (e.started_at?.getTime() ?? 0) +
+                  (e.stats?.node_exec_time ?? 0) * 1000,
                _duration: e.stats?.node_exec_time ?? 0,
              }))}
            name={flow.name}
            fill={`hsl(${(hashString(flow.id) * 137.5) % 360}, 70%, 50%)`}
          />
        ))}
-        {executions.map((execution) => (
-          <Line
-            key={execution.id}
-            type="linear"
-            dataKey="_duration"
-            data={[
-              {
-                ...execution,
-                time: execution.started_at.getTime(),
-                _duration: 0,
-              },
-              {
-                ...execution,
-                time: execution.ended_at.getTime(),
-                _duration: execution.stats?.node_exec_time ?? 0,
-              },
-            ]}
-            stroke={`hsl(${(hashString(execution.graph_id) * 137.5) % 360}, 70%, 50%)`}
-            strokeWidth={2}
-            dot={false}
-            legendType="none"
-          />
-        ))}
+        {executions
+          .filter((e) => e.started_at && e.ended_at)
+          .map((execution) => (
+            <Line
+              key={execution.id}
+              type="linear"
+              dataKey="_duration"
+              data={[
+                {
+                  ...execution,
+                  time: execution.started_at!.getTime(),
+                  _duration: 0,
+                },
+                {
+                  ...execution,
+                  time: execution.ended_at!.getTime(),
+                  _duration: execution.stats?.node_exec_time ?? 0,
+                },
+              ]}
+              stroke={`hsl(${(hashString(execution.graph_id) * 137.5) % 360}, 70%, 50%)`}
+              strokeWidth={2}
+              dot={false}
+              legendType="none"
+            />
+          ))}
        <Legend
          content={<ScrollableLegend />}
          wrapperStyle={{

View File

@@ -98,7 +98,11 @@ const Monitor = () => {
          ...(selectedFlow
            ? executions.filter((v) => v.graph_id == selectedFlow.graph_id)
            : executions),
-        ].sort((a, b) => b.started_at.getTime() - a.started_at.getTime())}
+        ].sort((a, b) => {
+          const aTime = a.started_at?.getTime() ?? 0;
+          const bTime = b.started_at?.getTime() ?? 0;
+          return bTime - aTime;
+        })}
        selectedRun={selectedRun}
        onSelectRun={(r) => setSelectedRun(r.id == selectedRun?.id ? null : r)}
      />

View File

@@ -6968,6 +6968,20 @@
        "error_message": {
          "anyOf": [{ "type": "string" }, { "type": "null" }],
          "title": "Error Message"
-        }
+        },
+        "started_at": {
+          "anyOf": [
+            { "type": "string", "format": "date-time" },
+            { "type": "null" }
+          ],
+          "title": "Started At"
+        },
+        "ended_at": {
+          "anyOf": [
+            { "type": "string", "format": "date-time" },
+            { "type": "null" }
+          ],
+          "title": "Ended At"
+        }
      },
      "type": "object",
@@ -7074,14 +7088,20 @@
        },
        "status": { "$ref": "#/components/schemas/AgentExecutionStatus" },
        "started_at": {
-          "type": "string",
-          "format": "date-time",
-          "title": "Started At"
+          "anyOf": [
+            { "type": "string", "format": "date-time" },
+            { "type": "null" }
+          ],
+          "title": "Started At",
+          "description": "When execution started running. Null if not yet started (QUEUED)."
        },
        "ended_at": {
-          "type": "string",
-          "format": "date-time",
-          "title": "Ended At"
+          "anyOf": [
+            { "type": "string", "format": "date-time" },
+            { "type": "null" }
+          ],
+          "title": "Ended At",
+          "description": "When execution finished. Null if not yet completed (QUEUED, RUNNING, INCOMPLETE, REVIEW)."
        },
        "is_shared": {
          "type": "boolean",
@@ -7115,8 +7135,6 @@
        "nodes_input_masks",
        "preset_id",
        "status",
-        "started_at",
-        "ended_at",
        "stats",
        "outputs"
      ],
@@ -7215,14 +7233,20 @@
        },
        "status": { "$ref": "#/components/schemas/AgentExecutionStatus" },
        "started_at": {
-          "type": "string",
-          "format": "date-time",
-          "title": "Started At"
+          "anyOf": [
+            { "type": "string", "format": "date-time" },
+            { "type": "null" }
+          ],
+          "title": "Started At",
+          "description": "When execution started running. Null if not yet started (QUEUED)."
        },
        "ended_at": {
-          "type": "string",
-          "format": "date-time",
-          "title": "Ended At"
+          "anyOf": [
+            { "type": "string", "format": "date-time" },
+            { "type": "null" }
+          ],
+          "title": "Ended At",
+          "description": "When execution finished. Null if not yet completed (QUEUED, RUNNING, INCOMPLETE, REVIEW)."
        },
        "is_shared": {
          "type": "boolean",
@@ -7251,8 +7275,6 @@
        "nodes_input_masks",
        "preset_id",
        "status",
-        "started_at",
-        "ended_at",
        "stats"
      ],
      "title": "GraphExecutionMeta"
@@ -7299,14 +7321,20 @@
        },
        "status": { "$ref": "#/components/schemas/AgentExecutionStatus" },
        "started_at": {
-          "type": "string",
-          "format": "date-time",
-          "title": "Started At"
+          "anyOf": [
+            { "type": "string", "format": "date-time" },
+            { "type": "null" }
+          ],
+          "title": "Started At",
+          "description": "When execution started running. Null if not yet started (QUEUED)."
        },
        "ended_at": {
-          "type": "string",
-          "format": "date-time",
-          "title": "Ended At"
+          "anyOf": [
+            { "type": "string", "format": "date-time" },
+            { "type": "null" }
+          ],
+          "title": "Ended At",
+          "description": "When execution finished. Null if not yet completed (QUEUED, RUNNING, INCOMPLETE, REVIEW)."
        },
        "is_shared": {
          "type": "boolean",
@@ -7345,8 +7373,6 @@
        "nodes_input_masks",
        "preset_id",
        "status",
-        "started_at",
-        "ended_at",
        "stats",
        "outputs",
        "node_executions"

View File

@@ -50,7 +50,9 @@ export function ActivityItem({ execution }: Props) {
    execution.status === AgentExecutionStatus.QUEUED;

  if (isActiveStatus) {
-    const timeAgo = formatTimeAgo(execution.started_at.toString());
+    const timeAgo = execution.started_at
+      ? formatTimeAgo(execution.started_at.toString())
+      : "recently";
    const statusText =
      execution.status === AgentExecutionStatus.QUEUED ? "queued" : "running";
    return [
@@ -61,7 +63,9 @@ export function ActivityItem({ execution }: Props) {
  // Handle all other statuses with time display
  const timeAgo = execution.ended_at
    ? formatTimeAgo(execution.ended_at.toString())
-    : formatTimeAgo(execution.started_at.toString());
+    : execution.started_at
+      ? formatTimeAgo(execution.started_at.toString())
+      : "recently";

  let statusText = "ended";
  switch (execution.status) {

View File

@@ -327,8 +327,8 @@ export type GraphExecutionMeta = {
    | "FAILED"
    | "INCOMPLETE"
    | "REVIEW";
-  started_at: Date;
-  ended_at: Date;
+  started_at: Date | null;
+  ended_at: Date | null;
  stats: {
    error: string | null;
    cost: number;