Mirror of https://github.com/invoke-ai/InvokeAI.git
feat(ui): add conditionally-enabled workflow publishing ui
This is a squash of a lot of scattered commits that became very difficult to clean up and make individually. Sorry. Besides the new UI, there are a number of notable changes:

- Publishing logic is disabled in OSS by default. To enable it, provide a `disabledFeatures` prop that does _not_ include "publishWorkflow" (see the first sketch after this list).
- Enqueuing a workflow is no longer handled in a redux listener. It was hard to track the state of the enqueue logic in the listener. It is now in a hook (a usage sketch follows the diff below). I did not migrate the canvas and upscaling tabs - their enqueue logic is still in the listener.
- When queueing a validation run, the new `useEnqueueWorkflows()` hook will update the payload with the data required for the run.
- Some logic is added to the socket event listeners to handle workflow publish runs completing.
- The workflow library side nav has a new "published" view. It is hidden when the "publishWorkflow" feature is disabled.
- I've added `Safe` and `OrThrow` versions of some workflow hooks. These hooks typically retrieve some data from redux - for example, a node. The `Safe` hooks return the node or null if it cannot be found, while the `OrThrow` hooks return the node or raise if it cannot be found. The `OrThrow` hooks should be used within one of the gate components, which use the `Safe` hooks and render a fallback if e.g. the node isn't found (see the second sketch after this list). This change is required for some of the publish flow UI.
- Add support for locking the workflow editor. When locked, you can pan and zoom, but that's it. Currently, the editor is locked only during the publish flow and when a published workflow is opened.
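As a rough illustration of the first point, a sketch of how a host app might enable publishing. Only the `disabledFeatures` prop name comes from this commit; the `InvokeAIUI` mount component and everything else here are assumptions for the sketch:

import InvokeAIUI from 'app/components/InvokeAIUI'; // mount component assumed for this sketch

export const HostApp = () => {
  // Publishing becomes available because 'publishWorkflow' is absent from
  // disabledFeatures. Including 'publishWorkflow' in the array (or, in OSS,
  // omitting the prop entirely) keeps the publishing UI hidden.
  return <InvokeAIUI disabledFeatures={[]} />;
};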
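And a second, minimal sketch of the `Safe`/`OrThrow`/gate pattern described above. The hook and component names (`useNodeSafe`, `useNodeOrThrow`, `NodeGate`) and the selector wiring are hypothetical, not the identifiers used in the codebase:

import type { PropsWithChildren, ReactNode } from 'react';
import { useAppSelector } from 'app/store/storeHooks';
import { selectNodesSlice } from 'features/nodes/store/selectors';
import { assert } from 'tsafe';

// `Safe` variant: returns the node, or null if it cannot be found.
const useNodeSafe = (nodeId: string) => {
  return useAppSelector((state) => selectNodesSlice(state).nodes.find((n) => n.id === nodeId) ?? null);
};

// `OrThrow` variant: returns the node, or raises if it cannot be found. Only
// use it below a gate component that has already confirmed the node exists.
const useNodeOrThrow = (nodeId: string) => {
  const node = useNodeSafe(nodeId);
  assert(node !== null, `Node ${nodeId} not found`);
  return node;
};

// Gate component: uses the `Safe` hook and renders a fallback when the node is
// missing, so descendants can use the `OrThrow` hooks without risk of throwing.
export const NodeGate = ({ nodeId, fallback, children }: PropsWithChildren<{ nodeId: string; fallback?: ReactNode }>) => {
  const node = useNodeSafe(nodeId);
  if (!node) {
    return <>{fallback}</>;
  }
  return <>{children}</>;
};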
@@ -0,0 +1,156 @@
import { createAction } from '@reduxjs/toolkit';
import { useAppStore } from 'app/store/nanostores/store';
import {
  $outputNodeId,
  getPublishInputs,
  selectFieldIdentifiersWithInvocationTypes,
} from 'features/nodes/components/sidePanel/workflow/publish';
import { $templates } from 'features/nodes/store/nodesSlice';
import { selectNodeData, selectNodesSlice } from 'features/nodes/store/selectors';
import { isBatchNode, isInvocationNode } from 'features/nodes/types/invocation';
import { buildNodesGraph } from 'features/nodes/util/graph/buildNodesGraph';
import { resolveBatchValue } from 'features/nodes/util/node/resolveBatchValue';
import { buildWorkflowWithValidation } from 'features/nodes/util/workflow/buildWorkflow';
import { groupBy } from 'lodash-es';
import { useCallback } from 'react';
import { enqueueMutationFixedCacheKeyOptions, queueApi } from 'services/api/endpoints/queue';
import type { Batch, EnqueueBatchArg } from 'services/api/types';
import { assert } from 'tsafe';

const enqueueRequestedWorkflows = createAction('app/enqueueRequestedWorkflows');

export const useEnqueueWorkflows = () => {
  const { getState, dispatch } = useAppStore();
  const enqueue = useCallback(
    async (prepend: boolean, isApiValidationRun: boolean) => {
      dispatch(enqueueRequestedWorkflows());
      const state = getState();
      const nodesState = selectNodesSlice(state);
      const workflow = state.workflow;
      const templates = $templates.get();
      const graph = buildNodesGraph(state, templates);
      const builtWorkflow = buildWorkflowWithValidation({
        nodes: nodesState.nodes,
        edges: nodesState.edges,
        workflow,
      });

      if (builtWorkflow) {
        // embedded workflows don't have an id
        delete builtWorkflow.id;
      }

      const data: Batch['data'] = [];

      const invocationNodes = nodesState.nodes.filter(isInvocationNode);
      const batchNodes = invocationNodes.filter(isBatchNode);

      // Handle zipping batch nodes. First, group the batch nodes by their batch_group_id
      const groupedBatchNodes = groupBy(batchNodes, (node) => node.data.inputs['batch_group_id']?.value);

      // Then, we will create a batch data collection item for each group
      for (const [batchGroupId, batchNodes] of Object.entries(groupedBatchNodes)) {
        const zippedBatchDataCollectionItems: NonNullable<Batch['data']>[number] = [];

        for (const node of batchNodes) {
          const value = await resolveBatchValue({ nodesState, node, dispatch });
          const sourceHandle = node.data.type === 'image_batch' ? 'image' : 'value';
          const edgesFromBatch = nodesState.edges.filter(
            (e) => e.source === node.id && e.sourceHandle === sourceHandle
          );
          if (batchGroupId !== 'None') {
            // If this batch node has a batch_group_id, we will zip the data collection items
            for (const edge of edgesFromBatch) {
              if (!edge.targetHandle) {
                break;
              }
              zippedBatchDataCollectionItems.push({
                node_path: edge.target,
                field_name: edge.targetHandle,
                items: value,
              });
            }
          } else {
            // Otherwise, add the data collection items to the root of the batch so they are not zipped
            const productBatchDataCollectionItems: NonNullable<Batch['data']>[number] = [];
            for (const edge of edgesFromBatch) {
              if (!edge.targetHandle) {
                break;
              }
              productBatchDataCollectionItems.push({
                node_path: edge.target,
                field_name: edge.targetHandle,
                items: value,
              });
            }
            if (productBatchDataCollectionItems.length > 0) {
              data.push(productBatchDataCollectionItems);
            }
          }
        }

        // Finally, if this zipped batch data collection item has any items, add it to the data array
        if (batchGroupId !== 'None' && zippedBatchDataCollectionItems.length > 0) {
          data.push(zippedBatchDataCollectionItems);
        }
      }

      const batchConfig: EnqueueBatchArg = {
        batch: {
          graph,
          workflow: builtWorkflow,
          runs: state.params.iterations,
          origin: 'workflows',
          destination: 'gallery',
          data,
        },
        prepend,
      };

      if (isApiValidationRun) {
        // Derive the input fields from the builder's selected node field elements
        const fieldIdentifiers = selectFieldIdentifiersWithInvocationTypes(state);
        const inputs = getPublishInputs(fieldIdentifiers, templates);
        const api_input_fields = inputs.publishable.map(({ nodeId, fieldName }) => {
          return {
            kind: 'input',
            node_id: nodeId,
            field_name: fieldName,
          } as const;
        });

        // Derive the output fields from the builder's selected output node
        const outputNodeId = $outputNodeId.get();
        assert(outputNodeId !== null, 'Output node not selected');
        const outputNodeType = selectNodeData(selectNodesSlice(state), outputNodeId).type;
        const outputNodeTemplate = templates[outputNodeType];
        assert(outputNodeTemplate, `Template for node type ${outputNodeType} not found`);
        const outputFieldNames = Object.keys(outputNodeTemplate.outputs);
        const api_output_fields = outputFieldNames.map((fieldName) => {
          return {
            kind: 'output',
            node_id: outputNodeId,
            field_name: fieldName,
          } as const;
        });

        batchConfig.is_api_validation_run = true;
        batchConfig.api_input_fields = api_input_fields;
        batchConfig.api_output_fields = api_output_fields;

        // If the batch is an API validation run, we only want to run it once
        batchConfig.batch.runs = 1;
      }

      const req = dispatch(
        queueApi.endpoints.enqueueBatch.initiate(batchConfig, { ...enqueueMutationFixedCacheKeyOptions, track: false })
      );

      const enqueueResult = await req.unwrap();
      return { batchConfig, enqueueResult };
    },
    [dispatch, getState]
  );

  return enqueue;
};
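For context, a minimal sketch of how a component might consume this hook. Only `useEnqueueWorkflows` and its `(prepend, isApiValidationRun)` signature come from the diff above; the component, the button markup, and the import path are assumptions for illustration:

import { useCallback } from 'react';
// Import path assumed for the sketch; the diff does not show the file's location.
import { useEnqueueWorkflows } from 'features/queue/hooks/useEnqueueWorkflows';

const InvokeWorkflowButton = () => {
  const enqueue = useEnqueueWorkflows();
  const onClick = useCallback(async () => {
    // prepend=false appends to the back of the queue; isApiValidationRun=false
    // enqueues a normal run rather than a publish validation run.
    const { batchConfig, enqueueResult } = await enqueue(false, false);
    console.log('enqueued', batchConfig.batch.runs, enqueueResult);
  }, [enqueue]);
  return <button onClick={onClick}>Invoke</button>;
};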