diff --git a/invokeai/frontend/web/src/features/prompt/PromptTriggerSelect.tsx b/invokeai/frontend/web/src/features/prompt/PromptTriggerSelect.tsx
index effe5769e9..da12a1930f 100644
--- a/invokeai/frontend/web/src/features/prompt/PromptTriggerSelect.tsx
+++ b/invokeai/frontend/web/src/features/prompt/PromptTriggerSelect.tsx
@@ -42,19 +42,6 @@ export const PromptTriggerSelect = memo(({ onSelect, onClose }: PromptTriggerSel
   const options = useMemo(() => {
     const _options: GroupBase[] = [];
 
-    if (tiModels) {
-      const embeddingOptions = tiModels
-        .filter((ti) => ti.base === mainModelConfig?.base)
-        .map((model) => ({ label: model.name, value: `<${model.name}>` }));
-
-      if (embeddingOptions.length > 0) {
-        _options.push({
-          label: t('prompt.compatibleEmbeddings'),
-          options: embeddingOptions,
-        });
-      }
-    }
-
     if (loraModels) {
       const triggerPhraseOptions = loraModels
         .filter((lora) => map(addedLoRAs, (l) => l.model.key).includes(lora.key))
@@ -74,6 +61,19 @@ export const PromptTriggerSelect = memo(({ onSelect, onClose }: PromptTriggerSel
       }
     }
 
+    if (tiModels) {
+      const embeddingOptions = tiModels
+        .filter((ti) => ti.base === mainModelConfig?.base)
+        .map((model) => ({ label: model.name, value: `<${model.name}>` }));
+
+      if (embeddingOptions.length > 0) {
+        _options.push({
+          label: t('prompt.compatibleEmbeddings'),
+          options: embeddingOptions,
+        });
+      }
+    }
+
     if (mainModelConfig && isNonRefinerMainModelConfig(mainModelConfig) && mainModelConfig.trigger_phrases?.length) {
       _options.push({
         label: t('modelManager.mainModelTriggerPhrases'),