fix(ui): LoraManager UI causing re-render overload

This commit is contained in:
blessedcoolant
2023-04-02 19:37:47 +12:00
parent 63ecdb19fe
commit fad6fc807b
3 changed files with 90 additions and 57 deletions

View File

@@ -2,80 +2,61 @@ import { Box } from '@chakra-ui/react';
import { getLoraModels } from 'app/socketio/actions';
import { useAppDispatch, useAppSelector } from 'app/storeHooks';
import IAISimpleMenu, { IAIMenuItem } from 'common/components/IAISimpleMenu';
import { setPrompt } from 'features/parameters/store/generationSlice';
import { useEffect, useState, useCallback } from 'react';
import { setLorasInUse } from 'features/parameters/store/generationSlice';
import { useEffect } from 'react';
import { useTranslation } from 'react-i18next';
export default function LoraManager() {
const dispatch = useAppDispatch();
const prompt = useAppSelector((state) => state.generation.prompt);
const foundLoras = useAppSelector((state) => state.system.foundLoras);
const [loraItems, setLoraItems] = useState<IAIMenuItem[]>([
{ item: '', onClick: undefined },
]);
const lorasInUse = useAppSelector((state) => state.generation.lorasInUse);
const { t } = useTranslation();
const loraExists = useCallback(
(lora: string) => {
const lora_regex = new RegExp(`withLora\\(${lora},?\\s*([\\d.]+)?\\)`);
if (prompt.match(lora_regex)) return true;
return false;
},
[prompt]
);
const handleLora = useCallback(
(lora: string) => {
if (loraExists(lora)) {
const lora_regex = new RegExp(`withLora\\(${lora},?\\s*([\\d.]+)?\\)`);
const newPrompt = prompt.replace(lora_regex, '');
dispatch(setPrompt(newPrompt));
} else {
dispatch(setPrompt(`${prompt} withLora(${lora},1)`));
}
},
[dispatch, loraExists, prompt]
);
// Toggle the given LoRA on/off via the generation slice.
const handleLora = (lora: string) => dispatch(setLorasInUse(lora));
// Fetch the list of available LoRA models from the backend once on mount.
useEffect(() => {
dispatch(getLoraModels());
}, [dispatch]);
const renderLoraOption = useCallback(
(lora: string) => {
console.log('renderLoraOption');
const thisloraExists = loraExists(lora);
const loraExistsStyle = {
fontWeight: 'bold',
color: 'var(--context-menu-active-item)',
};
return <Box style={thisloraExists ? loraExistsStyle : {}}>{lora}</Box>;
},
[loraExists]
);
// Render a menu entry for a LoRA, bolding/highlighting it when it is
// currently listed in lorasInUse.
const renderLoraOption = (lora: string) => {
  const activeStyle = {
    fontWeight: 'bold',
    color: 'var(--context-menu-active-item)',
  };
  const isActive = lorasInUse.includes(lora);
  return <Box style={isActive ? activeStyle : {}}>{lora}</Box>;
};
useEffect(() => {
if (foundLoras) {
console.log('renderloraoption: here i am');
const lorasFound: IAIMenuItem[] = [];
foundLoras.forEach((lora) => {
if (lora.name !== ' ') {
const newLoraItem: IAIMenuItem = {
item: renderLoraOption(lora.name),
onClick: () => handleLora(lora.name),
};
lorasFound.push(newLoraItem);
}
});
setLoraItems(lorasFound);
}
}, [foundLoras, loraItems, dispatch, prompt, handleLora, renderLoraOption]);
// Count how many of the discovered LoRA models are currently active
// (i.e. present in lorasInUse). Returns 0 when the model list has not
// been fetched yet.
// Improvement over the original: a single filter pass instead of
// building a throwaway name array and filtering it in a second pass.
const numOfActiveLoras = () => {
  return (
    foundLoras?.filter((lora) => lorasInUse.includes(lora.name)).length ?? 0
  );
};
// Build the menu item list from the discovered LoRA models, skipping
// placeholder entries whose name is a single space.
const makeLoraItems = (): IAIMenuItem[] => {
  if (!foundLoras) return [];
  return foundLoras
    .filter((lora) => lora.name !== ' ')
    .map((lora) => ({
      item: renderLoraOption(lora.name),
      onClick: () => handleLora(lora.name),
    }));
};
return foundLoras && foundLoras?.length > 0 ? (
<IAISimpleMenu
menuItems={loraItems}
menuItems={makeLoraItems()}
menuType="regular"
buttonText={t('modelManager.addLora')}
buttonText={`${t('modelManager.addLora')} (${numOfActiveLoras()})`}
/>
) : (
<Box

View File

@@ -17,6 +17,7 @@ export interface GenerationState {
perlin: number;
prompt: string;
negativePrompt: string;
lorasInUse: string[];
sampler: string;
seamBlur: number;
seamless: boolean;
@@ -48,6 +49,7 @@ const initialGenerationState: GenerationState = {
perlin: 0,
prompt: '',
negativePrompt: '',
lorasInUse: [],
sampler: 'k_lms',
seamBlur: 16,
seamless: false,
@@ -71,12 +73,38 @@ const initialGenerationState: GenerationState = {
const initialState: GenerationState = initialGenerationState;
// True when the prompt already contains a withLora(...) invocation for
// the given LoRA name, with or without an explicit weight argument.
// NOTE(review): the name is interpolated into the pattern unescaped —
// presumably model names never contain regex metacharacters; verify.
const loraExists = (state: GenerationState, lora: string) => {
  const pattern = new RegExp(`withLora\\(${lora},?\\s*([^\\)]+)?\\)`);
  return pattern.test(state.prompt);
};
export const generationSlice = createSlice({
name: 'generation',
initialState,
reducers: {
setPrompt: (state, action: PayloadAction<string | InvokeAI.Prompt>) => {
const newPrompt = action.payload;
// Tackle User Typed Lora Syntax
let lorasInUse = [...state.lorasInUse]; // Get Loras In Prompt
const lora_regex = /withLora\(([^\\)]+)\)/g; // Scan For Lora Syntax
if (typeof newPrompt === 'string') {
const lora_matches = [...newPrompt.matchAll(lora_regex)]; // Match All Lora Syntaxes
if (lora_matches.length > 0) {
lorasInUse = []; // Reset Loras In Use
lora_matches.forEach((lora_match) => {
const lora_name = lora_match[1].split(',')[0];
const lora_weight = lora_match[1].split(',')[1];
if (!lorasInUse.includes(lora_name)) lorasInUse.push(lora_name); // Add Loras In Prompt
});
} else {
lorasInUse = []; // If No Matches, Remove Loras In Use
}
}
state.lorasInUse = lorasInUse;
if (typeof newPrompt === 'string') {
state.prompt = newPrompt;
} else {
@@ -94,6 +122,28 @@ export const generationSlice = createSlice({
state.negativePrompt = promptToString(newPrompt);
}
},
// Toggle a LoRA: if its withLora(...) syntax is already in the prompt,
// strip every occurrence and drop it from lorasInUse; otherwise append
// it to the prompt with a default weight of 1 and record it as in use.
setLorasInUse: (state, action: PayloadAction<string>) => {
  const toggled = action.payload;
  const active = [...state.lorasInUse];
  if (loraExists(state, toggled)) {
    // Already present — remove its syntax from the prompt.
    const pattern = new RegExp(
      `withLora\\(${toggled},?\\s*([^\\)]+)?\\)`,
      'g'
    );
    state.prompt = state.prompt.replaceAll(pattern, '');
    const idx = active.indexOf(toggled);
    if (idx > -1) active.splice(idx, 1);
  } else {
    // Not present — append with the default weight and mark active.
    state.prompt = `${state.prompt} withLora(${toggled},1)`;
    if (!active.includes(toggled)) active.push(toggled);
  }
  state.lorasInUse = active;
},
// Set the number of images generated per invocation.
setIterations: (state, action: PayloadAction<number>) => {
state.iterations = action.payload;
},
@@ -374,6 +424,7 @@ export const {
setPerlin,
setPrompt,
setNegativePrompt,
setLorasInUse,
setSampler,
setSeamBlur,
setSeamless,

View File

@@ -4,6 +4,7 @@ from pathlib import Path
from typing import Optional
from ldm.invoke.globals import global_lora_models_dir
from .kohya_lora_manager import KohyaLoraManager
from typing import Optional
class LoraCondition:
name: str