fix prompt handling: load LoRA before prompt conditioning

This commit is contained in:
neecapp
2023-02-20 23:06:05 -05:00
parent de89041779
commit 3732af63e8

View File

@@ -513,6 +513,10 @@ class Generate:
except AttributeError:
pass
# lora MUST process prompt before conditioning
if self.model.lora_manager:
self.model.lora_manager.load_lora()
try:
uc, c, extra_conditioning_info = get_uc_and_c_and_ec(
prompt,
@@ -549,9 +553,6 @@ class Generate:
else None
)
if self.model.lora_manager:
self.model.lora_manager.load_lora()
results = generator.generate(
prompt,
iterations=iterations,