Fix an issue with diffusers>0.19.3 (#1775)

This commit is contained in:
gpetters94
2023-08-18 14:06:06 -04:00
committed by GitHub
parent 8738571d1e
commit 79bd0b84a1
2 changed files with 3 additions and 3 deletions

View File

@@ -34,7 +34,7 @@ from PIL import Image
from tqdm.auto import tqdm
from transformers import CLIPFeatureExtractor, CLIPTextModel, CLIPTokenizer
from diffusers.loaders import AttnProcsLayers
-from diffusers.models.cross_attention import LoRACrossAttnProcessor
+from diffusers.models.attention_processor import LoRAXFormersAttnProcessor
import torch_mlir
from torch_mlir.dynamo import make_simple_dynamo_backend
@@ -287,7 +287,7 @@ def lora_train(
block_id = int(name[len("down_blocks.")])
hidden_size = unet.config.block_out_channels[block_id]
-lora_attn_procs[name] = LoRACrossAttnProcessor(
+lora_attn_procs[name] = LoRAXFormersAttnProcessor(
hidden_size=hidden_size,
cross_attention_dim=cross_attention_dim,
)

View File

@@ -19,7 +19,7 @@ parameterized
# Add transformers, diffusers and scipy since it most commonly used
transformers
-diffusers==0.19.3
+diffusers
#accelerate is now required for diffusers import from ckpt.
accelerate
scipy