Merge branch 'main' into add_lora_support

This commit is contained in:
Jordan
2023-02-21 18:35:11 -08:00
committed by GitHub
4 changed files with 8 additions and 3 deletions

View File

@@ -126,7 +126,7 @@ class Generator:
seed = self.new_seed()
# Free up memory from the last generation.
clear_cuda_cache = kwargs['clear_cuda_cache'] or None
clear_cuda_cache = kwargs['clear_cuda_cache'] if 'clear_cuda_cache' in kwargs else None
if clear_cuda_cache is not None:
clear_cuda_cache()

View File

@@ -333,7 +333,7 @@ class StableDiffusionGeneratorPipeline(StableDiffusionPipeline):
16 * \
latents.size(dim=2) * latents.size(dim=3) * latents.size(dim=2) * latents.size(dim=3) * \
bytes_per_element_needed_for_baddbmm_duplication
if max_size_required_for_baddbmm > (mem_free * 3.3 / 4.0): # 3.3 / 4.0 is from old Invoke code
if max_size_required_for_baddbmm > (mem_free * 3.0 / 4.0): # 3.0 / 4.0; lowered from the 3.3 used in old Invoke code
self.enable_attention_slicing(slice_size='max')
else:
self.disable_attention_slicing()

View File

@@ -346,6 +346,7 @@ class Embiggen(Generator):
newinitimage = torch.from_numpy(newinitimage)
newinitimage = 2.0 * newinitimage - 1.0
newinitimage = newinitimage.to(self.model.device)
clear_cuda_cache = kwargs['clear_cuda_cache'] if 'clear_cuda_cache' in kwargs else None
tile_results = gen_img2img.generate(
prompt,
@@ -363,6 +364,7 @@ class Embiggen(Generator):
init_image = newinitimage, # notice that init_image is different from init_img
mask_image = None,
strength = strength,
clear_cuda_cache = clear_cuda_cache
)
emb_tile_store.append(tile_results[0][0])