Reduce more memories on free_gpu_mem option (#1915)

* Enhance free_gpu_mem option
Unload cond_stage_model when the free_gpu_mem option is set

* Enhance free_gpu_mem option
Unload cond_stage_model when the free_gpu_mem option is set
This commit is contained in:
rmagur1203
2022-12-12 03:49:55 +09:00
committed by GitHub
parent f745f78cb3
commit bd0c0d77d2
3 changed files with 17 additions and 3 deletions

View File

@@ -6,6 +6,7 @@ import torch
import numpy as np
from ldm.invoke.generator.base import Generator
from ldm.models.diffusion.shared_invokeai_diffusion import InvokeAIDiffuserComponent
import gc
class Txt2Img(Generator):
@@ -55,7 +56,11 @@ class Txt2Img(Generator):
)
if self.free_gpu_mem:
self.model.model.to("cpu")
self.model.model.to('cpu')
self.model.cond_stage_model.device = 'cpu'
self.model.cond_stage_model.to('cpu')
gc.collect()
torch.cuda.empty_cache()
return self.sample_to_image(samples)

View File

@@ -100,7 +100,11 @@ class Txt2Img2Img(Generator):
)
if self.free_gpu_mem:
self.model.model.to("cpu")
self.model.model.to('cpu')
self.model.cond_stage_model.device = 'cpu'
self.model.cond_stage_model.to('cpu')
gc.collect()
torch.cuda.empty_cache()
return self.sample_to_image(samples)
@@ -142,7 +146,7 @@ class Txt2Img2Img(Generator):
**kwargs
)
return result[0][0]
if sampler.uses_inpainting_model():
return inpaint_make_image
else: