Low vram mode for controlnets.

comfyanonymous
2023-02-17 15:45:29 -05:00
parent 220a72d36b
commit d66415c021
2 changed files with 21 additions and 0 deletions


@@ -102,6 +102,12 @@ def load_model_gpu(model):
def load_controlnet_gpu(models):
    global current_gpu_controlnets
    global vram_state
    if vram_state == LOW_VRAM or vram_state == NO_VRAM:
        #don't load controlnets like this if low vram because they will be loaded right before running and unloaded right after
        return

    for m in current_gpu_controlnets:
        if m not in models:
            m.cpu()
@@ -111,6 +117,19 @@ def load_controlnet_gpu(models):
        current_gpu_controlnets.append(m.cuda())

def load_if_low_vram(model):
    global vram_state
    if vram_state == LOW_VRAM or vram_state == NO_VRAM:
        return model.cuda()
    return model

def unload_if_low_vram(model):
    global vram_state
    if vram_state == LOW_VRAM or vram_state == NO_VRAM:
        return model.cpu()
    return model

def get_free_memory():
    dev = torch.cuda.current_device()
    stats = torch.cuda.memory_stats(dev)
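
The new helpers only do something when vram_state is LOW_VRAM or NO_VRAM; in those states load_controlnet_gpu() skips the eager load, and the caller is expected to move each controlnet onto the GPU right before it runs and back to the CPU right after. A minimal sketch of that call-site pattern, assuming the helpers live in a model_management module and with run_controlnet and its arguments as hypothetical stand-ins for the real sampling code (neither name appears in this diff):

import model_management

def run_controlnet(control_model, x_noisy, t, cond_txt):
    # Move the controlnet onto the GPU only for this call
    # (a no-op unless vram_state is LOW_VRAM or NO_VRAM).
    control_model = model_management.load_if_low_vram(control_model)
    control = control_model(x_noisy, t, cond_txt)
    # Return it to the CPU immediately so the VRAM is free
    # again before the main model runs.
    control_model = model_management.unload_if_low_vram(control_model)
    return control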