From cf0098d5398cc330ffa8af9a0472ad1b8610515a Mon Sep 17 00:00:00 2001
From: Francesco Yoshi Gobbo
Date: Mon, 27 Mar 2023 04:51:18 +0200
Subject: [PATCH] no lowvram state if cpu only

---
 comfy/model_management.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/comfy/model_management.py b/comfy/model_management.py
index 0d5702b91..d9498e29f 100644
--- a/comfy/model_management.py
+++ b/comfy/model_management.py
@@ -22,7 +22,8 @@ try:
     total_vram = torch.cuda.mem_get_info(torch.cuda.current_device())[1] / (1024 * 1024)
     total_ram = psutil.virtual_memory().total / (1024 * 1024)
     forced_normal_vram = "--normalvram" in sys.argv
-    if not forced_normal_vram:
+    forced_cpu = "--cpu" in sys.argv
+    if not forced_normal_vram and not forced_cpu:
         if total_vram <= 4096:
             print("Trying to enable lowvram mode because your GPU seems to have 4GB or less. If you don't want this use: --normalvram")
             set_vram_to = LOW_VRAM
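
For reference, a minimal standalone sketch of how the check behaves once this patch is applied. The core lines mirror the diff; the LOW_VRAM/NORMAL_VRAM placeholder constants, the initial set_vram_to value, and the bare except are assumptions standing in for the surrounding module code in comfy/model_management.py, not the upstream implementation itself:

    import sys
    import psutil
    import torch

    # Assumed placeholders for the VRAM-state constants used by model_management.py.
    LOW_VRAM = 2
    NORMAL_VRAM = 3
    set_vram_to = NORMAL_VRAM

    try:
        total_vram = torch.cuda.mem_get_info(torch.cuda.current_device())[1] / (1024 * 1024)
        total_ram = psutil.virtual_memory().total / (1024 * 1024)
        forced_normal_vram = "--normalvram" in sys.argv
        forced_cpu = "--cpu" in sys.argv
        # With --cpu on the command line, the low-VRAM heuristic is skipped entirely,
        # so a CPU-only run never ends up in the LOW_VRAM state.
        if not forced_normal_vram and not forced_cpu:
            if total_vram <= 4096:
                print("Trying to enable lowvram mode because your GPU seems to have 4GB or less. If you don't want this use: --normalvram")
                set_vram_to = LOW_VRAM
    except Exception:
        # Assumed fallback: without a usable CUDA device the query above fails
        # and the default VRAM state is kept.
        pass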