From 6b774589a5a77a9adf186e2d686891044877e29d Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Wed, 14 Jun 2023 12:48:02 -0400
Subject: [PATCH] Set model to fp16 before loading the state dict to lower ram
 bump.

---
 comfy/sd.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/comfy/sd.py b/comfy/sd.py
index 5237df3a0..db04e0426 100644
--- a/comfy/sd.py
+++ b/comfy/sd.py
@@ -1155,9 +1155,9 @@ def load_checkpoint_guess_config(ckpt_path, output_vae=True, output_clip=True, o
     else:
         model = model_base.BaseModel(unet_config, v_prediction=v_prediction)
 
-    model = load_model_weights(model, sd, verbose=False, load_state_dict_to=load_state_dict_to)
-
     if fp16:
         model = model.half()
 
+    model = load_model_weights(model, sd, verbose=False, load_state_dict_to=load_state_dict_to)
+
     return (ModelPatcher(model), clip, vae, clipvision)