From 0e836d525e7f6a3b44704402c954141af324ea47 Mon Sep 17 00:00:00 2001
From: comfyanonymous
Date: Mon, 13 Mar 2023 21:12:48 -0400
Subject: [PATCH] use half() on fp16 models loaded with config.

---
 comfy/sd.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/comfy/sd.py b/comfy/sd.py
index c7e0b073d..61d1916db 100644
--- a/comfy/sd.py
+++ b/comfy/sd.py
@@ -733,6 +733,12 @@ def load_checkpoint(config_path, ckpt_path, output_vae=True, output_clip=True, e
     scale_factor = model_config_params['scale_factor']
     vae_config = model_config_params['first_stage_config']
 
+    fp16 = False
+    if "unet_config" in model_config_params:
+        if "params" in model_config_params["unet_config"]:
+            if "use_fp16" in model_config_params["unet_config"]["params"]:
+                fp16 = model_config_params["unet_config"]["params"]["use_fp16"]
+
     clip = None
     vae = None
 
@@ -754,6 +760,10 @@ def load_checkpoint(config_path, ckpt_path, output_vae=True, output_clip=True, e
     model = instantiate_from_config(config["model"])
     sd = load_torch_file(ckpt_path)
     model = load_model_weights(model, sd, verbose=False, load_state_dict_to=load_state_dict_to)
+
+    if fp16:
+        model = model.half()
+
     return (ModelPatcher(model), clip, vae)
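
Note (not part of the commit): a minimal standalone sketch of the
lookup-and-cast logic this patch adds to load_checkpoint. The config
dict and the Linear module below are hypothetical stand-ins for the
loaded YAML config and the instantiated diffusion model; only the
nested "use_fp16" lookup and the half() cast mirror the patch.

    import torch

    # Hypothetical config dict mirroring the YAML layout the patch inspects.
    model_config_params = {
        "unet_config": {"params": {"use_fp16": True}},
    }

    # Same nested lookup as the patch's chained "in" checks,
    # defaulting to fp32 when use_fp16 is absent.
    fp16 = (
        model_config_params.get("unet_config", {})
        .get("params", {})
        .get("use_fp16", False)
    )

    model = torch.nn.Linear(4, 4)  # stand-in for the real model
    if fp16:
        # half() casts every floating-point parameter and buffer
        # of the module to torch.float16.
        model = model.half()

    print(next(model.parameters()).dtype)  # torch.float16

half() roughly halves the model's weight memory; the patch applies it
only when the config explicitly sets use_fp16, so fp32 checkpoints are
left at full precision.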