Allow different models to estimate memory usage differently.

This commit is contained in:
comfyanonymous
2023-11-12 04:02:16 -05:00
parent 2c9dba8dc0
commit dd4ba68b6e
5 changed files with 19 additions and 26 deletions

View File

@@ -83,7 +83,7 @@ def prepare_sampling(model, noise_shape, positive, negative, noise_mask):
     real_model = None
     models, inference_memory = get_additional_models(positive, negative, model.model_dtype())
-    comfy.model_management.load_models_gpu([model] + models, comfy.model_management.batch_area_memory(noise_shape[0] * noise_shape[2] * noise_shape[3]) + inference_memory)
+    comfy.model_management.load_models_gpu([model] + models, model.memory_required(noise_shape) + inference_memory)
     real_model = model.model
     return real_model, positive, negative, noise_mask, models