Ensure models_memory_reserve is also added alongside inference_memory in the max() calls

This commit is contained in:
Jedrzej Kosinski
2025-08-18 15:34:53 -07:00
parent 63571c6c3d
commit cd54d502fc

View File

@@ -594,11 +594,11 @@ def load_models_gpu(models, memory_required=0, force_patch_weights=False, minimu
inference_memory = minimum_inference_memory()
models_memory_reserve = get_models_memory_reserve(models)
extra_mem = max(inference_memory, memory_required + extra_reserved_memory() + models_memory_reserve)
extra_mem = max(inference_memory + models_memory_reserve, memory_required + extra_reserved_memory() + models_memory_reserve)
if minimum_memory_required is None:
minimum_memory_required = extra_mem
else:
minimum_memory_required = max(inference_memory, minimum_memory_required + extra_reserved_memory() + models_memory_reserve)
minimum_memory_required = max(inference_memory + models_memory_reserve, minimum_memory_required + extra_reserved_memory() + models_memory_reserve)
models = set(models)