Removed v3 resources - needs more time to cook

Jedrzej Kosinski 2025-07-30 13:08:52 -07:00
parent 9177cfd895
commit bd367c8e54
4 changed files with 50 additions and 55 deletions


@@ -9,7 +9,7 @@ from comfy_api.latest._input_impl import VideoFromFile, VideoFromComponents
from comfy_api.latest._util import VideoCodec, VideoContainer, VideoComponents
from comfy_api.latest._io import _IO as io #noqa: F401
from comfy_api.latest._ui import _UI as ui #noqa: F401
-from comfy_api.latest._resources import _RESOURCES as resources #noqa: F401
+# from comfy_api.latest._resources import _RESOURCES as resources #noqa: F401
from comfy_execution.utils import get_executing_context
from comfy_execution.progress import get_progress_state, PreviewImageTuple
from PIL import Image


@@ -6,7 +6,7 @@ from comfy_api.latest import (
)
from typing import Type, TYPE_CHECKING
from comfy_api.internal.async_to_sync import create_sync_class
-from comfy_api.latest import io, ui, resources #noqa: F401
+from comfy_api.latest import io, ui #noqa: F401
class ComfyAPIAdapter_v0_0_2(ComfyAPI_latest):


@@ -1,6 +1,6 @@
import torch
import time
-from comfy_api.latest import io, ui, resources, _io
+from comfy_api.latest import io, ui, _io
import logging # noqa
import folder_paths
import comfy.utils
@@ -86,54 +86,54 @@ class V3TestNode(io.ComfyNode):
        return io.NodeOutput(some_int, image, ui=ui.PreviewImage(image, cls=cls))
-class V3LoraLoader(io.ComfyNode):
-    @classmethod
-    def define_schema(cls):
-        return io.Schema(
-            node_id="V3_LoraLoader",
-            display_name="V3 LoRA Loader",
-            category="v3 nodes",
-            description="LoRAs are used to modify diffusion and CLIP models, altering the way in which latents are denoised such as applying styles. Multiple LoRA nodes can be linked together.",
-            inputs=[
-                io.Model.Input("model", tooltip="The diffusion model the LoRA will be applied to."),
-                io.Clip.Input("clip", tooltip="The CLIP model the LoRA will be applied to."),
-                io.Combo.Input(
-                    "lora_name",
-                    options=folder_paths.get_filename_list("loras"),
-                    tooltip="The name of the LoRA."
-                ),
-                io.Float.Input(
-                    "strength_model",
-                    default=1.0,
-                    min=-100.0,
-                    max=100.0,
-                    step=0.01,
-                    tooltip="How strongly to modify the diffusion model. This value can be negative."
-                ),
-                io.Float.Input(
-                    "strength_clip",
-                    default=1.0,
-                    min=-100.0,
-                    max=100.0,
-                    step=0.01,
-                    tooltip="How strongly to modify the CLIP model. This value can be negative."
-                ),
-            ],
-            outputs=[
-                io.Model.Output(),
-                io.Clip.Output(),
-            ],
-        )
+# class V3LoraLoader(io.ComfyNode):
+#     @classmethod
+#     def define_schema(cls):
+#         return io.Schema(
+#             node_id="V3_LoraLoader",
+#             display_name="V3 LoRA Loader",
+#             category="v3 nodes",
+#             description="LoRAs are used to modify diffusion and CLIP models, altering the way in which latents are denoised such as applying styles. Multiple LoRA nodes can be linked together.",
+#             inputs=[
+#                 io.Model.Input("model", tooltip="The diffusion model the LoRA will be applied to."),
+#                 io.Clip.Input("clip", tooltip="The CLIP model the LoRA will be applied to."),
+#                 io.Combo.Input(
+#                     "lora_name",
+#                     options=folder_paths.get_filename_list("loras"),
+#                     tooltip="The name of the LoRA."
+#                 ),
+#                 io.Float.Input(
+#                     "strength_model",
+#                     default=1.0,
+#                     min=-100.0,
+#                     max=100.0,
+#                     step=0.01,
+#                     tooltip="How strongly to modify the diffusion model. This value can be negative."
+#                 ),
+#                 io.Float.Input(
+#                     "strength_clip",
+#                     default=1.0,
+#                     min=-100.0,
+#                     max=100.0,
+#                     step=0.01,
+#                     tooltip="How strongly to modify the CLIP model. This value can be negative."
+#                 ),
+#             ],
+#             outputs=[
+#                 io.Model.Output(),
+#                 io.Clip.Output(),
+#             ],
+#         )
-    @classmethod
-    def execute(cls, model: io.Model.Type, clip: io.Clip.Type, lora_name: str, strength_model: float, strength_clip: float, **kwargs):
-        if strength_model == 0 and strength_clip == 0:
-            return io.NodeOutput(model, clip)
+#     @classmethod
+#     def execute(cls, model: io.Model.Type, clip: io.Clip.Type, lora_name: str, strength_model: float, strength_clip: float, **kwargs):
+#         if strength_model == 0 and strength_clip == 0:
+#             return io.NodeOutput(model, clip)
-        lora = cls.resources.get(resources.TorchDictFolderFilename("loras", lora_name))
+#         lora = cls.resources.get(resources.TorchDictFolderFilename("loras", lora_name))
-        model_lora, clip_lora = comfy.sd.load_lora_for_models(model, clip, lora, strength_model, strength_clip)
-        return io.NodeOutput(model_lora, clip_lora)
+#         model_lora, clip_lora = comfy.sd.load_lora_for_models(model, clip, lora, strength_model, strength_clip)
+#         return io.NodeOutput(model_lora, clip_lora)
class NInputsTest(io.ComfyNode):
@@ -276,7 +276,7 @@ class V3DummyEndInherit(V3DummyEnd):
NODES_LIST: list[type[io.ComfyNode]] = [
    V3TestNode,
-    V3LoraLoader,
+    # V3LoraLoader,
    NInputsTest,
    V3TestSleep,
    V3DummyStart,


@@ -33,7 +33,7 @@ from comfy_execution.validation import validate_node_input
from comfy_execution.progress import get_progress_state, reset_progress_state, add_progress_handler, WebUIProgressHandler
from comfy_execution.utils import CurrentNodeContext
from comfy_api.internal import _ComfyNodeInternal, _NodeOutputInternal, first_real_override, is_class, make_locked_method_func
-from comfy_api.latest import io, resources
+from comfy_api.latest import io
class ExecutionResult(Enum):
@@ -256,11 +256,6 @@ async def _async_map_node_over_list(prompt_id, unique_id, obj, input_data_all, f
                type_obj = type(obj)
                type_obj.VALIDATE_CLASS()
                class_clone = type_obj.PREPARE_CLASS_CLONE(hidden_inputs)
-                # NOTE: this is a mock of resource management; for local, just stores ResourcesLocal on node instance
-                if hasattr(obj, "local_resources"):
-                    if obj.local_resources is None:
-                        obj.local_resources = resources.ResourcesLocal()
-                    class_clone.resources = obj.local_resources
                f = make_locked_method_func(type_obj, func, class_clone)
            # V1
            else:
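
For context: the code backed out above let a v3 node fetch a LoRA state dict through cls.resources.get(resources.TorchDictFolderFilename("loras", lora_name)), with execution.py attaching a ResourcesLocal store to the node instance. Below is a minimal, illustrative sketch of the direct-loading path a node can fall back to while that API is parked; it is not the planned resources design. The names load_lora_state_dict, apply_lora, and _LORA_CACHE are hypothetical, and the sketch assumes the usual folder_paths.get_full_path and comfy.utils.load_torch_file helpers alongside the comfy.sd.load_lora_for_models call already shown in the diff.

import comfy.sd
import comfy.utils
import folder_paths

# Hypothetical module-level cache standing in for what a ResourcesLocal store would hold.
_LORA_CACHE: dict[str, dict] = {}

def load_lora_state_dict(lora_name: str) -> dict:
    # Resolve the file under the "loras" folder and load it once, reusing the cached copy afterwards.
    if lora_name not in _LORA_CACHE:
        lora_path = folder_paths.get_full_path("loras", lora_name)
        _LORA_CACHE[lora_name] = comfy.utils.load_torch_file(lora_path, safe_load=True)
    return _LORA_CACHE[lora_name]

def apply_lora(model, clip, lora_name: str, strength_model: float, strength_clip: float):
    # Mirrors the commented-out execute() body above, minus the resources indirection.
    if strength_model == 0 and strength_clip == 0:
        return model, clip
    lora = load_lora_state_dict(lora_name)
    return comfy.sd.load_lora_for_models(model, clip, lora, strength_model, strength_clip)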