Replace prints with logging and add --verbose argument.

Author: comfyanonymous
Date:   2024-03-10 11:37:08 -04:00
Parent: 4656273e72
Commit: 65397ce601

12 changed files with 90 additions and 65 deletions
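The --verbose flag named in the commit message is wired up elsewhere in the commit; the hunks below only show the print-to-logging conversions. A minimal sketch of how such a flag typically maps onto the logging module follows; the argument name comes from the commit message, while the format string and level choices are assumptions, not code from this commit:

    # Hypothetical sketch: a --verbose flag that switches the root logger to DEBUG.
    import argparse
    import logging

    parser = argparse.ArgumentParser()
    parser.add_argument("--verbose", action="store_true", help="Enable debug logging.")
    args = parser.parse_args()

    # With --verbose, DEBUG records are emitted; otherwise INFO and above.
    logging.basicConfig(
        format="%(levelname)s: %(message)s",
        level=logging.DEBUG if args.verbose else logging.INFO,
    )

    logging.debug("only shown with --verbose")
    logging.info("always shown")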


@@ -8,6 +8,7 @@ import zipfile
 from . import model_management
 import comfy.clip_model
 import json
+import logging

 def gen_empty_tokens(special_tokens, length):
     start_token = special_tokens.get("start", None)
@@ -137,7 +138,7 @@ class SDClipModel(torch.nn.Module, ClipTokenWeightEncoder):
                         tokens_temp += [next_new_token]
                         next_new_token += 1
                     else:
-                        print("WARNING: shape mismatch when trying to apply embedding, embedding will be ignored", y.shape[0], current_embeds.weight.shape[1])
+                        logging.warning("WARNING: shape mismatch when trying to apply embedding, embedding will be ignored {} != {}".format(y.shape[0], current_embeds.weight.shape[1]))
             while len(tokens_temp) < len(x):
                 tokens_temp += [self.special_tokens["pad"]]
             out_tokens += [tokens_temp]
@@ -329,9 +330,7 @@ def load_embed(embedding_name, embedding_directory, embedding_size, embed_key=No
         else:
             embed = torch.load(embed_path, map_location="cpu")
     except Exception as e:
-        print(traceback.format_exc())
-        print()
-        print("error loading embedding, skipping loading:", embedding_name)
+        logging.warning("{}\n\nerror loading embedding, skipping loading: {}".format(traceback.format_exc(), embedding_name))
         return None

     if embed_out is None:
@@ -422,7 +421,7 @@ class SDTokenizer:
                     embedding_name = word[len(self.embedding_identifier):].strip('\n')
                     embed, leftover = self._try_get_embedding(embedding_name)
                     if embed is None:
-                        print(f"warning, embedding:{embedding_name} does not exist, ignoring")
+                        logging.warning(f"warning, embedding:{embedding_name} does not exist, ignoring")
                     else:
                         if len(embed.shape) == 1:
                             tokens.append([(embed, weight)])
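One readability note on the converted calls: they build the message eagerly with str.format(). The logging module also accepts lazy %-style arguments, which defers string construction until a handler actually emits the record. A small self-contained sketch of that alternative (not what this commit uses; the placeholder values stand in for y.shape[0] and current_embeds.weight.shape[1]):

    import logging

    # Placeholder shapes; in the diff these come from the embedding tensors.
    a, b = 768, 1024
    # The string is only formatted if the WARNING level is actually emitted.
    logging.warning("shape mismatch when trying to apply embedding, embedding will be ignored %s != %s", a, b)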