Don't stop workflow if loading embedding fails.

comfyanonymous 2023-04-14 13:54:00 -04:00
parent 61e7767ca6
commit 334aab05e5

comfy/sd1_clip.py

@@ -2,6 +2,7 @@ import os
 from transformers import CLIPTokenizer, CLIPTextModel, CLIPTextConfig
 import torch
+import traceback
 
 class ClipTokenWeightEncoder:
     def encode_token_weights(self, token_weight_pairs):
@@ -194,6 +195,7 @@ def load_embed(embedding_name, embedding_directory):
 
     embed_path = valid_file
 
+    try:
         if embed_path.lower().endswith(".safetensors"):
             import safetensors.torch
             embed = safetensors.torch.load_file(embed_path, device="cpu")
@@ -202,6 +204,12 @@ def load_embed(embedding_name, embedding_directory):
                 embed = torch.load(embed_path, weights_only=True, map_location="cpu")
             else:
                 embed = torch.load(embed_path, map_location="cpu")
+    except Exception as e:
+        print(traceback.format_exc())
+        print()
+        print("error loading embedding, skipping loading:", embedding_name)
+        return None
+
     if 'string_to_param' in embed:
         values = embed['string_to_param'].values()
     else:
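
The net effect of the patch: load_embed returns None when a file fails to deserialize instead of letting the exception propagate and abort the queued prompt, so the caller can log the traceback and skip just that embedding. A minimal standalone sketch of the pattern follows; the caller and the file path are hypothetical, not ComfyUI's actual surrounding code:

import traceback
import torch

def load_embed(embed_path, embedding_name):
    # Wrap deserialization so one corrupt or incompatible file
    # can't take down the whole workflow.
    try:
        if embed_path.lower().endswith(".safetensors"):
            import safetensors.torch
            embed = safetensors.torch.load_file(embed_path, device="cpu")
        else:
            embed = torch.load(embed_path, map_location="cpu")
    except Exception:
        print(traceback.format_exc())
        print("error loading embedding, skipping loading:", embedding_name)
        return None  # signal "skip this embedding" instead of raising
    return embed

# Hypothetical caller: a None result means the embedding is ignored
# and prompt processing continues.
embed = load_embed("embeddings/broken.pt", "broken")
if embed is None:
    print("embedding skipped, workflow continues")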