Skip to content

Commit

Permalink
SwarmLoraLoader to allow loras in custom workflows
Browse files Browse the repository at this point in the history
for #130
  • Loading branch information
mcmonkey4eva committed Oct 20, 2023
1 parent 0046b7b commit 4d13e81
Show file tree
Hide file tree
Showing 3 changed files with 61 additions and 0 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -442,6 +442,8 @@ function comfyBuildParams(callback) {
claimOnce('KSamplerAdvanced', 'comfyui_scheduler', 'scheduler', false);
claimOnce('KSamplerAdvanced', 'cfg_scale', 'cfg', true);
claimOnce('LoadImage', 'initimage', 'image', false);
claimOnce('SwarmLoraLoader', 'loras', 'lora_names', false);
claimOnce('SwarmLoraLoader', 'loraweights', 'lora_weights', false);
if (node.class_type == 'CLIPTextEncode' && groupLabel.startsWith("Positive Prompt") && !defaultParamsRetain.includes('prompt') && typeof node.inputs.text == 'string') {
defaultParamsRetain.push('prompt');
defaultParamValue['prompt'] = node.inputs.text;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -374,6 +374,10 @@ string fillDynamic()
{
return image.AsBase64;
}
else if (val is List<string> list)
{
return list.JoinString(",");
}
return val.ToString();
}
long fixSeed(long input)
Expand Down
55 changes: 55 additions & 0 deletions src/BuiltinExtensions/ComfyUIBackend/ExtraNodes/SwarmLoraLoader.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
import comfy
import folder_paths

class SwarmLoraLoader:
    """ComfyUI node that applies a comma-separated list of LoRAs to a model/clip pair.

    Used by StableSwarmUI custom workflows: `lora_names` and `lora_weights` are
    parallel comma-separated strings (one weight per name).
    """

    def __init__(self):
        # Single-slot cache of the most recently loaded LoRA file, as a
        # (lora_path, lora_data) tuple, mirroring ComfyUI's default LoraLoader.
        # NOTE: with multiple LoRAs per call only the last one stays cached.
        self.loaded_lora = None

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "model": ("MODEL", ),
                "clip": ("CLIP", ),
                "lora_names": ("STRING", {"multiline": True}),
                "lora_weights": ("STRING", {"multiline": True})
            }
        }

    CATEGORY = "StableSwarmUI"
    RETURN_TYPES = ("MODEL", "CLIP")
    FUNCTION = "load_loras"

    def load_loras(self, model, clip, lora_names, lora_weights):
        """Apply each named LoRA at its matching weight.

        Args:
            model: the MODEL to patch.
            clip: the CLIP to patch.
            lora_names: comma-separated LoRA file names (may be blank for none).
            lora_weights: comma-separated float weights, one per name.

        Returns:
            (model, clip) tuple with all non-zero-weight LoRAs applied.

        Raises:
            ValueError: if the number of weights does not match the number of names.
        """
        if lora_names.strip() == "":
            return (model, clip)

        names = [x.strip() for x in lora_names.split(",")]
        weights = [float(x.strip()) for x in lora_weights.split(",")]
        # Fail loudly on mismatched inputs instead of an opaque IndexError mid-loop.
        if len(weights) != len(names):
            raise ValueError(f"LoRA input mismatch: {len(names)} names but {len(weights)} weights")

        for lora_name, weight in zip(names, weights):
            # Skip blank entries (e.g. trailing comma) and zero-weight LoRAs.
            if lora_name == "" or weight == 0:
                continue
            # This section copied directly from default comfy LoraLoader
            lora_path = folder_paths.get_full_path("loras", lora_name)
            lora = None
            if self.loaded_lora is not None:
                if self.loaded_lora[0] == lora_path:
                    lora = self.loaded_lora[1]
                else:
                    # Drop the stale cache entry before loading a different file.
                    temp = self.loaded_lora
                    self.loaded_lora = None
                    del temp
            if lora is None:
                lora = comfy.utils.load_torch_file(lora_path, safe_load=True)
                self.loaded_lora = (lora_path, lora)
            model, clip = comfy.sd.load_lora_for_models(model, clip, lora, weight, weight)

        return (model, clip)

# Registration table read by ComfyUI at startup: maps the node's type name
# (as referenced by workflow JSON `class_type`) to its implementing class.
NODE_CLASS_MAPPINGS = {
    "SwarmLoraLoader": SwarmLoraLoader,
}

0 comments on commit 4d13e81

Please sign in to comment.