preliminary kolors integration, custom pipeline improvements #11
Teriks committed Jan 30, 2025
1 parent e93a9b2 commit 63b2c91
Showing 48 changed files with 1,073 additions and 460 deletions.
1 change: 1 addition & 0 deletions dgenerate/console/recipes/1.recipe
@@ -11,6 +11,7 @@ title: Stable Diffusion 3
--model-type torch-sd3 {{ auth_token }}
@dropdown[{"label":"Model dtype", "arg":"--dtype", "options":["float16", "float32"], "default":"float16"}]
@dropdown[{"label":"Model variant", "arg":"--variant", "options":["fp16"], "default":"fp16"}]
@karrasscheduler[{"filter": ["FlowMatchEulerDiscreteScheduler"]}]
@switchradio[{"labels":["Model CPU Offload", "Model Sequential Offload"], "args":["--model-cpu-offload", "--model-sequential-offload"], "default":1, "divider-after":true}]
@torchvae[{"label":"VAE Path / URI", "dir":true, "file-types":["models"]}]
@switch[{"label":"VAE Slicing", "arg":"--vae-slicing", "divider-after":true}]
97 changes: 72 additions & 25 deletions dgenerate/console/recipes/10.recipe
@@ -1,26 +1,73 @@
title: Stable Cascade (UNet lite)

@uri[{"label":"Model Path / HF Slug", "dir":true, "default": "stabilityai/stable-cascade-prior", "optional":false, "file-types":["models"]}]
@switchradio[{"labels":["Model CPU Offload", "Model Sequential Offload"], "args":["--model-cpu-offload", "--model-sequential-offload"], "default":0, "divider-after":true}]
--model-type torch-s-cascade
--variant bf16
--dtype bfloat16
@uri[{"label":"Decoder Path / URI", "dir":true, "arg":"--s-cascade-decoder", "default":"stabilityai/stable-cascade;dtype=float16", "file-types":["models"]}]
@switchradio[{"labels":["Decoder CPU Offload", "Decoder Sequential Offload"], "args":["--s-cascade-decoder-cpu-offload", "--s-cascade-decoder-sequential-offload"], "default":0, "divider-after":true}]
@dir[{"label":"UNet Directory / URI", "arg":"--unet", "default":"stabilityai/stable-cascade-prior;subfolder=prior_lite"}]
@dir[{"label":"Decoder UNet Directory / URI", "arg":"--unet2", "default":"stabilityai/stable-cascade;subfolder=decoder_lite", "divider-after":true}]
@file[{"label":"Image Seed", "arg":"--image-seeds", "file-types":["images-in", "videos-in"]}]
@imageprocessor[{"arg":"--seed-image-processors", "label":"Seed Image Processor", "divider-after":true}]
@int[{"label":"Inference Steps", "arg":"--inference-steps", "default":20, "min":1}]
@float[{"label":"Guidance Scale", "arg":"--guidance-scales", "default":4, "min":0}]
@int[{"label":"Decoder Inference Steps", "arg":"--s-cascade-decoder-inference-steps", "default":10, "min":1}]
@float[{"label":"Decoder Guidance Scale", "arg":"--s-cascade-decoder-guidance-scales", "default":0, "min":0}]
@seeds[{"label":"Seeds"}]
@int[{"label":"Batch Size", "arg":"--batch-size", "default":"", "min":1}]
@imagesize[{"label":"Batch Grid Size (CxR)", "arg":"--batch-grid-size", "default":"", "divider-after":true}]
@dir[{"label":"Output Directory", "arg":"--output-path", "default":"output"}]
@imagesize[{"label":"Output Size (WxH)", "arg":"--output-size", "default":"1024x1024"}]
@dropdown[{"label":"Prompt Weighter", "arg":"--prompt-weighter", "options":["compel", "compel;syntax=sdwui", "sd-embed"]}]
title: Deep Floyd

# DeepFloyd requires a multistage generation process involving
# multiple models and more advanced use of dgenerate

# You need a huggingface account (http://huggingface.co) and to
# request access to the models at (https://huggingface.co/DeepFloyd)
# in order for dgenerate to be able to download the required models

# Once you have done this, provide your access token
# from (https://huggingface.co/settings/tokens)

# Or set the environment variable HF_TOKEN on your system

\set prompt "add your prompt here"

\setp auth_token "@string[{"label": "Hugging Face Auth Token", "default":"$HF_TOKEN", "optional":false}]"

\set device @device[{"optional":false}]

\set output_dir @dir[{"label":"Output Directory", "arg":"--output-path", "default":"output", "optional":false, "divider-after":true}]

\set auth_token {{ '--auth-token ' + quote(auth_token) if auth_token else '' }}

@uri[{"label":"Stage 1 Model Path / HF Slug", "default": "DeepFloyd/IF-I-M-v1.0", "optional":false, "dir":true, "file-types":["models"]}]
@karrasscheduler[{"label":"Stage 1 Scheduler", "filter": ["DDPMScheduler"]}]
--variant fp16
--dtype float16
--model-type torch-if
--model-sequential-offload
@int[{"label":"Stage 1 Inference Steps", "arg":"--inference-steps", "default":60, "min":1}]
@float[{"label":"Stage 1 Guidance Scale", "arg":"--guidance-scales", "default":7, "min":0}]
--output-size 64
@seeds[{"label":"Seeds", "divider-after":true}]
--prompts {{ prompt }}
--output-prefix stage1 {{ device }} {{ output_dir }} {{ auth_token }}

\save_modules stage_1_modules feature_extractor

@uri[{"label":"Stage 2 Model Path / HF Slug", "default": "DeepFloyd/IF-II-M-v1.0", "optional":false, "dir":true, "file-types":["models"]}]
@karrasscheduler[{"label":"Stage 2 Scheduler", "filter": ["DDPMScheduler"]}]
--variant fp16
--dtype float16
--model-type torch-ifs
--model-sequential-offload
@int[{"label":"Stage 2 Inference Steps", "arg":"--inference-steps", "default":30, "min":1}]
@float[{"label":"Stage 2 Guidance Scale", "arg":"--guidance-scales", "default":4, "min":0}]
@int[{"label":"Stage 2 Upscaler Noise Level", "arg":"--upscaler-noise-levels", "default":250, "min":1, "divider-after":true}]
--prompts {{ format_prompt(last_prompts) }}
--seeds {{ last_seeds | join(' ') }}
--seeds-to-images
--image-seeds {{ quote(last_images) }}
--output-prefix stage2 {{ device }} {{ output_dir }} {{ auth_token }}

\use_modules stage_1_modules

@uri[{"label":"Stage 3 - x4 Upscaler Model Path / HF Slug", "default": "stabilityai/stable-diffusion-x4-upscaler", "optional":false, "dir":true, "file-types":["models"]}]
--variant fp16
--dtype float16
--model-type torch-upscaler-x4
@karrasscheduler[{"label":"Stage 3 Scheduler"}]
@torchvae[{"label":"Stage 3 VAE File / URI", "dir":true, "file-types":["models"]}]
@int[{"label":"Stage 3 Inference Steps", "arg":"--inference-steps", "default":30, "min":1}]
@float[{"label":"Stage 3 Guidance Scale", "arg":"--guidance-scales", "default":9, "min":0}]
--prompts {{ format_prompt(last_prompts) }}
--seeds {{ last_seeds | join(' ') }}
--seeds-to-images
--image-seeds {{ quote(last_images) }}
@int[{"label":"Stage 3 Upscaler Noise Level", "arg":"--upscaler-noise-levels", "default":20, "min":1, "divider-after":true}]
@imageprocessor[{"arg":"--post-processors", "label":"Post Processor"}]
@device[{}]
--prompts "add your prompt here"
--output-prefix stage3 {{ device }} {{ output_dir }} {{ auth_token }}

\clear_modules stage_1_modules
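
The Deep Floyd recipe above chains three invocations in one config: \save_modules / \use_modules / \clear_modules carry saved pipeline components (here the feature extractor) from stage 1 into a later stage, while the last_prompts, last_seeds, and last_images template variables feed each stage's results into the next one. The following is a stripped-down, two-stage sketch of the output-chaining part only, built from directives and arguments that already appear in the recipe above; the auth-token, device, and output-directory plumbing is left out, so read it as an illustration of the pattern rather than a drop-in replacement for the recipe.

\set prompt "add your prompt here"

# stage 1: base Deep Floyd model, 64x64 output

DeepFloyd/IF-I-M-v1.0
--model-type torch-if
--model-sequential-offload
--variant fp16
--dtype float16
--inference-steps 60
--output-size 64
--prompts {{ prompt }}
--output-prefix stage1

# stage 2: the super-resolution stage consumes the prompts, seeds,
# and images produced by stage 1

DeepFloyd/IF-II-M-v1.0
--model-type torch-ifs
--model-sequential-offload
--variant fp16
--dtype float16
--inference-steps 30
--upscaler-noise-levels 250
--prompts {{ format_prompt(last_prompts) }}
--seeds {{ last_seeds | join(' ') }}
--seeds-to-images
--image-seeds {{ quote(last_images) }}
--output-prefix stage2
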
45 changes: 19 additions & 26 deletions dgenerate/console/recipes/11.recipe
@@ -1,35 +1,28 @@
title: Flux (Schnell)
title: Stable Cascade

# Flux requires a huggingface auth token to access
# you must request access to the repository

\setp auth_token "@string[{"label": "Hugging Face Auth Token", "default":"$HF_TOKEN", "optional":false}]"

\set auth_token {{ '--auth-token ' + quote(auth_token) if auth_token else '' }}

@uri[{"label":"Model Path / HF Slug", "dir":true, "default": "black-forest-labs/FLUX.1-schnell", "optional":false, "file-types":["models"]}]
--model-type torch-flux {{ auth_token }}
@dropdown[{"label":"Model dtype", "arg":"--dtype", "options":["bfloat16", "float16", "float32"], "default":"bfloat16"}]
@switchradio[{"labels":["Model CPU Offload", "Model Sequential Offload"], "args":["--model-cpu-offload", "--model-sequential-offload"], "default":1, "divider-after":true}]
@torchvae[{"label":"VAE Path / URI", "dir":true, "file-types":["models"]}]
@switch[{"label":"VAE Tiling", "arg":"--vae-tiling"}]
@switch[{"label":"VAE Slicing", "arg":"--vae-slicing", "divider-after":true}]
@uriwithfloat[{"label":"LoRA Path / URI", "float-label":"LoRA Scale", "arg":"--loras", "float-arg":"scale", "dir":true, "file-types":["models"]}]
@fluxcontrolnet[{"label":"ControlNet Path / URI", "float-label":"ControlNet Scale", "mode-label":"ControlNet Union Mode", "arg":"--control-nets", "float-arg":"scale", "dir":true, "file-types":["models"]}]
@uri[{"label":"Transformer Path / URI", "dir":true, "arg":"--transformer", "file-types":["models"], "divider-after":true}]
@imageseed[{"label":"Image Seed", "arg":"--image-seeds", "file-types":["images-in", "videos-in"], "float-label":"Image Seed Strength", "float-arg":"--image-seed-strengths", "min":0.01, "max":1, "default":"", "float":""}]
@switch[{"label":"No Aspect Correction?", "arg":"--no-aspect"}]
@imageprocessor[{"arg":"--seed-image-processors", "label":"Seed Image Processor"}]
@imageprocessor[{"arg":"--mask-image-processors", "label":"Inpaint Mask Processor"}]
@imageprocessor[{"arg":"--control-image-processors", "label":"Control Image Processor", "divider-after":true}]
@int[{"label":"Inference Steps", "arg":"--inference-steps", "default":4, "min":1}]
@float[{"label":"Guidance Scale", "arg":"--guidance-scales", "default":0, "min":0}]
@uri[{"label":"Model Path / HF Slug", "dir":true, "default": "stabilityai/stable-cascade-prior", "optional":false, "file-types":["models"]}]
@karrasscheduler[{"filter": ["DDPMWuerstchenScheduler"]}]
@switchradio[{"labels":["Model CPU Offload", "Model Sequential Offload"], "args":["--model-cpu-offload", "--model-sequential-offload"], "default":0, "divider-after":true}]
--model-type torch-s-cascade
--variant bf16
--dtype bfloat16
@uri[{"label":"Decoder Path / URI", "dir":true, "arg":"--s-cascade-decoder", "default":"stabilityai/stable-cascade;dtype=float16", "file-types":["models"]}]
@karrasscheduler[{"label": "Decoder Scheduler", "arg": "--s-cascade-decoder-scheduler", "filter": ["DDPMWuerstchenScheduler"]}]
@switchradio[{"labels":["Decoder CPU Offload", "Decoder Sequential Offload"], "args":["--s-cascade-decoder-cpu-offload", "--s-cascade-decoder-sequential-offload"], "default":0, "divider-after":true}]
@dir[{"label":"UNet Directory / URI", "arg":"--unet"}]
@dir[{"label":"Decoder UNet Directory / URI", "arg":"--unet2", "divider-after":true}]
@file[{"label":"Image Seed", "arg":"--image-seeds", "file-types":["images-in", "videos-in"]}]
@imageprocessor[{"arg":"--seed-image-processors", "label":"Seed Image Processor", "divider-after":true}]
@int[{"label":"Inference Steps", "arg":"--inference-steps", "default":20, "min":1}]
@float[{"label":"Guidance Scale", "arg":"--guidance-scales", "default":4, "min":0}]
@int[{"label":"Decoder Inference Steps", "arg":"--s-cascade-decoder-inference-steps", "default":10, "min":1}]
@float[{"label":"Decoder Guidance Scale", "arg":"--s-cascade-decoder-guidance-scales", "default":0, "min":0}]
@seeds[{"label":"Seeds"}]
@int[{"label":"Batch Size", "arg":"--batch-size", "default":"", "min":1}]
@imagesize[{"label":"Batch Grid Size (CxR)", "arg":"--batch-grid-size", "default":"", "divider-after":true}]
@dir[{"label":"Output Directory", "arg":"--output-path", "default":"output"}]
@imagesize[{"label":"Output Size (WxH)", "arg":"--output-size", "default":"1024x1024"}]
@dropdown[{"label":"Prompt Weighter", "arg":"--prompt-weighter", "options":["sd-embed"]}]
@dropdown[{"label":"Prompt Weighter", "arg":"--prompt-weighter", "options":["compel", "compel;syntax=sdwui", "sd-embed"]}]
@imageprocessor[{"arg":"--post-processors", "label":"Post Processor"}]
@device[{}]
--prompts "add your prompt here"
44 changes: 18 additions & 26 deletions dgenerate/console/recipes/12.recipe
@@ -1,36 +1,28 @@
title: Flux (Schnell quantized)
title: Stable Cascade (UNet lite)

# Flux requires a huggingface auth token to access
# you must request access to the repository

\setp auth_token "@string[{"label": "Hugging Face Auth Token", "default":"$HF_TOKEN", "optional":false}]"

\set auth_token {{ '--auth-token ' + quote(auth_token) if auth_token else '' }}

@uri[{"label":"Model Path / HF Slug", "dir":true, "default": "black-forest-labs/FLUX.1-schnell", "optional":false, "file-types":["models"]}]
--model-type torch-flux {{ auth_token }}
@dropdown[{"label":"Model dtype", "arg":"--dtype", "options":["bfloat16", "float16", "float32"], "default":"bfloat16"}]
@uri[{"label":"Model Path / HF Slug", "dir":true, "default": "stabilityai/stable-cascade-prior", "optional":false, "file-types":["models"]}]
@karrasscheduler[{"filter": ["DDPMWuerstchenScheduler"]}]
@switchradio[{"labels":["Model CPU Offload", "Model Sequential Offload"], "args":["--model-cpu-offload", "--model-sequential-offload"], "default":0, "divider-after":true}]
@torchvae[{"label":"VAE Path / URI", "dir":true, "file-types":["models"]}]
@switch[{"label":"VAE Tiling", "arg":"--vae-tiling"}]
@switch[{"label":"VAE Slicing", "arg":"--vae-slicing", "divider-after":true}]
@uriwithfloat[{"label":"LoRA Path / URI", "float-label":"LoRA Scale", "arg":"--loras", "float-arg":"scale", "dir":true, "file-types":["models"]}]
@fluxcontrolnet[{"label":"ControlNet Path / URI", "float-label":"ControlNet Scale", "mode-label":"ControlNet Union Mode", "arg":"--control-nets", "float-arg":"scale", "dir":true, "file-types":["models"]}]
@uri[{"label":"Transformer Path / URI", "default":"https://huggingface.co/Kijai/flux-fp8/blob/main/flux1-schnell-fp8-e4m3fn.safetensors;quantize=qfloat8", "dir":true, "arg":"--transformer", "file-types":["models"], "divider-after":true}]
--text-encoders + T5EncoderModel;model=black-forest-labs/FLUX.1-schnell;subfolder=text_encoder_2;quantize=qfloat8
@imageseed[{"label":"Image Seed", "arg":"--image-seeds", "file-types":["images-in", "videos-in"], "float-label":"Image Seed Strength", "float-arg":"--image-seed-strengths", "min":0.01, "max":1, "default":"", "float":""}]
@switch[{"label":"No Aspect Correction?", "arg":"--no-aspect"}]
@imageprocessor[{"arg":"--seed-image-processors", "label":"Seed Image Processor"}]
@imageprocessor[{"arg":"--mask-image-processors", "label":"Inpaint Mask Processor"}]
@imageprocessor[{"arg":"--control-image-processors", "label":"Control Image Processor", "divider-after":true}]
@int[{"label":"Inference Steps", "arg":"--inference-steps", "default":4, "min":1}]
@float[{"label":"Guidance Scale", "arg":"--guidance-scales", "default":0, "min":0}]
--model-type torch-s-cascade
--variant bf16
--dtype bfloat16
@uri[{"label":"Decoder Path / URI", "dir":true, "arg":"--s-cascade-decoder", "default":"stabilityai/stable-cascade;dtype=float16", "file-types":["models"]}]
@karrasscheduler[{"label": "Decoder Scheduler", "arg": "--s-cascade-decoder-scheduler", "filter": ["DDPMWuerstchenScheduler"]}]
@switchradio[{"labels":["Decoder CPU Offload", "Decoder Sequential Offload"], "args":["--s-cascade-decoder-cpu-offload", "--s-cascade-decoder-sequential-offload"], "default":0, "divider-after":true}]
@dir[{"label":"UNet Directory / URI", "arg":"--unet", "default":"stabilityai/stable-cascade-prior;subfolder=prior_lite"}]
@dir[{"label":"Decoder UNet Directory / URI", "arg":"--unet2", "default":"stabilityai/stable-cascade;subfolder=decoder_lite", "divider-after":true}]
@file[{"label":"Image Seed", "arg":"--image-seeds", "file-types":["images-in", "videos-in"]}]
@imageprocessor[{"arg":"--seed-image-processors", "label":"Seed Image Processor", "divider-after":true}]
@int[{"label":"Inference Steps", "arg":"--inference-steps", "default":20, "min":1}]
@float[{"label":"Guidance Scale", "arg":"--guidance-scales", "default":4, "min":0}]
@int[{"label":"Decoder Inference Steps", "arg":"--s-cascade-decoder-inference-steps", "default":10, "min":1}]
@float[{"label":"Decoder Guidance Scale", "arg":"--s-cascade-decoder-guidance-scales", "default":0, "min":0}]
@seeds[{"label":"Seeds"}]
@int[{"label":"Batch Size", "arg":"--batch-size", "default":"", "min":1}]
@imagesize[{"label":"Batch Grid Size (CxR)", "arg":"--batch-grid-size", "default":"", "divider-after":true}]
@dir[{"label":"Output Directory", "arg":"--output-path", "default":"output"}]
@imagesize[{"label":"Output Size (WxH)", "arg":"--output-size", "default":"1024x1024"}]
@dropdown[{"label":"Prompt Weighter", "arg":"--prompt-weighter", "options":["sd-embed"]}]
@dropdown[{"label":"Prompt Weighter", "arg":"--prompt-weighter", "options":["compel", "compel;syntax=sdwui", "sd-embed"]}]
@imageprocessor[{"arg":"--post-processors", "label":"Post Processor"}]
@device[{}]
--prompts "add your prompt here"
9 changes: 5 additions & 4 deletions dgenerate/console/recipes/13.recipe
@@ -1,4 +1,4 @@
title: Flux (Dev)
title: Flux (Schnell)

# Flux requires a huggingface auth token to access
# you must request access to the repository
@@ -7,9 +7,10 @@ title: Flux (Dev)

\set auth_token {{ '--auth-token ' + quote(auth_token) if auth_token else '' }}

@uri[{"label":"Model Path / HF Slug", "dir":true, "default": "black-forest-labs/FLUX.1-dev", "optional":false, "file-types":["models"]}]
@uri[{"label":"Model Path / HF Slug", "dir":true, "default": "black-forest-labs/FLUX.1-schnell", "optional":false, "file-types":["models"]}]
--model-type torch-flux {{ auth_token }}
@dropdown[{"label":"Model dtype", "arg":"--dtype", "options":["bfloat16", "float16", "float32"], "default":"bfloat16"}]
@karrasscheduler[{"filter": ["FlowMatchEulerDiscreteScheduler"]}]
@switchradio[{"labels":["Model CPU Offload", "Model Sequential Offload"], "args":["--model-cpu-offload", "--model-sequential-offload"], "default":1, "divider-after":true}]
@torchvae[{"label":"VAE Path / URI", "dir":true, "file-types":["models"]}]
@switch[{"label":"VAE Tiling", "arg":"--vae-tiling"}]
@@ -22,8 +23,8 @@ title: Flux (Dev)
@imageprocessor[{"arg":"--seed-image-processors", "label":"Seed Image Processor"}]
@imageprocessor[{"arg":"--mask-image-processors", "label":"Inpaint Mask Processor"}]
@imageprocessor[{"arg":"--control-image-processors", "label":"Control Image Processor", "divider-after":true}]
@int[{"label":"Inference Steps", "arg":"--inference-steps", "default":50, "min":1}]
@float[{"label":"Guidance Scale", "arg":"--guidance-scales", "default":3.5, "min":0}]
@int[{"label":"Inference Steps", "arg":"--inference-steps", "default":4, "min":1}]
@float[{"label":"Guidance Scale", "arg":"--guidance-scales", "default":0, "min":0}]
@seeds[{"label":"Seeds"}]
@int[{"label":"Batch Size", "arg":"--batch-size", "default":"", "min":1}]
@imagesize[{"label":"Batch Grid Size (CxR)", "arg":"--batch-grid-size", "default":"", "divider-after":true}]