Skip to content

Commit

Permalink
update command-line args for batch prompts to parse strings properly
Browse files Browse the repository at this point in the history
  • Loading branch information
AUTOMATIC1111 committed Oct 15, 2022
1 parent 58e6231 commit 7d6042b
Showing 1 changed file with 104 additions and 68 deletions.
172 changes: 104 additions & 68 deletions scripts/prompts_from_file.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
import copy
import math
import os
import sys
import traceback
import shlex

import modules.scripts as scripts
import gradio as gr
Expand All @@ -10,6 +12,75 @@
from PIL import Image
from modules.shared import opts, cmd_opts, state


def process_string_tag(tag):
    """Identity parser: string-valued tags are used verbatim."""
    return tag


def process_int_tag(tag):
    """Parse an integer-valued tag (base-10 string from shlex)."""
    parsed = int(tag)
    return parsed


def process_float_tag(tag):
    """Parse a float-valued tag (e.g. cfg_scale, subseed_strength)."""
    parsed = float(tag)
    return parsed


def process_boolean_tag(tag):
    """Parse a boolean tag value.

    Returns True only for the exact lowercase string "true"; every other
    value (including "True", "1", "yes") is False, preserving the original
    comparison.  The redundant ``True if ... else False`` ternary is
    replaced with the boolean expression itself.
    """
    return tag == "true"


# Maps a "--name" commandline option (as accepted by cmdargs below) to the
# parser applied to its value.  A value of None marks a tag that is listed
# but unsupported: cmdargs rejects it because the lookup yields no parser
# (only "sd_model" falls in that category here).
prompt_tags = {
    "sd_model": None,
    "outpath_samples": process_string_tag,
    "outpath_grids": process_string_tag,
    "prompt_for_display": process_string_tag,
    "prompt": process_string_tag,
    "negative_prompt": process_string_tag,
    "styles": process_string_tag,
    "seed": process_int_tag,
    "subseed_strength": process_float_tag,
    "subseed": process_int_tag,
    "seed_resize_from_h": process_int_tag,
    "seed_resize_from_w": process_int_tag,
    "sampler_index": process_int_tag,
    "batch_size": process_int_tag,
    "n_iter": process_int_tag,
    "steps": process_int_tag,
    "cfg_scale": process_float_tag,
    "width": process_int_tag,
    "height": process_int_tag,
    "restore_faces": process_boolean_tag,
    "tiling": process_boolean_tag,
    "do_not_save_samples": process_boolean_tag,
    "do_not_save_grid": process_boolean_tag
}


def cmdargs(line):
    """Parse one prompt line written as command-line style options.

    ``line`` looks like ``--prompt "a cat" --steps 20``; it is split with
    ``shlex.split`` so quoted values may contain spaces.  Each
    ``--name value`` pair is converted with the matching parser from
    ``prompt_tags`` and collected into a dict of per-job overrides.

    Raises:
        ValueError: for a token that does not start with "--", an unknown
            or unsupported option (its parser is None, e.g. sd_model), or
            a trailing option with no value.  The original used ``assert``
            for this validation, which is stripped under ``python -O`` and
            would let malformed lines through silently; callers already
            catch Exception, so raising ValueError stays compatible.
    """
    tokens = shlex.split(line)
    res = {}
    pos = 0

    while pos < len(tokens):
        arg = tokens[pos]

        if not arg.startswith("--"):
            raise ValueError(f'must start with "--": {arg}')
        tag = arg[2:]

        func = prompt_tags.get(tag, None)
        if not func:
            raise ValueError(f'unknown commandline option: {arg}')

        if pos + 1 >= len(tokens):
            raise ValueError(f'missing argument for command line option {arg}')

        res[tag] = func(tokens[pos + 1])
        pos += 2

    return res


class Script(scripts.Script):
    def title(self):
        """Name of this script as shown in the scripts dropdown in the UI."""
        return "Prompts from file or textbox"
Expand All @@ -28,87 +99,52 @@ def ui(self, is_img2img):
checkbox_txt.change(fn=lambda x: [gr.File.update(visible = not x), gr.TextArea.update(visible = x)], inputs=[checkbox_txt], outputs=[file, prompt_txt])
return [checkbox_txt, file, prompt_txt]

    def process_string_tag(self, tag):
        # Deletion side of the diff (old method, removed by this commit).
        # Drops the first character and the last two from the raw tag text —
        # presumably stripping delimiters left by the old "--tag value"
        # splitter in run() below; TODO confirm against that tokenizer.
        return tag[1:-2]

    def process_int_tag(self, tag):
        # Deletion side of the diff; replaced by the module-level
        # process_int_tag in this commit.
        return int(tag)

    def process_float_tag(self, tag):
        # Deletion side of the diff; replaced by the module-level
        # process_float_tag in this commit.
        return float(tag)

    def process_boolean_tag(self, tag):
        # Deletion side of the diff; replaced by the module-level
        # process_boolean_tag.  True only for the exact string "true".
        return True if (tag == "true") else False

prompt_tags = {
"sd_model": None,
"outpath_samples": process_string_tag,
"outpath_grids": process_string_tag,
"prompt_for_display": process_string_tag,
"prompt": process_string_tag,
"negative_prompt": process_string_tag,
"styles": process_string_tag,
"seed": process_int_tag,
"subseed_strength": process_float_tag,
"subseed": process_int_tag,
"seed_resize_from_h": process_int_tag,
"seed_resize_from_w": process_int_tag,
"sampler_index": process_int_tag,
"batch_size": process_int_tag,
"n_iter": process_int_tag,
"steps": process_int_tag,
"cfg_scale": process_float_tag,
"width": process_int_tag,
"height": process_int_tag,
"restore_faces": process_boolean_tag,
"tiling": process_boolean_tag,
"do_not_save_samples": process_boolean_tag,
"do_not_save_grid": process_boolean_tag
}

    def on_show(self, checkbox_txt, file, prompt_txt):
        # Gradio visibility toggle: the textbox and the file-upload widget
        # are mutually exclusive inputs, switched by the checkbox.
        return [ gr.Checkbox.update(visible = True), gr.File.update(visible = not checkbox_txt), gr.TextArea.update(visible = checkbox_txt) ]

    def run(self, p, checkbox_txt, data: bytes, prompt_txt: str):
        # NOTE(review): this span is the raw diff of the OLD and NEW run()
        # implementations with the +/- markers lost in the paste, so lines
        # from both versions appear interleaved below.  It is NOT runnable
        # as-is (e.g. the duplicated `if` just below); reconcile against
        # upstream commit 7d6042b before executing.

        # old-side condition (redundant parens) immediately followed by its
        # new-side replacement:
        if (checkbox_txt):
        if checkbox_txt:
            lines = [x.strip() for x in prompt_txt.splitlines()]
        else:
            # file upload arrives as raw bytes; bad UTF-8 is tolerated
            lines = [x.strip() for x in data.decode('utf8', errors='ignore').split("\n")]

        lines = [x for x in lines if len(x) > 0]

        # old-side batch accounting, removed by the commit:
        img_count = len(lines) * p.n_iter
        batch_count = math.ceil(img_count / p.batch_size)
        loop_count = math.ceil(batch_count / p.n_iter)
        # These numbers no longer accurately reflect the total images and number of batches
        print(f"Will process {img_count} images in {batch_count} batches.")

        p.do_not_save_grid = True

        # old-side job counting:
        state.job_count = batch_count
        # new-side: one job per parsed line, collected first, run after
        job_count = 0
        jobs = []

        # new-side parsing loop: a line containing "--" is parsed as
        # commandline options via cmdargs(); on failure (or with no "--")
        # the whole line is used as the prompt.
        for line in lines:
            if "--" in line:
                try:
                    args = cmdargs(line)
                except Exception:
                    # NOTE(review): upstream bug — the f-string has no
                    # placeholder, so "[line]" prints literally; probably
                    # meant {line}
                    print(f"Error parsing line [line] as commandline:", file=sys.stderr)
                    print(traceback.format_exc(), file=sys.stderr)
                    args = {"prompt": line}
            else:
                args = {"prompt": line}

        # old-side batching/tokenizing loop (removed), with a few new-side
        # lines (n_iter accounting) spliced into it by the diff view:
            images = []
            for loop_no in range(loop_count):
                state.job = f"{loop_no + 1} out of {loop_count}"
                # The following line may need revising to remove batch_size references
                current_line = lines[loop_no*p.batch_size:(loop_no+1)*p.batch_size] * p.n_iter

                # If the current line has no tags, parse the whole line as a prompt, else parse each tag
                if(current_line[0][:2] != "--"):
                    p.prompt = current_line
                    n_iter = args.get("n_iter", 1)
                    if n_iter != 1:
                        job_count += n_iter
                    else:
                        tokenized_line = current_line[0].split("--")

                        for tag in tokenized_line:
                            tag_split = tag.split(" ", 1)
                            if(tag_split[0] != ''):
                                value_func = self.prompt_tags.get(tag_split[0], None)
                                if(value_func != None):
                                    value = value_func(self, tag_split[1])
                                    setattr(p, tag_split[0], value)
                                else:
                                    print(f"Unknown option \"{tag_split}\"")

                proc = process_images(p)
                job_count += 1

            jobs.append(args)

        # new-side execution: report totals, then run each collected job on
        # a shallow copy of p with the parsed overrides applied.
        print(f"Will process {len(lines)} lines in {job_count} jobs.")
        state.job_count = job_count

        images = []
        for n, args in enumerate(jobs):
            state.job = f"{state.job_no + 1} out of {state.job_count}"

            copy_p = copy.copy(p)
            for k, v in args.items():
                setattr(copy_p, k, v)

            proc = process_images(copy_p)
            images += proc.images

        return Processed(p, images, p.seed, "")

0 comments on commit 7d6042b

Please sign in to comment.