Skip to content

Commit

Permalink
Compel: due to damian0815/compel#45, we will for the moment finalise this by truncating prompts
Browse files Browse the repository at this point in the history
  • Loading branch information
bghira committed Jul 19, 2023
1 parent ca90448 commit 1cc6ee9
Showing 1 changed file with 2 additions and 2 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ def __init__(self, pipeline, device, use_second_encoder: bool = False):
self.pipeline.text_encoder,
self.pipeline.text_encoder_2
],
truncate_long_prompts=False,
truncate_long_prompts=True,
device=device,
returned_embeddings_type=ReturnedEmbeddingsType.PENULTIMATE_HIDDEN_STATES_NON_NORMALIZED,
requires_pooled=[
Expand Down Expand Up @@ -63,7 +63,7 @@ def is_valid_pipeline(self, pipeline):
def process_long_prompt(self, positive_prompt: str, negative_prompt: str):
batch_size = config.maximum_batch_size()
if self.has_dual_text_encoders(self.pipeline):
logging.debug(f'Running dual encoder Compel pipeline.')
logging.debug(f'Running dual encoder Compel pipeline for batch size {batch_size}.')
# We need to make a list of positive_prompt * batch_size count.
positive_prompt = [positive_prompt] * batch_size
conditioning, pooled_embed = self.compel(positive_prompt)
Expand Down

0 comments on commit 1cc6ee9

Please sign in to comment.