enable creating embedding with --medvram
AUTOMATIC1111 committed Oct 26, 2022
1 parent ee73341 commit cbb857b
Showing 1 changed file with 3 additions and 0 deletions.
modules/textual_inversion/textual_inversion.py: 3 additions & 0 deletions
@@ -157,6 +157,9 @@ def create_embedding(name, num_vectors_per_token, overwrite_old, init_text='*'):
     cond_model = shared.sd_model.cond_stage_model
     embedding_layer = cond_model.wrapped.transformer.text_model.embeddings
 
+    with devices.autocast():
+        cond_model([""])  # will send cond model to GPU if lowvram/medvram is active
+
     ids = cond_model.tokenizer(init_text, max_length=num_vectors_per_token, return_tensors="pt", add_special_tokens=False)["input_ids"]
     embedded = embedding_layer.token_embedding.wrapped(ids.to(devices.device)).squeeze(0)
     vec = torch.zeros((num_vectors_per_token, embedded.shape[1]), device=devices.device)
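Why the dummy forward pass matters: with --medvram/--lowvram active, the conditioning model is kept off the GPU and is only moved over when it is actually called (per the comment on the added line), so reading embedding_layer.token_embedding's weights directly would pair offloaded weights with token ids on devices.device. Calling cond_model([""]) first triggers that move. Below is a minimal, self-contained sketch of this lazy-offload pattern; the hook, module, and sizes are illustrative stand-ins, not the webui's actual lowvram implementation.

import torch
import torch.nn as nn

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

class DummyCondModel(nn.Module):
    # Hypothetical stand-in for the CLIP cond_stage_model wrapper.
    def __init__(self):
        super().__init__()
        self.token_embedding = nn.Embedding(49408, 768)  # CLIP-sized table, for illustration

    def forward(self, ids):
        return self.token_embedding(ids)

def send_me_to_gpu(module, inputs):
    # Forward pre-hook: fires only when the module is *called*, not when its
    # weights are read directly, which is why a throwaway cond_model([""]) call
    # is needed before token_embedding is used.
    module.to(device)
    return tuple(t.to(device) for t in inputs)

cond_model = DummyCondModel()                         # starts out off the GPU
cond_model.register_forward_pre_hook(send_me_to_gpu)

cond_model(torch.zeros(1, dtype=torch.long))          # dummy call: hook moves the weights onto the device
ids = torch.randint(0, 49408, (1, 4))
embedded = cond_model.token_embedding(ids.to(device)).squeeze(0)  # weights and ids now share a device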
