From 0173443fb7831d9fd8abf1e0e23c368bfa165009 Mon Sep 17 00:00:00 2001
From: Delirious <36864043+deepdelirious@users.noreply.github.com>
Date: Mon, 28 Oct 2024 12:16:23 -0400
Subject: [PATCH] Logging

---
 flux_train.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/flux_train.py b/flux_train.py
index 5495a13b8..21083c761 100644
--- a/flux_train.py
+++ b/flux_train.py
@@ -218,9 +218,9 @@ def inject_embedding(model, tokenizer, placeholder, embed_file, embed_key):
         index = tokenizer.convert_tokens_to_ids(placeholder)
         if (model.get_input_embeddings().num_embeddings <= len(tokenizer)):
             model.resize_token_embeddings(len(tokenizer))
-            print(f"Expanded model embeddings to : {model.get_input_embeddings().num_embeddings}")
+            logger.info(f"Expanded model embeddings to : {model.get_input_embeddings().num_embeddings}")
         model.get_input_embeddings().weight.data[index] = embed_state_dict[embed_key]
-        print(f"Added custom embedding for {placeholder} to {embed_key} as token {index}")
+        logger.info(f"Added custom embedding for {placeholder} to {embed_key} as token {index}")
 
     # load clip_l, t5xxl for caching text encoder outputs
     clip_l = flux_utils.load_clip_l(args.clip_l, weight_dtype, "cpu", args.disable_mmap_load_safetensors)
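
For context, the lines touched by this patch inject a custom token embedding into a text encoder and now report progress through `logger.info` instead of `print`. Below is a minimal, self-contained sketch of that pattern, assuming a HuggingFace-style tokenizer/model, PyTorch, and a standard module-level logger; the helper name `inject_embedding_sketch`, the `add_tokens` call, the `bert-base-uncased` checkpoint, and the placeholder token are hypothetical illustrations, not part of flux_train.py.

```python
import logging

import torch
from transformers import AutoModel, AutoTokenizer

logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)


def inject_embedding_sketch(model, tokenizer, placeholder, embed_vector):
    """Register `placeholder` as a token and copy a precomputed vector into
    the model's input embedding table, logging each step via logger.info."""
    # Hypothetical step: make sure the placeholder exists in the vocabulary.
    tokenizer.add_tokens([placeholder])
    index = tokenizer.convert_tokens_to_ids(placeholder)

    # Grow the embedding matrix if the new token id falls outside it.
    if model.get_input_embeddings().num_embeddings <= len(tokenizer):
        model.resize_token_embeddings(len(tokenizer))
        logger.info(f"Expanded model embeddings to : {model.get_input_embeddings().num_embeddings}")

    # Overwrite the embedding row for the new token with the supplied vector.
    with torch.no_grad():
        model.get_input_embeddings().weight.data[index] = embed_vector
    logger.info(f"Added custom embedding for {placeholder} as token {index}")


if __name__ == "__main__":
    # Hypothetical usage with a small public checkpoint, not the CLIP-L/T5-XXL
    # encoders that flux_train.py actually loads.
    tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
    model = AutoModel.from_pretrained("bert-base-uncased")
    vector = torch.zeros(model.get_input_embeddings().embedding_dim)
    inject_embedding_sketch(model, tokenizer, "<my_token>", vector)
```

Routing these messages through the logger rather than `print` keeps them consistent with the rest of the training script's output and lets the usual logging configuration control their level and destination.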