Skip to content

Commit 8ffc170

Browse files
committed
simple modify
1 parent 3ca7dc5 commit 8ffc170

2 files changed

Lines changed: 2 additions & 2 deletions

File tree

examples/dreambooth/train_dreambooth_lora_flux2.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1256,7 +1256,7 @@ def main(args):
1256 1256
if args.lora_layers is not None:
1257 1257
target_modules = [layer.strip() for layer in args.lora_layers.split(",")]
1258 1258
else:
1259-
# target_modules = ["to_k", "to_q", "to_v", "to_out.0"] - just train transformer_blocks
1259+
# target_modules = ["to_k", "to_q", "to_v", "to_out.0"] # just train transformer_blocks
1260 1260

1261 1261
# train transformer_blocks and single_transformer_blocks
1262 1262
target_modules = ["to_k", "to_q", "to_v", "to_out.0"] + [

examples/dreambooth/train_dreambooth_lora_flux2_img2img.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1206,7 +1206,7 @@ def main(args):
1206 1206
if args.lora_layers is not None:
1207 1207
target_modules = [layer.strip() for layer in args.lora_layers.split(",")]
1208 1208
else:
1209-
# target_modules = ["to_k", "to_q", "to_v", "to_out.0"] - just train transformer_blocks
1209+
# target_modules = ["to_k", "to_q", "to_v", "to_out.0"] # just train transformer_blocks
1210 1210

1211 1211
# train transformer_blocks and single_transformer_blocks
1212 1212
target_modules = ["to_k", "to_q", "to_v", "to_out.0"] + [

0 commit comments

Comments (0)