From a451c0ed1405eac3fe936f5de85faa71f3fdc50d Mon Sep 17 00:00:00 2001 From: Yaniv Galron <89192632+YanivDorGalron@users.noreply.github.com> Date: Thu, 23 Jan 2025 23:55:33 +0200 Subject: [PATCH] removing redundant requires_grad = False (#10628) We already set the unet to requires_grad=False at line 506 Co-authored-by: Aryan --- examples/text_to_image/train_text_to_image_lora.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/examples/text_to_image/train_text_to_image_lora.py b/examples/text_to_image/train_text_to_image_lora.py index e7f2f5c4c881..82c395c685f8 100644 --- a/examples/text_to_image/train_text_to_image_lora.py +++ b/examples/text_to_image/train_text_to_image_lora.py @@ -515,10 +515,6 @@ def main(): elif accelerator.mixed_precision == "bf16": weight_dtype = torch.bfloat16 - # Freeze the unet parameters before adding adapters - for param in unet.parameters(): - param.requires_grad_(False) - unet_lora_config = LoraConfig( r=args.rank, lora_alpha=args.rank,