Skip to content

Commit

Permalink
Move to device when not accelerating dataloader
Browse files Browse the repository at this point in the history
  • Loading branch information
deepdelirious committed Dec 31, 2024
1 parent b7d5e1e commit 11e642b
Showing 1 changed file with 5 additions and 1 deletion.
6 changes: 5 additions & 1 deletion flux_train.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@

init_ipex()

from accelerate.utils import set_seed
from accelerate.utils import set_seed, send_to_device
from library import deepspeed_utils, flux_train_utils, flux_utils, strategy_base, strategy_flux
from library.sd3_train_utils import FlowMatchEulerDiscreteScheduler

Expand Down Expand Up @@ -648,6 +648,10 @@ def grad_hook(parameter: torch.Tensor):
accelerator.print("NaN found in latents, replacing with zeros")
latents = torch.nan_to_num(latents, 0, out=latents)

if args.no_accelerate_dataloader:
# The accelerator's dataloader wrapper normally handles device placement, but it was disabled, so move the batch manually.
batch = send_to_device(batch, accelerator.device)

text_encoder_outputs_list = batch.get("text_encoder_outputs_list", None)
if text_encoder_outputs_list is not None:
text_encoder_conds = text_encoder_outputs_list
Expand Down

0 comments on commit 11e642b

Please sign in to comment.