
Commit 85fc35e

Fix mac issue. (Comfy-Org#12250)
1 parent 2233647 · commit 85fc35e

1 file changed: +2 −2 lines

comfy/text_encoders/llama.py

Lines changed: 2 additions & 2 deletions
@@ -628,10 +628,10 @@ def forward(self, x, attention_mask=None, embeds=None, num_tokens=None, intermed
         mask = None
         if attention_mask is not None:
             mask = 1.0 - attention_mask.to(x.dtype).reshape((attention_mask.shape[0], 1, -1, attention_mask.shape[-1])).expand(attention_mask.shape[0], 1, seq_len, attention_mask.shape[-1])
-            mask = mask.masked_fill(mask.to(torch.bool), float("-inf"))
+            mask = mask.masked_fill(mask.to(torch.bool), torch.finfo(x.dtype).min)
 
         if seq_len > 1:
-            causal_mask = torch.empty(past_len + seq_len, past_len + seq_len, dtype=x.dtype, device=x.device).fill_(float("-inf")).triu_(1)
+            causal_mask = torch.empty(past_len + seq_len, past_len + seq_len, dtype=x.dtype, device=x.device).fill_(torch.finfo(x.dtype).min).triu_(1)
             if mask is not None:
                 mask += causal_mask
             else:
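
Both changed lines replace float("-inf") with torch.finfo(x.dtype).min as the fill value for the additive attention masks. The commit title only says "Fix mac issue.", but the usual motivation for this swap is that an additive mask row filled with -inf drives softmax to NaN (0/0), and the -inf path is known to misbehave on some backends such as Apple's MPS. The snippet below is a minimal, self-contained sketch of that difference; it is illustrative only and not code from the repository.

import torch

# Illustrative sketch (not from llama.py): compare an additive mask filled with
# float("-inf") against one filled with torch.finfo(dtype).min.
dtype = torch.float32
scores = torch.zeros(1, 4)  # attention logits for one query over 4 key positions

inf_mask = torch.full_like(scores, float("-inf"))           # old fill value
fin_mask = torch.full_like(scores, torch.finfo(dtype).min)  # new fill value

# A fully masked row with -inf collapses to NaN after softmax (the exponentials
# sum to zero), while the finite fill value keeps the result well defined.
print(torch.softmax(scores + inf_mask, dim=-1))  # tensor([[nan, nan, nan, nan]])
print(torch.softmax(scores + fin_mask, dim=-1))  # tensor([[0.25, 0.25, 0.25, 0.25]])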
