Skip to content

Commit 75d327a

Browse files
Remove some useless code. (Comfy-Org#8812)
1 parent ee615ac commit 75d327a

File tree

1 file changed

+1
-46
lines changed

1 file changed

+1
-46
lines changed

comfy/gligen.py

Lines changed: 1 addition & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -1,55 +1,10 @@
11
import math
22
import torch
33
from torch import nn
4-
from .ldm.modules.attention import CrossAttention
5-
from inspect import isfunction
4+
from .ldm.modules.attention import CrossAttention, FeedForward
65
import comfy.ops
76
ops = comfy.ops.manual_cast
87

9-
def exists(val):
    """Return True when *val* is any value other than None."""
    return not (val is None)
11-
12-
13-
def uniq(arr):
    """Return the distinct elements of *arr* in first-seen order, as a dict keys view."""
    return dict.fromkeys(arr, True).keys()
15-
16-
17-
def default(val, d):
    """Return *val* if it is not None; otherwise fall back to *d*.

    A function fallback is invoked lazily (only when needed); any other
    fallback value is returned as-is.
    """
    if val is None:
        # Only plain functions are called — other callables are returned verbatim,
        # matching inspect.isfunction semantics.
        return d() if isfunction(d) else d
    return val
21-
22-
23-
# feedforward
24-
class GEGLU(nn.Module):
    """GELU-gated linear unit: project to twice the output width, then gate
    one half of the projection with the GELU of the other half."""

    def __init__(self, dim_in, dim_out):
        super().__init__()
        # Single projection producing both the value and the gate halves.
        self.proj = ops.Linear(dim_in, dim_out * 2)

    def forward(self, x):
        value, gate = torch.chunk(self.proj(x), 2, dim=-1)
        return value * torch.nn.functional.gelu(gate)
32-
33-
34-
class FeedForward(nn.Module):
    """Transformer MLP block: an input projection (Linear+GELU, or GEGLU when
    ``glu`` is set), dropout, then a linear projection back to ``dim_out``
    (defaults to ``dim``). Hidden width is ``dim * mult``."""

    def __init__(self, dim, dim_out=None, mult=4, glu=False, dropout=0.):
        super().__init__()
        hidden_dim = int(dim * mult)
        out_dim = default(dim_out, dim)

        if glu:
            input_proj = GEGLU(dim, hidden_dim)
        else:
            input_proj = nn.Sequential(
                ops.Linear(dim, hidden_dim),
                nn.GELU(),
            )

        self.net = nn.Sequential(
            input_proj,
            nn.Dropout(dropout),
            ops.Linear(hidden_dim, out_dim),
        )

    def forward(self, x):
        return self.net(x)
52-
538

549
class GatedCrossAttentionDense(nn.Module):
5510
def __init__(self, query_dim, context_dim, n_heads, d_head):

0 commit comments

Comments
 (0)