app/assets/database/bulk_ops.py (22 additions & 6 deletions)
@@ -92,14 +92,23 @@ def seed_from_paths_batch(
         session.execute(ins_asset, chunk)
 
     # try to claim AssetCacheState (file_path)
-    winners_by_path: set[str] = set()
+    # Insert with ON CONFLICT DO NOTHING, then query to find which paths were actually inserted
     ins_state = (
         sqlite.insert(AssetCacheState)
         .on_conflict_do_nothing(index_elements=[AssetCacheState.file_path])
-        .returning(AssetCacheState.file_path)
     )
     for chunk in _iter_chunks(state_rows, _rows_per_stmt(3)):
-        winners_by_path.update((session.execute(ins_state, chunk)).scalars().all())
+        session.execute(ins_state, chunk)
+
+    # Query to find which of our paths won (were actually inserted)
+    winners_by_path: set[str] = set()
+    for chunk in _iter_chunks(path_list, MAX_BIND_PARAMS):
+        result = session.execute(
+            sqlalchemy.select(AssetCacheState.file_path)
+            .where(AssetCacheState.file_path.in_(chunk))
+            .where(AssetCacheState.asset_id.in_([path_to_asset[p] for p in chunk]))
+        )
+        winners_by_path.update(result.scalars().all())
 
     all_paths_set = set(path_list)
     losers_by_path = all_paths_set - winners_by_path
@@ -112,16 +121,23 @@ def seed_from_paths_batch(
         return {"inserted_infos": 0, "won_states": 0, "lost_states": len(losers_by_path)}
 
     # insert AssetInfo only for winners
+    # Insert with ON CONFLICT DO NOTHING, then query to find which were actually inserted
     winner_info_rows = [asset_to_info[path_to_asset[p]] for p in winners_by_path]
     ins_info = (
         sqlite.insert(AssetInfo)
         .on_conflict_do_nothing(index_elements=[AssetInfo.asset_id, AssetInfo.owner_id, AssetInfo.name])
-        .returning(AssetInfo.id)
     )
+    for chunk in _iter_chunks(winner_info_rows, _rows_per_stmt(9)):
+        session.execute(ins_info, chunk)
+
+    # Query to find which info rows were actually inserted (by matching our generated IDs)
+    all_info_ids = [row["id"] for row in winner_info_rows]
     inserted_info_ids: set[str] = set()
-    for chunk in _iter_chunks(winner_info_rows, _rows_per_stmt(9)):
-        inserted_info_ids.update((session.execute(ins_info, chunk)).scalars().all())
+    for chunk in _iter_chunks(all_info_ids, MAX_BIND_PARAMS):
+        result = session.execute(
+            sqlalchemy.select(AssetInfo.id).where(AssetInfo.id.in_(chunk))
+        )
+        inserted_info_ids.update(result.scalars().all())
 
     # build and insert tag + meta rows for the AssetInfo
     tag_rows: list[dict] = []
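The bulk_ops.py change drops RETURNING from the conflict-aware inserts and instead re-queries the table to learn which rows actually landed. Below is a minimal, self-contained sketch of that insert-then-select pattern using SQLAlchemy 2.0-style ORM; the CacheState model, engine URL, and row values are illustrative stand-ins, not the actual ComfyUI schema (which additionally filters on asset_id to tell its own winners apart from pre-existing rows).

import sqlalchemy
from sqlalchemy.dialects import sqlite
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column


class Base(DeclarativeBase):
    pass


class CacheState(Base):  # hypothetical stand-in for AssetCacheState
    __tablename__ = "cache_state"
    file_path: Mapped[str] = mapped_column(primary_key=True)


engine = sqlalchemy.create_engine("sqlite://")
Base.metadata.create_all(engine)

rows = [{"file_path": "a.png"}, {"file_path": "b.png"}]
with Session(engine) as session:
    # Step 1: insert, silently skipping rows whose file_path already exists.
    ins = sqlite.insert(CacheState).on_conflict_do_nothing(
        index_elements=[CacheState.file_path]
    )
    session.execute(ins, rows)

    # Step 2: select the paths back to see which ones are now present,
    # instead of relying on RETURNING from the insert itself.
    winners = set(
        session.execute(
            sqlalchemy.select(CacheState.file_path).where(
                CacheState.file_path.in_([r["file_path"] for r in rows])
            )
        ).scalars()
    )
    session.commit()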
comfy/float.py (13 additions & 8 deletions)
@@ -69,26 +69,31 @@ def stochastic_rounding(value, dtype, seed=0):
 
 # TODO: improve this?
 def stochastic_float_to_fp4_e2m1(x, generator):
+    orig_shape = x.shape
     sign = torch.signbit(x).to(torch.uint8)
-    x_abs = x.abs()
 
-    exp = torch.floor(torch.log2(x_abs) + 1.0).clamp(0, 3)
+    exp = torch.floor(torch.log2(x.abs()) + 1.0).clamp(0, 3)
     x += (torch.rand(x.size(), dtype=x.dtype, layout=x.layout, device=x.device, generator=generator) - 0.5) * (2 ** (exp - 2.0)) * 1.25
 
-    x_abs = x.abs()
-    exp = torch.floor(torch.log2(x_abs) + 1.1925).clamp(0, 3)
+    x = x.abs()
+    exp = torch.floor(torch.log2(x) + 1.1925).clamp(0, 3)
 
     mantissa = torch.where(
         exp > 0,
-        (x_abs / (2.0 ** (exp - 1)) - 1.0) * 2.0,
-        (x_abs * 2.0)
+        (x / (2.0 ** (exp - 1)) - 1.0) * 2.0,
+        (x * 2.0),
+        out=x
     ).round().to(torch.uint8)
+    del x
+
+    exp = exp.to(torch.uint8)
 
-    fp4 = (sign << 3) | (exp.to(torch.uint8) << 1) | mantissa
+    fp4 = (sign << 3) | (exp << 1) | mantissa
+    del sign, exp, mantissa
 
     fp4_flat = fp4.view(-1)
     packed = (fp4_flat[0::2] << 4) | fp4_flat[1::2]
-    return packed.reshape(list(x.shape)[:-1] + [-1])
+    return packed.reshape(list(orig_shape)[:-1] + [-1])
 
 
 def to_blocked(input_matrix, flatten: bool = True) -> torch.Tensor:
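For reference, the float.py hunk keeps the same E2M1 bit layout as before (1 sign bit, 2 exponent bits, 1 mantissa bit, two 4-bit codes packed per byte) and only reworks the intermediate tensors to cut peak memory. A toy illustration of that packing, with made-up field values rather than anything taken from the diff:

import torch

# hypothetical field values for two elements, not taken from the change above
sign = torch.tensor([0, 1], dtype=torch.uint8)      # 0 = positive, 1 = negative
exp = torch.tensor([2, 1], dtype=torch.uint8)       # 2-bit exponent field
mantissa = torch.tensor([1, 0], dtype=torch.uint8)  # 1-bit mantissa field

fp4 = (sign << 3) | (exp << 1) | mantissa   # 4-bit codes: 0b0101 and 0b1010
packed = (fp4[0::2] << 4) | fp4[1::2]       # two codes per byte: 0b01011010
print([bin(v) for v in packed.tolist()])    # ['0b1011010']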
comfy/supported_models.py (1 addition & 1 deletion)
@@ -845,7 +845,7 @@ class LTXAV(LTXV):
 
     def __init__(self, unet_config):
         super().__init__(unet_config)
-        self.memory_usage_factor = 0.061 # TODO
+        self.memory_usage_factor = 0.077 # TODO
 
     def get_model(self, state_dict, prefix="", device=None):
         out = model_base.LTXAV(self, device=device)
comfyui_version.py (1 addition & 1 deletion)
@@ -1,3 +1,3 @@
 # This file is automatically generated by the build process when version is
 # updated in pyproject.toml.
-__version__ = "0.8.2"
+__version__ = "0.9.1"
pyproject.toml (1 addition & 1 deletion)
@@ -1,6 +1,6 @@
 [project]
 name = "ComfyUI"
-version = "0.8.2"
+version = "0.9.1"
 readme = "README.md"
 license = { file = "LICENSE" }
 requires-python = ">=3.10"
requirements.txt (1 addition & 1 deletion)
@@ -1,4 +1,4 @@
-comfyui-frontend-package==1.36.13
+comfyui-frontend-package==1.36.14
 comfyui-workflow-templates==0.8.4
 comfyui-embedded-docs==0.4.0
 torch