3 changes: 2 additions & 1 deletion app/assets/helpers.py
@@ -81,7 +81,8 @@ def get_comfy_models_folders() -> list[tuple[str, list[str]]]:
"""
targets: list[tuple[str, list[str]]] = []
models_root = os.path.abspath(folder_paths.models_dir)
for name, (paths, _exts) in folder_paths.folder_names_and_paths.items():
for name, values in folder_paths.folder_names_and_paths.items():
paths, _exts = values[0], values[1] # NOTE: this prevents nodepacks that hackily edit folder_... from breaking ComfyUI
if any(os.path.abspath(p).startswith(models_root + os.sep) for p in paths):
targets.append((name, paths))
return targets
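The index-based access is the point of this change. A minimal sketch of the failure it avoids, assuming a hypothetical node pack that appends a third element to an entry of folder_names_and_paths:

```python
# Hypothetical entry after a node pack appended extra metadata to the tuple.
values = (["/ComfyUI/models/checkpoints"], {".safetensors", ".ckpt"}, "extra-metadata")

# Strict tuple unpacking breaks on the extra element:
try:
    paths, _exts = values
except ValueError as err:
    print(err)  # too many values to unpack (expected 2)

# Indexing only the first two elements keeps working:
paths, _exts = values[0], values[1]
print(paths)  # ['/ComfyUI/models/checkpoints']
```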
13 changes: 13 additions & 0 deletions comfy_api/latest/_io.py
@@ -1113,6 +1113,18 @@ def _expand_schema_for_dynamic(out_dict: dict[str, Any], live_inputs: dict[str,
out_dict[input_type][finalized_id] = value
out_dict["dynamic_paths"][finalized_id] = finalize_prefix(curr_prefix, curr_prefix[-1])

@comfytype(io_type="IMAGECOMPARE")
class ImageCompare(ComfyTypeI):
Type = dict

class Input(WidgetInput):
def __init__(self, id: str, display_name: str=None, optional=False, tooltip: str=None,
socketless: bool=True):
super().__init__(id, display_name, optional, tooltip, None, None, socketless)

def as_dict(self):
return super().as_dict()

DYNAMIC_INPUT_LOOKUP: dict[str, Callable[[dict[str, Any], dict[str, Any], tuple[str, dict[str, Any]], str, list[str] | None], None]] = {}
def register_dynamic_input_func(io_type: str, func: Callable[[dict[str, Any], dict[str, Any], tuple[str, dict[str, Any]], str, list[str] | None], None]):
DYNAMIC_INPUT_LOOKUP[io_type] = func
@@ -1958,4 +1970,5 @@ def as_dict(self) -> dict:
"add_to_dict_v1",
"add_to_dict_v3",
"V3Data",
"ImageCompare",
]
31 changes: 18 additions & 13 deletions comfy_execution/jobs.py
@@ -14,8 +14,9 @@ class JobStatus:
IN_PROGRESS = 'in_progress'
COMPLETED = 'completed'
FAILED = 'failed'
CANCELLED = 'cancelled'

ALL = [PENDING, IN_PROGRESS, COMPLETED, FAILED]
ALL = [PENDING, IN_PROGRESS, COMPLETED, FAILED, CANCELLED]


# Media types that can be previewed in the frontend
@@ -94,19 +95,14 @@ def normalize_history_item(prompt_id: str, history_item: dict, include_outputs:

status_info = history_item.get('status', {})
status_str = status_info.get('status_str') if status_info else None
if status_str == 'success':
status = JobStatus.COMPLETED
elif status_str == 'error':
status = JobStatus.FAILED
else:
status = JobStatus.COMPLETED

outputs = history_item.get('outputs', {})
outputs_count, preview_output = get_outputs_summary(outputs)

execution_error = None
execution_start_time = None
execution_end_time = None
was_interrupted = False
if status_info:
messages = status_info.get('messages', [])
for entry in messages:
@@ -119,6 +115,15 @@
execution_end_time = event_data.get('timestamp')
if event_name == 'execution_error':
execution_error = event_data
elif event_name == 'execution_interrupted':
was_interrupted = True

if status_str == 'success':
status = JobStatus.COMPLETED
elif status_str == 'error':
status = JobStatus.CANCELLED if was_interrupted else JobStatus.FAILED
else:
status = JobStatus.COMPLETED

job = prune_dict({
'id': prompt_id,
@@ -268,13 +273,13 @@ def get_all_jobs(
for item in queued:
jobs.append(normalize_queue_item(item, JobStatus.PENDING))

include_completed = JobStatus.COMPLETED in status_filter
include_failed = JobStatus.FAILED in status_filter
if include_completed or include_failed:
history_statuses = {JobStatus.COMPLETED, JobStatus.FAILED, JobStatus.CANCELLED}
requested_history_statuses = history_statuses & set(status_filter)
if requested_history_statuses:
for prompt_id, history_item in history.items():
is_failed = history_item.get('status', {}).get('status_str') == 'error'
if (is_failed and include_failed) or (not is_failed and include_completed):
jobs.append(normalize_history_item(prompt_id, history_item))
job = normalize_history_item(prompt_id, history_item)
if job.get('status') in requested_history_statuses:
jobs.append(job)

if workflow_id:
jobs = [j for j in jobs if j.get('workflow_id') == workflow_id]
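The status mapping above can be summarized as a small pure function. This is only an illustrative sketch, not a helper in the codebase; normalize_history_item infers the interrupted flag from an 'execution_interrupted' message in the history entry:

```python
# Illustrative sketch of how history items now map to job statuses.
def map_history_status(status_str: str | None, was_interrupted: bool) -> str:
    if status_str == 'error':
        # Interrupted prompts are stored with status_str == 'error' plus an
        # 'execution_interrupted' message; they now surface as 'cancelled'.
        return 'cancelled' if was_interrupted else 'failed'
    # 'success' (and missing status info) normalize to 'completed'.
    return 'completed'

assert map_history_status('error', True) == 'cancelled'
assert map_history_status('error', False) == 'failed'
assert map_history_status('success', False) == 'completed'
```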
53 changes: 53 additions & 0 deletions comfy_extras/nodes_image_compare.py
@@ -0,0 +1,53 @@
import nodes

from typing_extensions import override
from comfy_api.latest import IO, ComfyExtension


class ImageCompare(IO.ComfyNode):
"""Compares two images with a slider interface."""

@classmethod
def define_schema(cls):
return IO.Schema(
node_id="ImageCompare",
display_name="Image Compare",
description="Compares two images side by side with a slider.",
category="image",
is_experimental=True,
is_output_node=True,
inputs=[
IO.Image.Input("image_a", optional=True),
IO.Image.Input("image_b", optional=True),
IO.ImageCompare.Input("compare_view"),
],
outputs=[],
)

@classmethod
def execute(cls, image_a=None, image_b=None, compare_view=None) -> IO.NodeOutput:
result = {"a_images": [], "b_images": []}

preview_node = nodes.PreviewImage()

if image_a is not None and len(image_a) > 0:
saved = preview_node.save_images(image_a, "comfy.compare.a")
result["a_images"] = saved["ui"]["images"]

if image_b is not None and len(image_b) > 0:
saved = preview_node.save_images(image_b, "comfy.compare.b")
result["b_images"] = saved["ui"]["images"]

return IO.NodeOutput(ui=result)


class ImageCompareExtension(ComfyExtension):
@override
async def get_node_list(self) -> list[type[IO.ComfyNode]]:
return [
ImageCompare,
]


async def comfy_entrypoint() -> ImageCompareExtension:
return ImageCompareExtension()
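For reference, a rough sketch of the shape of the ui payload this node returns; the field names mirror what PreviewImage.save_images emits, but the filenames and subfolders below are purely illustrative:

```python
# Illustrative shape of the dict passed to IO.NodeOutput(ui=result); actual
# filenames depend on PreviewImage's temporary-file naming.
result = {
    "a_images": [
        {"filename": "comfy.compare.a_00001_.png", "subfolder": "", "type": "temp"},
    ],
    "b_images": [
        {"filename": "comfy.compare.b_00001_.png", "subfolder": "", "type": "temp"},
    ],
}
```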
10 changes: 6 additions & 4 deletions nodes.py
@@ -378,14 +378,15 @@ def INPUT_TYPES(s):
CATEGORY = "latent/inpaint"

def encode(self, vae, pixels, mask, grow_mask_by=6):
x = (pixels.shape[1] // vae.downscale_ratio) * vae.downscale_ratio
y = (pixels.shape[2] // vae.downscale_ratio) * vae.downscale_ratio
downscale_ratio = vae.spacial_compression_encode()
x = (pixels.shape[1] // downscale_ratio) * downscale_ratio
y = (pixels.shape[2] // downscale_ratio) * downscale_ratio
mask = torch.nn.functional.interpolate(mask.reshape((-1, 1, mask.shape[-2], mask.shape[-1])), size=(pixels.shape[1], pixels.shape[2]), mode="bilinear")

pixels = pixels.clone()
if pixels.shape[1] != x or pixels.shape[2] != y:
x_offset = (pixels.shape[1] % vae.downscale_ratio) // 2
y_offset = (pixels.shape[2] % vae.downscale_ratio) // 2
x_offset = (pixels.shape[1] % downscale_ratio) // 2
y_offset = (pixels.shape[2] % downscale_ratio) // 2
pixels = pixels[:,x_offset:x + x_offset, y_offset:y + y_offset,:]
mask = mask[:,:,x_offset:x + x_offset, y_offset:y + y_offset]
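As a worked example of the crop arithmetic, assuming a spatial compression factor of 8 (the real value comes from vae.spacial_compression_encode() at runtime):

```python
# Worked example for a 515x517 input and an assumed downscale ratio of 8.
downscale_ratio = 8
h, w = 515, 517                               # pixels.shape[1], pixels.shape[2]
x = (h // downscale_ratio) * downscale_ratio  # 512
y = (w // downscale_ratio) * downscale_ratio  # 512
x_offset = (h % downscale_ratio) // 2         # (515 % 8) // 2 == 1
y_offset = (w % downscale_ratio) // 2         # (517 % 8) // 2 == 2
# pixels[:, 1:513, 2:514, :] yields a centered 512x512 crop aligned to the VAE grid.
print(x, y, x_offset, y_offset)               # 512 512 1 2
```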

@@ -2369,6 +2370,7 @@ async def init_builtin_extra_nodes():
"nodes_nop.py",
"nodes_kandinsky5.py",
"nodes_wanmove.py",
"nodes_image_compare.py",
]

import_failed = []
38 changes: 37 additions & 1 deletion tests/execution/test_jobs.py
@@ -19,14 +19,16 @@ def test_status_values(self):
assert JobStatus.IN_PROGRESS == 'in_progress'
assert JobStatus.COMPLETED == 'completed'
assert JobStatus.FAILED == 'failed'
assert JobStatus.CANCELLED == 'cancelled'

def test_all_contains_all_statuses(self):
"""ALL should contain all status values."""
assert JobStatus.PENDING in JobStatus.ALL
assert JobStatus.IN_PROGRESS in JobStatus.ALL
assert JobStatus.COMPLETED in JobStatus.ALL
assert JobStatus.FAILED in JobStatus.ALL
assert len(JobStatus.ALL) == 4
assert JobStatus.CANCELLED in JobStatus.ALL
assert len(JobStatus.ALL) == 5


class TestIsPreviewable:
@@ -336,6 +338,40 @@ def test_failed_job(self):
assert job['execution_error']['node_type'] == 'KSampler'
assert job['execution_error']['exception_message'] == 'CUDA out of memory'

def test_cancelled_job(self):
"""Cancelled/interrupted history item should have cancelled status."""
history_item = {
'prompt': (
5,
'prompt-cancelled',
{'nodes': {}},
{'create_time': 1234567890000},
['node1'],
),
'status': {
'status_str': 'error',
'completed': False,
'messages': [
('execution_start', {'prompt_id': 'prompt-cancelled', 'timestamp': 1234567890500}),
('execution_interrupted', {
'prompt_id': 'prompt-cancelled',
'node_id': '5',
'node_type': 'KSampler',
'executed': ['1', '2', '3'],
'timestamp': 1234567891000,
})
]
},
'outputs': {},
}

job = normalize_history_item('prompt-cancelled', history_item)
assert job['status'] == 'cancelled'
assert job['execution_start_time'] == 1234567890500
assert job['execution_end_time'] == 1234567891000
# Cancelled jobs should not have execution_error set
assert 'execution_error' not in job

def test_include_outputs(self):
"""When include_outputs=True, should include full output data."""
history_item = {