diff --git a/.claude/settings.json b/.claude/settings.json deleted file mode 100644 index af21149a7..000000000 --- a/.claude/settings.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "permissions": { - "allow": [ - "Bash(grep -r \"Load3DModel\" /Users/linmoumou/Documents/comfy/workflow_templates/templates/*.json)", - "Bash(python3 -c \":*)" - ] - } -} diff --git a/.claude/settings.local.json b/.claude/settings.local.json index c857d196a..49003128f 100644 --- a/.claude/settings.local.json +++ b/.claude/settings.local.json @@ -16,7 +16,12 @@ "Bash(cd C:/Code/Comfy/workflow_templates && rm fix-usernames.mjs)", "Bash(python3 << 'EOF'\nimport json\nimport os\nfrom pathlib import Path\n\ntemplates_dir = Path\\('/c/Code/Comfy/workflow_templates/site/src/content/templates'\\)\n\ntags = set\\(\\)\nmodels = set\\(\\)\ncreators = set\\(\\)\ntotal_templates = 0\n\nfor f in templates_dir.glob\\('*.json'\\):\n try:\n with open\\(f\\) as fp:\n data = json.load\\(fp\\)\n total_templates += 1\n if 'tags' in data and data['tags']:\n tags.update\\(data['tags']\\)\n if 'models' in data and data['models']:\n models.update\\(data['models']\\)\n if 'username' in data and data['username']:\n creators.add\\(data['username']\\)\n except:\n pass\n\nprint\\(f\"Total templates: {total_templates}\"\\)\nprint\\(f\"Unique tags: {len\\(tags\\)}\"\\)\nprint\\(f\"Unique models: {len\\(models\\)}\"\\)\nprint\\(f\"Unique creators: {len\\(creators\\)}\"\\)\nEOF)", "Bash(cd C:/Code/Comfy/workflow_templates/site && npx astro check 2>&1 | tail -40)", - "Bash(cd C:/Code/Comfy/workflow_templates/site && timeout 20 npx astro dev --port 4322 2>&1 | tail -30)" + "Bash(cd C:/Code/Comfy/workflow_templates/site && timeout 20 npx astro dev --port 4322 2>&1 | tail -30)", + "Bash(sed -i '' 's/\"title\": \"Bira:画像の背景を削除\"/\"title\": \"Bria:画像の背景を削除\"/' /Users/linmoumou/Documents/comfy/workflow_templates/templates/index.ja.json)", + "Bash(sed -i '' 's/\"title\": \"Bira: إزالة خلفية الصورة\"/\"title\": \"Bria: إزالة خلفية 
الصورة\"/' /Users/linmoumou/Documents/comfy/workflow_templates/templates/index.ar.json)", + "Bash(sed -i '' \"s/\\\\\"title\\\\\": \\\\\"Bira : Supprimer l'arrière-plan de l'image\\\\\"/\\\\\"title\\\\\": \\\\\"Bria : Supprimer l'arrière-plan de l'image\\\\\"/\" /Users/linmoumou/Documents/comfy/workflow_templates/templates/index.fr.json)", + "Bash(sed -i '' 's/\"title\": \"Bira: Eliminar fondo de imagen\"/\"title\": \"Bria: Eliminar fondo de imagen\"/' /Users/linmoumou/Documents/comfy/workflow_templates/templates/index.es.json)", + "Bash(sed -i '' 's/\"title\": \"Bira: Удалить фон изображения\"/\"title\": \"Bria: Удалить фон изображения\"/' /Users/linmoumou/Documents/comfy/workflow_templates/templates/index.ru.json)" ] } } diff --git a/.claude/skills/importing-subgraphs/SKILL.md b/.claude/skills/importing-subgraphs/SKILL.md new file mode 100644 index 000000000..94d764aaa --- /dev/null +++ b/.claude/skills/importing-subgraphs/SKILL.md @@ -0,0 +1,151 @@ +--- +name: importing-subgraphs +description: "Imports and registers subgraph blueprints into the ComfyUI workflow_templates repository. Handles placing blueprint JSON files, adding thumbnails, running the import/sync pipeline, and validating results. Use when asked to: import a subgraph, add a blueprint, register a blueprint, add a subgraph blueprint, import a subgraph blueprint, contribute a subgraph, add a new node component, publish a blueprint, upload a subgraph, create a blueprint, onboard a subgraph, add a reusable node. Triggers on: import subgraph, add blueprint, subgraph blueprint, new blueprint, register blueprint, blueprint import." +--- + +# Importing Subgraph Blueprints + +Subgraph blueprints are pre-built ComfyUI node components stored in `blueprints/` and shipped via the `comfyui-subgraph-blueprints` package. + +## Rules + +- **Never** modify scripts, build tooling, or CI configuration. +- **Always** validate after changes (Step 4). 
+- Blueprint filenames **must** be `snake_case` — the import script handles renaming automatically. +- Use double-quotes `"` in all JSON files. +- Blueprint JSON **must** contain a `definitions.subgraphs` array with at least one entry. + +--- + +## Step 1 — Obtain the Blueprint JSON + +Two sources: + +**Option A — Import from an external directory:** +```bash +python scripts/import_blueprints.py --source /path/to/external/blueprints/ +``` +The script copies all `*.json` files (skipping `index*.json`) into `blueprints/`, renames them to `snake_case`, regenerates `blueprints/index.json`, and updates `blueprints_bundles.json`. + +**Option B — Manual placement (single file):** +1. Export the subgraph from ComfyUI (Save → Export workflow JSON). +2. Copy the `.json` file to `blueprints/` with a `snake_case` name, e.g. `my_blueprint.json`. +3. Run the import script (no `--source` needed) to normalize and regenerate index + bundles: + ```bash + python scripts/import_blueprints.py + ``` + +### Required blueprint JSON structure + +The file must contain `definitions.subgraphs[0]` with these fields: + +| Field | Required | +|-------|----------| +| `name` | yes — display name shown in the node palette | +| `inputs` | yes — exposed input slots | +| `outputs` | yes — exposed output slots | +| `nodes` | yes — internal ComfyUI nodes | + +--- + +## Step 2 — Add a Thumbnail (Optional) + +Thumbnail files live in `blueprints/` and follow the naming pattern: + +``` +{blueprint_name}-1.webp # primary (required for thumbnail display) +{blueprint_name}-2.webp # secondary (optional, for compare/hover effects) +``` + +- Convert to **webp** format (lossy ~65% quality). +- The import script sets `"mediaSubtype": "webp"` in `index.json` automatically. + +--- + +## Step 3 — Embed Model Metadata (Recommended) + +For every model-loading node inside `definitions.subgraphs[0].nodes` (e.g. 
`UNETLoader`, `VAELoader`, `CLIPLoader`), add a `"models"` array to the node's `"properties"`: + +```json +"properties": { + "Node name for S&R": "UNETLoader", + "cnr_id": "comfy-core", + "ver": "0.3.40", + "models": [ + { + "name": "flux1-dev.safetensors", + "url": "https://huggingface.co/.../resolve/main/flux1-dev.safetensors?download=true", + "hash": "", + "hash_type": "SHA256", + "directory": "diffusion_models" + } + ] +} +``` + +The `name` field **must exactly match** the corresponding `widgets_values` entry. The import script surfaces model names automatically in `index.json` (limited to first 5). + +--- + +## Step 4 — Sync to Packages + +After `import_blueprints.py` succeeds, push assets into the package directory and regenerate the manifest: + +```bash +python scripts/sync_blueprints.py +``` + +This writes `packages/core/src/comfyui_workflow_templates_core/blueprints_manifest.json` and copies all blueprint files into `packages/blueprints/src/comfyui_subgraph_blueprints/blueprints/`. + +--- + +## Step 5 — Validate + +```bash +python scripts/validate_blueprints.py +``` + +Checks: +- JSON syntax for all blueprint files +- `index.json` against `index.schema.json` +- Blueprint structure (`definitions.subgraphs` present with required fields) +- `blueprints_bundles.json` consistency with files on disk + +Fix all errors before continuing. CI will fail if bundles or manifests are out of sync. + +--- + +## Step 6 — Bump Version + +Increment the `version` field in the root `pyproject.toml`. CI uses this to detect changes and publishes affected packages to PyPI. 
+ +--- + +## Common Requests + +| User says | Agent action | +|-----------|--------------| +| "Import blueprints from this folder" | Step 1 Option A, then Steps 4–6 | +| "Add this subgraph JSON as a blueprint" | Step 1 Option B, then Steps 4–6 | +| "Add a thumbnail for blueprint X" | Step 2 only, then re-run Step 4 | +| "Embed model info into this blueprint" | Step 3 only, then re-run Steps 1, 4, 5 | +| "Validate blueprints" | Step 5 only | +| "Sync blueprints to packages" | Step 4 only | +| "Why does the index not have my blueprint?" | Check filename is snake_case, re-run `import_blueprints.py` | + +--- + +## File Quick-Reference + +| File / Dir | Purpose | +|------------|---------| +| `blueprints/` | Blueprint JSON files and thumbnail images | +| `blueprints/index.json` | Generated metadata index (do not edit manually) | +| `blueprints/index.schema.json` | JSON schema for index validation | +| `blueprints_bundles.json` | Generated list of all blueprint IDs | +| `scripts/import_blueprints.py` | Normalize filenames, generate index.json and bundles | +| `scripts/sync_blueprints.py` | Generate manifest, copy assets to package directories | +| `scripts/validate_blueprints.py` | Validate all blueprints and consistency checks | +| `pyproject.toml` | Root package version (bump before PR) | +| `packages/blueprints/` | `comfyui-subgraph-blueprints` package (generated assets) | +| `packages/core/.../blueprints_manifest.json` | Generated manifest consumed by the Python API | diff --git a/.github/.spellcheck-i18n.yml b/.github/.spellcheck-i18n.yml new file mode 100644 index 000000000..9667e0722 --- /dev/null +++ b/.github/.spellcheck-i18n.yml @@ -0,0 +1,16 @@ +matrix: +- name: i18n JSON (English fields) + sources: + - 'scripts/i18n.json' + aspell: + lang: en + dictionary: + wordlists: + - .github/.wordlist.txt + encoding: utf-8 + pipeline: + - pyspelling.filters.context: + context_visible_first: true + delimiters: + - open: '"en":\s*"' + close: '"' diff --git 
a/.github/.spellcheck-workflows.yml b/.github/.spellcheck-workflows.yml new file mode 100644 index 000000000..50fd9c6f8 --- /dev/null +++ b/.github/.spellcheck-workflows.yml @@ -0,0 +1,21 @@ +matrix: +- name: Workflow Notes + sources: + - '/tmp/workflow_notes.txt' + aspell: + lang: en + dictionary: + wordlists: + - .github/.wordlist.txt + encoding: utf-8 + pipeline: + - pyspelling.filters.markdown: + markdown_extensions: + - pymdownx.superfences: + - pyspelling.filters.html: + comments: false + ignores: + - code + - pre + - a + - pyspelling.filters.url: diff --git a/.github/.spellcheck.yml b/.github/.spellcheck.yml new file mode 100644 index 000000000..4d206f247 --- /dev/null +++ b/.github/.spellcheck.yml @@ -0,0 +1,18 @@ +matrix: +- name: Index JSON Files + sources: + - 'templates/index.json' + aspell: + lang: en + dictionary: + wordlists: + - .github/.wordlist.txt + encoding: utf-8 + pipeline: + - pyspelling.filters.context: + context_visible_first: true + delimiters: + - open: '"title":\s*"' + close: '"' + - open: '"description":\s*"' + close: '"' diff --git a/.github/.wordlist.txt b/.github/.wordlist.txt new file mode 100644 index 000000000..a659ca365 --- /dev/null +++ b/.github/.wordlist.txt @@ -0,0 +1,725 @@ +# Custom dictionary for ComfyUI workflow templates +ComfyUI +ComfyUI's +Comfy +comfyui +comfyanonymous +webp +WEBP +png +jpeg +jpg +MP4 +mp4 +mp3 +workflow +workflows +VAE +astra +vae +UNETLoader +CLIPLoader +VAELoader +SaveWEBM +huggingface +embeddings +UNET +safetensors +repackaged +fp8 +bf16 +xxl +thumbnails +Wan +ltxv +cnr +diffusion +PyPi +pyproject +toml +PySpelling +Zod +e4m3fn +t2v +s2v +i2v +t2i +i2i +modelFile +Github +Pinia +composables +Vitest +Playwright +eslint +prettier +EzGif +ezgif +gif +markdown +Markdownlint +nodejs +npm +pnpm +repo +utils +changelog +encoders +diffusers +configs +metadata +SHA256 +bytedance +seedream4 +flf2v +bfl +kontext +vercel +Qwen +qwen +subgraphed +moduleName +mediaType +mediaSubtype +tutorialUrl 
+thumbnailVariant +hoverDissolve +hoverZoom +compareSlider +isEssential +LoRA +lora +inpaint +inpainting +upscaling +upscale +ControlNet +controlnet +SDXL +sdxl +Flux +flux +Hunyuan3D +hunyuan +ACE +upscaler +detailer +img2img +latents +IPAdapter +AnimateDiff +AnimateLCM +LCM +LTX +LTXV +CogVideo +CogVideoX +GGUF +gguf +fp16 +SD1 +SD3 +SD3.5 +Latte +Mochi +Mamba +SORA +GenAI +StyleGan +StyleGAN +stylization +VideoLLM +controlnets +DepthMap +depthmap +Canny +OpenPose +ComfyOrg +Comfy-Org +Safetensor +PyTorch +pytorch +cuda +CUDA +VRAM +vram +GPU +cpu +checkpoints +OSS +API +api +JSON +json +UI +mediafiles +videogen +imagegen +audiogen +quantized +quantization +MarkdownNote +civitai +majicmixRealistic +japaneseStyleRealistic +openpose +mse +ema +pruned +stabilityai +fp +v11p +v7 +v20 +sd15 +vae-ft-mse +safetensor +workflows +href +url +github +README +readme +workaround +KeepIntermediate +LANs +LoadImage +MetadataImage +SaveVideo +ViduImageToVideoNode +bgcolor +config +df +ds +eb +frontendVersion +latentpreview +latentpreviewrate +nTo +pos +ue +unconnectable +ver +vidu +viduq +ImageToVideo +VideoNode +metadata +frontend +backend +latent +unconnected +# 3D / rendering domain terms +overbaked +topologized +# AI / ML models and tools +ACG +AceStep +Alibaba +Alvdansen +Anima +AuraFlow +BFL +Capybara +CausVid +Chroma +ChronoEdit +DWPose +Danbooru +DiffSynth +FBX +Fanghua +Fannovel +FireRed +GAN +GIMM +HED +HiDream +HuMo +Huanyuan +HunyuanVideo +IPAdapters +InstantX +Julien +KJNodes +Kijai +Kling +Lightricks +LLM +LoadAudio +LongCat +Lumina +MelBandRoFormer +Modelscope +Moonvalley +Nano +NetaYumev +NewBie +NormalCrafterWrapper +OOM +OneReward +Ovis +RealESRGAN +Recraft +Redux +SCAIL +SUPIR +SaveAudioMP +SaveGLB +SeedVR +Sora +TTS +VACE +WanAnimate +WanAnimateToVideo +WanInfiniteTalkToVideo +WanMoveTracksFromCoords +WanVaceToVideo +WebM +# Node names and technical identifiers +AudioCrop +AudioTools +ImageCompositeMasked +ImageScaleToMaxDimension +MaskEditor 
+MelBandRoFormerModelLoader +MelBandRoFormerSampler +ModelSamplingAuraFlow +PrimitiveNode +PixelSort +# Common abbreviations used in workflow notes +ae +cfg +CFG +ctrl +CTRL +denoise +denoised +denoising +depthanything +DepthAnythingV +DownloadAndLoadDepthAnythingV +dtype +flf +gemma +glb +hidream +hunyuanvideo +jina +lcm +loras +LoRAs +LoRa +ltx +luma +ministral +multimodal +omnigen +outpaint +preprocess +preprocessor +preprocessors +px +rgb +schnell +sd +sigclip +subgraph +subgraphs +timestep +vitl +# Additional model/node/author names found in workflow notes +APIs +ATI +BFL +Crossfade +crossfade +Dev +dev +Durations +CircleStone +EasyCache +HD +INFL +MLSD +MJM +ModelSamplingFlux +Neta +NetaYume +NewBieAI +Omni +PAI +PBench +RTX +Rdphoto +RealAlpha +TurboComfyv +UGC +UltraSharp +Unbypass +Unmute +Upscales +VLLM +VPN +VideoHelperSuite +WanMove +WanVideo +Wuli +ZImageFunControlnet +abliterated +acestep +aio +alibaba +annovel +bande +byt +capybara +chinese +chroma +chrono +chronoedit +coords +de +desc +dessinee +detr +diffsynth +dit +drozbay +emaonly +english +ernie +finetuned +fn +glyphxl +gummycandy +hardcode +hgnet +humo +infiniteTalk +iteratively +juggernautXL +kandinsky +kijai +klein +krea +kv +lightx +lineart +longcat +meituan +multistep +nd +num +outpainting +ovis +pai +params +pc +photorealistic +preprocessed +pretrain +pretrained +pth +quantile +redux +reframe +reframes +reproducibility +rescaling +rgba +risograph +sdpose +sft +shoujo +softedge +sr +talkvid +timesteps +umt +uploadable +upscaled +uso +vace +vec +vl +wav +wholebody +xgen +xl +zhuanchang +zimage +KSampler +Ksampler +ByteDance +Seedance +LoRas +dropdown +MelBandRoformer +Denoises +ModelSampling +CMD +SYSTMS +systms +anima +BFL's +VC +vc +Un +centric +https +kijai's +KJNode's +Kosinkadink +bidirect +embeded +liveportrait +# Additional words from spellcheck failures +AIrt +BRIA +ESRGAN +ElevenLabs +FlashVSR +GPT +HappyHorse +HitPaw +Init +LYF +LivePortrait +LoadVideo +MAchIne +Magnific +Matcher 
+Meshy +MiniMax +Mockup +Mockups +Nvidia +OOD +OmniGen +OpenAI +Packag +PixVerse +PromptSelectorBasic +PurzBeats +QwenVL +Reimagine +Reve +SDPose +SVD +SVG +SaveImage +Seedream +Sonilo +Swwan +Tencent +Tripo +Vectorization +Veo +WanAnimatePreprocess +WanVideoWrapper +WaveSpeed +ai +airt +animatediff +ati +audioseparation +bg +bria +bv +bw +bytedace +cartier +clud +coasta +comfyorg +controlaltai +crafter +cropandstitch +cyber +dall +depthAnything +depthanythingv +dieline +dj +egyptian +elevenLabs +elevenlabs +eric +facegen +feild +fi +firered +flshvsr +flsih +gan +gemini +gettysburg +gimm +goldenfish +gpt +gpu +gsc +gsl +hailuo +happyhorse +hdr +hellorob +hitpaw +hk +ia +includeOnDistributions +infinitetalk +infl +ingi +init +inp +instantx +io +ipadapter +ipadapters +iso +japanese +kjnodes +kling +layerstyle +liveportraitkj +liveportrat +llm +lucide +lumina +machIne +magnific +mech +melbandroformer +meshy +minimax +mjm +mockups +moonvalley +multiangle +multishot +multistyle +multiview +nano +nanobanana +nanobananapro +nb +netayume +newbieimage +nodeId +nodeType +normals +ns +ohneis +omni +ood +openSource +openai +palying +pfp +pipeapple +pixverse +purz +quokka +qwenmultiangle +recomposer +recraft +recreator +remixer +requiresCustomNodes +restyler +retopo +reve +rmbg +rodin +runing +sam +scail +sdturbo +sdxlturbo +searchRank +seedance +seedream +seedvr +sferro +shane +sirolim +skindetail +sonilo +sora +sportbike +streetwear +supir +svg +tripo +tts +turo +tv +txt +ultimatesdupscale +uv +veo +vfi +vid +videohelpersuite +videoupscaler +vr +vton +wanmove +wanvideowrapper +wavespeed +wavespped +wireframe +workBox +workbox +york +zoomHover + +# Words from index.json spellcheck - valid technical/brand terms +Astra +DiT +Equirectangular +FIBO +Grayscale +HY +HunYuan +KeyFrame +Keyframe +MMDiT +MV +MiniMax's +OOTD +OpenAI's +PBR +Photoshoot +Recraft's +Retopology +Reve's +SUPIR's +Skybox +Tripo's +VideoGen +Vidu's +Wes +apo +customizable +durations +equirectangular +flatlay 
+impactful +keyframe +keyframes +keypoints +lookdev +multiviews +outpainted +photogrammetry +photorealism +recomposition +relit +skybox +unbypass +undistilled +upscales +ByteDance's diff --git a/.github/extract_workflow_text.py b/.github/extract_workflow_text.py new file mode 100644 index 000000000..a7cc366f3 --- /dev/null +++ b/.github/extract_workflow_text.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python3 +""" +Extract text content from workflow JSON files for spellchecking. +Reads all workflow JSONs in a directory and outputs the combined text +from MarkdownNote and Note nodes to a single file. + +With --index, extracts English title/description/tag fields from index.json instead. +""" +import argparse +import json +import sys +from pathlib import Path + + +def extract_notes_from_workflow(path: Path) -> list[str]: + """Return text strings from MarkdownNote/Note nodes in a workflow JSON.""" + try: + data = json.loads(path.read_text(encoding="utf-8")) + except (json.JSONDecodeError, OSError): + return [] + + nodes = [] + if isinstance(data, dict): + if "nodes" in data: + nodes = data["nodes"] + elif "workflow" in data and "nodes" in data.get("workflow", {}): + nodes = data["workflow"]["nodes"] + elif isinstance(data, list): + nodes = data + + texts = [] + for node in nodes: + if not isinstance(node, dict): + continue + if node.get("type") not in ("MarkdownNote", "Note"): + continue + for value in node.get("widgets_values", []): + if isinstance(value, str) and value.strip(): + texts.append(value.strip()) + + return texts + + +def extract_index_english_text(index_path: Path) -> list[str]: + """Extract English title, description, and tag strings from index.json.""" + try: + data = json.loads(index_path.read_text(encoding="utf-8")) + except (json.JSONDecodeError, OSError): + return [] + + texts = [] + for category in data: + if not isinstance(category, dict): + continue + for template in category.get("templates", []): + if not isinstance(template, dict): + continue + for 
field in ("title", "description"): + value = template.get(field, "") + if isinstance(value, str) and value.strip(): + texts.append(value.strip()) + for tag in template.get("tags", []): + if isinstance(tag, str) and tag.strip(): + texts.append(tag.strip()) + return texts + + +def build_token_source_map(file_texts: list[tuple[str, list[str]]]) -> dict[str, list[str]]: + """Return {lowercase_token: [filename, ...]} for every word in every text block.""" + import re + token_map: dict[str, list[str]] = {} + for filename, texts in file_texts: + seen: set[str] = set() + for text in texts: + for token in re.findall(r"[A-Za-z']+", text): + t = token.lower() + if t not in seen: + seen.add(t) + token_map.setdefault(t, []) + if filename not in token_map[t]: + token_map[t].append(filename) + return token_map + + +def main() -> None: + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument("--input-dir", required=True, help="Directory containing workflow JSON files (also used to find index.json with --index)") + parser.add_argument("--output-file", required=True, help="Output text file for spellchecking") + parser.add_argument("--source-map", help="Optional path to write a JSON token→[filename] map for error attribution") + parser.add_argument("--files", nargs="*", help="Specific files to extract (relative or absolute paths); if omitted, all JSONs in --input-dir are used") + parser.add_argument("--index", action="store_true", help="Extract English text from index.json instead of workflow note nodes") + args = parser.parse_args() + + input_dir = Path(args.input_dir) + output = Path(args.output_file) + output.parent.mkdir(parents=True, exist_ok=True) + + if args.index: + index_path = input_dir / "index.json" + if not index_path.exists(): + print(f"Error: {index_path} not found", file=sys.stderr) + sys.exit(1) + all_texts = extract_index_english_text(index_path) + if all_texts: + output.write_text("\n".join(all_texts), encoding="utf-8") + print(f"Extracted 
{len(all_texts)} text field(s) from {index_path} → {output}") + else: + output.write_text("", encoding="utf-8") + print(f"No text found in {index_path}; wrote empty file → {output}") + if args.source_map: + token_map = build_token_source_map([("index.json", all_texts)]) + Path(args.source_map).write_text(json.dumps(token_map), encoding="utf-8") + return + + if not input_dir.is_dir(): + print(f"Error: {input_dir} is not a directory", file=sys.stderr) + sys.exit(1) + + if args.files: + workflow_files = sorted(Path(f) for f in args.files if not Path(f).name.startswith("index")) + else: + workflow_files = [ + p for p in sorted(input_dir.glob("*.json")) + if not p.name.startswith("index") + ] + + file_texts: list[tuple[str, list[str]]] = [] + all_texts: list[str] = [] + for wf_path in workflow_files: + texts = extract_notes_from_workflow(wf_path) + if texts: + file_texts.append((wf_path.name, texts)) + all_texts.extend(texts) + + if all_texts: + output.write_text("\n\n---\n\n".join(all_texts), encoding="utf-8") + print(f"Extracted {len(all_texts)} note(s) from {len(workflow_files)} workflow(s) → {output}") + else: + output.write_text("", encoding="utf-8") + print(f"No notes found in {len(workflow_files)} workflow(s); wrote empty file → {output}") + + if args.source_map: + token_map = build_token_source_map(file_texts) + Path(args.source_map).write_text(json.dumps(token_map), encoding="utf-8") + + +if __name__ == "__main__": + main() diff --git a/.github/workflows/build-test.yml b/.github/workflows/build-test.yml index b4ca3134b..d67b2f350 100644 --- a/.github/workflows/build-test.yml +++ b/.github/workflows/build-test.yml @@ -11,6 +11,21 @@ jobs: steps: - name: Checkout repository uses: actions/checkout@v4 + with: + fetch-depth: 1 + sparse-checkout: | + templates/ + blueprints/ + bundles.json + blueprints_bundles.json + scripts/ + packages/ + nx.json + package.json + package-lock.json + pyproject.toml + tsconfig.base.json + sparse-checkout-cone-mode: false - name: Set 
up Node.js uses: actions/setup-node@v4 diff --git a/.github/workflows/check_input_assets.yml b/.github/workflows/check_input_assets.yml index f525d78f3..d3b6fa37e 100644 --- a/.github/workflows/check_input_assets.yml +++ b/.github/workflows/check_input_assets.yml @@ -21,7 +21,12 @@ jobs: uses: actions/checkout@v4 with: fetch-depth: 0 - + sparse-checkout: | + templates/*.json + input/ + scripts/check_input_assets.py + sparse-checkout-cone-mode: false + - name: Set up Python uses: actions/setup-python@v5 with: diff --git a/.github/workflows/cron-rebuild-site.yml b/.github/workflows/cron-rebuild-site.yml new file mode 100644 index 000000000..55662765e --- /dev/null +++ b/.github/workflows/cron-rebuild-site.yml @@ -0,0 +1,81 @@ +name: Cron Rebuild Site + +on: + schedule: + # Once per day at midnight UTC — rebuilds the site to pick up new UGC workflows + # for search index, sitemap, filter pages, and pre-rendered detail pages. + # Only approved workflows are included in the production build. 
+ - cron: '0 0 * * *' + workflow_dispatch: # Allow manual trigger + +concurrency: + group: deploy-prod + cancel-in-progress: false + +permissions: + contents: read + +jobs: + rebuild: + runs-on: ubuntu-latest + env: + SKIP_AI_GENERATION: 'true' + PUBLIC_POSTHOG_KEY: phc_iKfK86id4xVYws9LybMje0h44eGtfwFgRPIBehmy8rO + PUBLIC_GA_MEASUREMENT_ID: ${{ secrets.PUBLIC_GA_MEASUREMENT_ID }} + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup + uses: ./.github/actions/site-setup + + - name: Restore content cache + uses: actions/cache@v4 + with: + path: site/.content-cache + key: content-cache-cron-prod-${{ hashFiles('templates/**', 'site/src/**') }} + restore-keys: | + content-cache-cron-prod- + + - name: Sync templates + run: pnpm run sync + working-directory: site + + - name: Build Astro site + run: pnpm run build + working-directory: site + env: + PUBLIC_HUB_API_URL: ${{ secrets.HUB_API_URL_PRODUCTION }} + PUBLIC_COMFY_CLOUD_URL: ${{ secrets.COMFY_CLOUD_URL_PRODUCTION }} + PUBLIC_APPROVED_ONLY: 'true' + + - name: Deploy to Vercel + working-directory: site + env: + VERCEL_TOKEN: ${{ secrets.VERCEL_TOKEN }} + VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }} + VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }} + run: | + stdout_file="$RUNNER_TEMP/vercel-production.stdout" + stderr_file="$RUNNER_TEMP/vercel-production.stderr" + site_output_dir="$GITHUB_WORKSPACE/site/.vercel/output" + root_output_dir="$GITHUB_WORKSPACE/.vercel/output" + + if [ ! 
-d "$site_output_dir" ]; then + echo "Expected prebuilt output in $site_output_dir" >&2 + exit 1 + fi + + mkdir -p "$GITHUB_WORKSPACE/.vercel" + rm -rf "$root_output_dir" + cp -R "$site_output_dir" "$root_output_dir" + + if npx vercel@latest deploy --cwd "$GITHUB_WORKSPACE" --prebuilt --prod --yes \ + --token="$VERCEL_TOKEN" >"$stdout_file" 2>"$stderr_file"; then + cat "$stderr_file" >&2 + cat "$stdout_file" + else + cat "$stderr_file" >&2 + cat "$stdout_file" + exit 1 + fi diff --git a/.github/workflows/deploy-site.yml b/.github/workflows/deploy-site.yml index c503de609..77a6c072b 100644 --- a/.github/workflows/deploy-site.yml +++ b/.github/workflows/deploy-site.yml @@ -1,3 +1,9 @@ +# Triggers: (1) workflow_dispatch — always runs. (2) workflow_run after +# "Publish to PyPI" on main — runs only if that workflow succeeded AND +# repository variable DEPLOY_SITE_ON_PYPI_PUBLISH is not the string "false". +# To disable version-bump chain deploy: Settings > Secrets and variables > +# Actions > Variables — set DEPLOY_SITE_ON_PYPI_PUBLISH to false. Remove it or +# set to true to enable (matches historical behavior when unset). 
name: Deploy Template Site on: @@ -21,7 +27,7 @@ on: branches: [main] concurrency: - group: deploy-${{ github.ref }} + group: deploy-prod cancel-in-progress: false permissions: @@ -30,7 +36,13 @@ permissions: jobs: build-deploy: runs-on: ubuntu-latest - if: ${{ github.event_name == 'workflow_dispatch' || github.event.workflow_run.conclusion == 'success' }} + if: >- + github.event_name == 'workflow_dispatch' + || ( + github.event_name == 'workflow_run' + && github.event.workflow_run.conclusion == 'success' + && vars.DEPLOY_SITE_ON_PYPI_PUBLISH != 'false' + ) env: OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} PUBLIC_POSTHOG_KEY: phc_iKfK86id4xVYws9LybMje0h44eGtfwFgRPIBehmy8rO @@ -60,12 +72,38 @@ jobs: - name: Build Astro site run: pnpm run build working-directory: site + env: + PUBLIC_HUB_API_URL: ${{ secrets.HUB_API_URL_PRODUCTION }} + PUBLIC_COMFY_CLOUD_URL: ${{ secrets.COMFY_CLOUD_URL_PRODUCTION }} + PUBLIC_APPROVED_ONLY: 'true' - name: Deploy to Vercel - uses: amondnet/vercel-action@v25 - with: - vercel-token: ${{ secrets.VERCEL_TOKEN }} - vercel-org-id: ${{ secrets.VERCEL_ORG_ID }} - vercel-project-id: ${{ secrets.VERCEL_PROJECT_ID }} - working-directory: site - vercel-args: '--prebuilt --prod' + working-directory: site + env: + VERCEL_TOKEN: ${{ secrets.VERCEL_TOKEN }} + VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }} + VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }} + run: | + stdout_file="$RUNNER_TEMP/vercel-production.stdout" + stderr_file="$RUNNER_TEMP/vercel-production.stderr" + site_output_dir="$GITHUB_WORKSPACE/site/.vercel/output" + root_output_dir="$GITHUB_WORKSPACE/.vercel/output" + + if [ ! 
-d "$site_output_dir" ]; then + echo "Expected prebuilt output in $site_output_dir" >&2 + exit 1 + fi + + mkdir -p "$GITHUB_WORKSPACE/.vercel" + rm -rf "$root_output_dir" + cp -R "$site_output_dir" "$root_output_dir" + + if npx vercel@latest deploy --cwd "$GITHUB_WORKSPACE" --prebuilt --prod --yes \ + --token="$VERCEL_TOKEN" >"$stdout_file" 2>"$stderr_file"; then + cat "$stderr_file" >&2 + cat "$stdout_file" + else + cat "$stderr_file" >&2 + cat "$stdout_file" + exit 1 + fi diff --git a/.github/workflows/generate-upload-json.yml b/.github/workflows/generate-upload-json.yml index 00d9c96af..f0d588322 100644 --- a/.github/workflows/generate-upload-json.yml +++ b/.github/workflows/generate-upload-json.yml @@ -32,7 +32,13 @@ jobs: fetch-depth: 0 ref: ${{ github.event_name == 'pull_request' && github.head_ref || github.ref }} token: ${{ secrets.GITHUB_TOKEN }} - + sparse-checkout: | + templates/index.json + input/ + scripts/check_input_assets.py + workflow_template_input_files.json + sparse-checkout-cone-mode: false + - name: Set up Python uses: actions/setup-python@v5 with: diff --git a/.github/workflows/link-checker.yml b/.github/workflows/link-checker.yml index f29243ea3..3f7376f0c 100644 --- a/.github/workflows/link-checker.yml +++ b/.github/workflows/link-checker.yml @@ -22,6 +22,12 @@ jobs: steps: - name: Checkout repository uses: actions/checkout@v4 + with: + sparse-checkout: | + templates/*.json + scripts/check_links.py + scripts/whitelist.json + sparse-checkout-cone-mode: false - name: Set up Python uses: actions/setup-python@v5 @@ -30,7 +36,7 @@ jobs: - name: Install lychee run: | - curl -sSL https://github.com/lycheeverse/lychee/releases/latest/download/lychee-x86_64-unknown-linux-gnu.tar.gz | tar -xz + curl -sSL https://github.com/lycheeverse/lychee/releases/download/lychee-v0.24.1/lychee-x86_64-unknown-linux-gnu.tar.gz | tar -xz --strip-components=1 sudo mv lychee /usr/local/bin/ lychee --version diff --git a/.github/workflows/lint.yml 
b/.github/workflows/lint.yml index ff6c6f4ed..03736b858 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -16,6 +16,10 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v4 + with: + sparse-checkout: | + .github/workflows/ + sparse-checkout-cone-mode: false - name: Set up Node.js uses: actions/setup-node@v4 diff --git a/.github/workflows/model-analysis.yml b/.github/workflows/model-analysis.yml index ceebee57d..58d3f39ca 100644 --- a/.github/workflows/model-analysis.yml +++ b/.github/workflows/model-analysis.yml @@ -20,6 +20,11 @@ jobs: uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha || github.sha }} + sparse-checkout: | + templates/*.json + scripts/analyze_models.py + scripts/whitelist.json + sparse-checkout-cone-mode: false - name: Set up Python uses: actions/setup-python@v5 diff --git a/.github/workflows/preview-cron.yml b/.github/workflows/preview-cron.yml new file mode 100644 index 000000000..7bed19a7a --- /dev/null +++ b/.github/workflows/preview-cron.yml @@ -0,0 +1,155 @@ +name: Preview Cron + +on: + schedule: + - cron: '0 0 * * *' + workflow_dispatch: + +permissions: + contents: read + pull-requests: write + +jobs: + discover: + runs-on: ubuntu-latest + outputs: + matrix: ${{ steps.targets.outputs.matrix }} + steps: + - uses: actions/checkout@v4 + + - name: Build rebuild targets + id: targets + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + targets='[]' + + # Main with production API (all workflows, no approved filter) + targets=$(echo "$targets" | jq -c '. + [{"ref": "main", "is_main": true, "pr": 0, "api_env": "production"}]') + + # Main with test API + targets=$(echo "$targets" | jq -c '. 
+ [{"ref": "main", "is_main": true, "pr": 0, "api_env": "test"}]') + + # Find open PRs with the "preview-cron" label + prs=$(gh pr list --label "preview-cron" --state open --json number,headRefName) + for row in $(echo "$prs" | jq -c '.[]'); do + ref=$(echo "$row" | jq -r '.headRefName') + num=$(echo "$row" | jq -r '.number') + targets=$(echo "$targets" | jq -c \ + --arg ref "$ref" --argjson num "$num" \ + '. + [{"ref": $ref, "is_main": false, "pr": $num, "api_env": "test"}]') + done + + echo "matrix={\"include\":$targets}" >> "$GITHUB_OUTPUT" + echo "### Rebuild targets" >> "$GITHUB_STEP_SUMMARY" + echo "$targets" | jq '.' >> "$GITHUB_STEP_SUMMARY" + + rebuild: + needs: discover + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: ${{ fromJson(needs.discover.outputs.matrix) }} + concurrency: + group: preview-cron-${{ matrix.ref }}-${{ matrix.api_env }} + cancel-in-progress: true + env: + SKIP_AI_GENERATION: 'true' + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + ref: ${{ matrix.ref }} + + - name: Setup + uses: ./.github/actions/site-setup + + - name: Restore content cache + uses: actions/cache@v4 + with: + path: site/.content-cache + key: content-cache-cron-${{ matrix.ref }}-${{ matrix.api_env }}-${{ hashFiles('templates/**', 'site/src/**') }} + restore-keys: | + content-cache-cron-${{ matrix.ref }}-${{ matrix.api_env }}- + + - name: Sync templates + run: pnpm run sync:en-only + working-directory: site + + - name: Build Astro site + run: pnpm run build + working-directory: site + env: + PUBLIC_HUB_API_URL: ${{ matrix.api_env == 'test' && secrets.HUB_API_URL_PREVIEW || secrets.HUB_API_URL_PRODUCTION }} + PUBLIC_COMFY_CLOUD_URL: ${{ matrix.api_env == 'test' && secrets.COMFY_CLOUD_URL_PREVIEW || secrets.COMFY_CLOUD_URL_PRODUCTION }} + + - name: Deploy to Vercel + id: deploy + working-directory: site + env: + VERCEL_TOKEN: ${{ secrets.VERCEL_TOKEN }} + VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }} + VERCEL_PROJECT_ID: ${{ 
secrets.VERCEL_PROJECT_ID }} + run: | + stdout_file="$RUNNER_TEMP/vercel-preview.stdout" + stderr_file="$RUNNER_TEMP/vercel-preview.stderr" + site_output_dir="$GITHUB_WORKSPACE/site/.vercel/output" + root_output_dir="$GITHUB_WORKSPACE/.vercel/output" + + if [ ! -d "$site_output_dir" ]; then + echo "Expected prebuilt output in $site_output_dir" >&2 + exit 1 + fi + + mkdir -p "$GITHUB_WORKSPACE/.vercel" + rm -rf "$root_output_dir" + cp -R "$site_output_dir" "$root_output_dir" + + if npx vercel@latest deploy --cwd "$GITHUB_WORKSPACE" --prebuilt --yes \ + --token="$VERCEL_TOKEN" >"$stdout_file" 2>"$stderr_file"; then + cat "$stderr_file" >&2 + cat "$stdout_file" + + preview_url="$(tail -n 1 "$stdout_file" | tr -d '\r')" + if [[ ! "$preview_url" =~ ^https?:// ]]; then + echo "Could not parse preview URL from Vercel CLI output" >&2 + exit 1 + fi + + echo "preview-url=$preview_url" >> "$GITHUB_OUTPUT" + else + cat "$stderr_file" >&2 + cat "$stdout_file" + exit 1 + fi + + - name: Alias main preview + if: matrix.is_main + env: + PREVIEW_URL: ${{ steps.deploy.outputs.preview-url }} + PROD_ALIAS: ${{ secrets.VERCEL_PREVIEW_ALIAS }} + TEST_ALIAS: ${{ secrets.VERCEL_PREVIEW_TEST_ALIAS }} + API_ENV: ${{ matrix.api_env }} + VERCEL_TOKEN_VAL: ${{ secrets.VERCEL_TOKEN }} + VERCEL_SCOPE: ${{ secrets.VERCEL_ORG_ID }} + run: | + if [ "$API_ENV" = "production" ]; then + ALIAS="$PROD_ALIAS" + else + ALIAS="$TEST_ALIAS" + fi + if [ -n "$ALIAS" ]; then + npx vercel alias "$PREVIEW_URL" "$ALIAS" --token="$VERCEL_TOKEN_VAL" --scope="$VERCEL_SCOPE" + else + echo "Alias secret not set for $API_ENV, skipping" + fi + + - name: Comment preview URL on PR + if: matrix.pr > 0 + uses: marocchino/sticky-pull-request-comment@v2 + with: + number: ${{ matrix.pr }} + header: preview-cron + message: | + 🔄 **Preview cron rebuilt:** ${{ steps.deploy.outputs.preview-url }} + _Last rebuild: ${{ github.event.head_commit.timestamp || 'manual trigger' }}_ diff --git a/.github/workflows/preview-site.yml 
b/.github/workflows/preview-site.yml new file mode 100644 index 000000000..e4e31a3c0 --- /dev/null +++ b/.github/workflows/preview-site.yml @@ -0,0 +1,94 @@ +name: Preview Site + +on: + pull_request: + paths: + - 'site/**' + workflow_dispatch: + +concurrency: + group: preview-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +permissions: + contents: read + pull-requests: write + +jobs: + preview: + runs-on: ubuntu-latest + env: + SKIP_AI_GENERATION: 'true' + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Setup + uses: ./.github/actions/site-setup + + - name: Restore content cache + uses: actions/cache@v4 + with: + path: site/.content-cache + key: content-cache-${{ hashFiles('templates/**', 'site/src/**') }} + restore-keys: | + content-cache- + + - name: Sync templates + run: pnpm run sync:en-only + working-directory: site + + - name: Build Astro site + run: pnpm run build + working-directory: site + env: + PUBLIC_HUB_API_URL: ${{ secrets.HUB_API_URL_PREVIEW }} + PUBLIC_COMFY_CLOUD_URL: ${{ secrets.COMFY_CLOUD_URL_PREVIEW }} + + - name: Deploy preview to Vercel + id: deploy + working-directory: site + env: + VERCEL_TOKEN: ${{ secrets.VERCEL_TOKEN }} + VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }} + VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }} + run: | + stdout_file="$RUNNER_TEMP/vercel-preview.stdout" + stderr_file="$RUNNER_TEMP/vercel-preview.stderr" + site_output_dir="$GITHUB_WORKSPACE/site/.vercel/output" + root_output_dir="$GITHUB_WORKSPACE/.vercel/output" + + if [ ! 
-d "$site_output_dir" ]; then + echo "Expected prebuilt output in $site_output_dir" >&2 + exit 1 + fi + + mkdir -p "$GITHUB_WORKSPACE/.vercel" + rm -rf "$root_output_dir" + cp -R "$site_output_dir" "$root_output_dir" + + if npx vercel@latest deploy --cwd "$GITHUB_WORKSPACE" --prebuilt --yes \ + --token="$VERCEL_TOKEN" >"$stdout_file" 2>"$stderr_file"; then + cat "$stderr_file" >&2 + cat "$stdout_file" + + preview_url="$(tail -n 1 "$stdout_file" | tr -d '\r')" + if [[ ! "$preview_url" =~ ^https?:// ]]; then + echo "Could not parse preview URL from Vercel CLI output" >&2 + exit 1 + fi + + echo "preview-url=$preview_url" >> "$GITHUB_OUTPUT" + else + cat "$stderr_file" >&2 + cat "$stdout_file" + exit 1 + fi + + - name: Comment preview URL + if: github.event_name == 'pull_request' + uses: marocchino/sticky-pull-request-comment@v2 + with: + header: vercel-preview + message: | + 🚀 **Preview deployed:** ${{ steps.deploy.outputs.preview-url }} diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 38e8e1a2a..840e74618 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -2,22 +2,109 @@ name: Publish to PyPI # This workflow publishes packages to PyPI and creates GitHub releases. # Triggers: -# 1. When pyproject.toml changes on main branch (version bump from PR merge) +# 1. When pyproject.toml changes on main branch (version bump from PR merge) # 2. 
Manual dispatch (for recovery/debugging) -# +# # Features: +# - Conditional PyPI publishing (only when PR has 'release' label) +# - Always creates GitHub Release (regardless of label) # - Version change detection (prevents unnecessary publishes) # - Recovery mode (publishes packages out-of-sync with PyPI) # - Dependency order publishing (subpackages before meta package) on: workflow_dispatch: + inputs: + force_publish: + description: 'Force publish to PyPI (bypass label check)' + required: false + type: boolean + default: false push: branches: [main] paths: - "pyproject.toml" jobs: + check-pr-label: + runs-on: ubuntu-latest + outputs: + should-publish: ${{ steps.check.outputs.should_publish }} + pr-number: ${{ steps.pr.outputs.number }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + sparse-checkout: | + pyproject.toml + sparse-checkout-cone-mode: false + + - name: Find associated PR + id: pr + uses: actions/github-script@v7 + with: + script: | + // For workflow_dispatch, allow force publish + if (context.eventName === 'workflow_dispatch') { + const forcePublish = '${{ inputs.force_publish }}' === 'true'; + core.setOutput('number', forcePublish ? 
'manual' : ''); + return; + } + + // Find PR associated with this merge commit + const commit = context.sha; + const { data: prs } = await github.rest.repos.listPullRequestsAssociatedWithCommit({ + owner: context.repo.owner, + repo: context.repo.repo, + commit_sha: commit, + }); + + if (prs.length > 0) { + const pr = prs[0]; + core.setOutput('number', pr.number); + console.log(`Found associated PR #${pr.number}`); + } else { + core.setOutput('number', ''); + console.log('No associated PR found'); + } + + - name: Check for release label + id: check + uses: actions/github-script@v7 + with: + script: | + const prNumber = '${{ steps.pr.outputs.number }}'; + + // Manual workflow with force_publish + if (prNumber === 'manual') { + core.setOutput('should_publish', 'true'); + console.log('✅ Manual workflow dispatch with force_publish=true - will publish to PyPI'); + return; + } + + // No PR found - skip PyPI publish (but still create GitHub release) + if (!prNumber) { + core.setOutput('should_publish', 'false'); + console.log('⚠️ No associated PR found - skipping PyPI publish (will only create GitHub release)'); + return; + } + + // Check if PR has 'release' label + const { data: pr } = await github.rest.pulls.get({ + owner: context.repo.owner, + repo: context.repo.repo, + pull_number: parseInt(prNumber), + }); + + const hasReleaseLabel = pr.labels.some(label => label.name === 'release'); + core.setOutput('should_publish', hasReleaseLabel ? 
'true' : 'false'); + + if (hasReleaseLabel) { + console.log(`✅ PR #${prNumber} has 'release' label - will publish to PyPI`); + } else { + console.log(`⚠️ PR #${prNumber} does NOT have 'release' label - skipping PyPI publish (will only create GitHub release)`); + } + check-version-change: runs-on: ubuntu-latest # Prevent concurrent publish workflows to avoid race conditions @@ -33,6 +120,10 @@ jobs: with: token: ${{ secrets.PAT_TOKEN }} fetch-depth: 0 # Full history needed for tags and version comparison + sparse-checkout: | + pyproject.toml + packages/*/pyproject.toml + sparse-checkout-cone-mode: false - name: Check if version actually changed id: check @@ -49,7 +140,7 @@ jobs: fi publish: - needs: check-version-change + needs: [check-version-change, check-pr-label] if: needs.check-version-change.outputs.version-changed == 'true' || github.event_name == 'workflow_dispatch' runs-on: ubuntu-latest permissions: @@ -83,7 +174,20 @@ jobs: source .venv/bin/activate pip install --upgrade pip build twine + - name: Check PyPI publish decision + run: | + SHOULD_PUBLISH="${{ needs.check-pr-label.outputs.should-publish }}" + PR_NUMBER="${{ needs.check-pr-label.outputs.pr-number }}" + + if [ "$SHOULD_PUBLISH" = "true" ]; then + echo "📦 Will publish to PyPI (PR #$PR_NUMBER has 'release' label)" + else + echo "⏭️ Skipping PyPI publish (PR #$PR_NUMBER missing 'release' label)" + echo " Only GitHub Release will be created" + fi + - name: Determine packages to build + if: needs.check-pr-label.outputs.should-publish == 'true' id: changed-packages run: | echo "=== Package Detection Logic ===" @@ -97,7 +201,7 @@ jobs: # Check for package version changes (primary trigger) echo -e "\n--- Checking for changed package versions ---" - for pkg in core media_api media_video media_image media_other; do + for pkg in core media_api media_video media_image media_other blueprints; do if echo "$CHANGED_FILES" | grep -q "packages/$pkg/pyproject.toml"; then PACKAGES="$PACKAGES $pkg" echo "✓ Will 
publish $pkg (version file changed)" @@ -183,6 +287,7 @@ jobs: fi - name: Validate bundles before publishing + if: needs.check-pr-label.outputs.should-publish == 'true' run: | echo "=== Pre-publish Bundle Validation ===" # Safety check: ensure all templates are assigned to bundles @@ -190,6 +295,7 @@ jobs: ./scripts/ci/validate_bundles.sh - name: Sync manifest and bundle assets + if: needs.check-pr-label.outputs.should-publish == 'true' run: | source .venv/bin/activate echo "=== Syncing manifests and bundle assets ===" @@ -197,6 +303,7 @@ jobs: echo "✅ Manifest and bundle sync complete" - name: Build and publish packages in dependency order + if: needs.check-pr-label.outputs.should-publish == 'true' run: | source .venv/bin/activate rm -rf dist @@ -282,9 +389,22 @@ jobs: TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} - name: Create GitHub Release + if: always() && needs.check-version-change.outputs.new-version env: GH_TOKEN: ${{ github.token }} run: | - gh release create "v${{ needs.check-version-change.outputs.new-version }}" \ - --title "v${{ needs.check-version-change.outputs.new-version }}" \ - --generate-notes + SHOULD_PUBLISH="${{ needs.check-pr-label.outputs.should-publish }}" + VERSION="${{ needs.check-version-change.outputs.new-version }}" + + # Create release with appropriate notes + if [ "$SHOULD_PUBLISH" = "true" ]; then + gh release create "v$VERSION" \ + --title "v$VERSION" \ + --notes "📦 **Published to PyPI**: Packages are available via \`pip install comfyui-workflow-templates==$VERSION\`" \ + --generate-notes + else + gh release create "v$VERSION" \ + --title "v$VERSION" \ + --notes "⚠️ **Not published to PyPI**: This release was created without PyPI packages (PR missing \`release\` label). To publish, add the \`release\` label to the PR and re-trigger the workflow." 
\ + --generate-notes + fi diff --git a/.github/workflows/spellcheck-comment.yml b/.github/workflows/spellcheck-comment.yml new file mode 100644 index 000000000..b36379a52 --- /dev/null +++ b/.github/workflows/spellcheck-comment.yml @@ -0,0 +1,102 @@ +name: Spellcheck Comment + +on: + workflow_run: + workflows: ["Spellcheck"] + types: [completed] + +jobs: + comment: + name: Post spellcheck results + runs-on: ubuntu-latest + # workflow_run always runs in the context of the base repo, so GITHUB_TOKEN has write access + if: github.event.workflow_run.event == 'pull_request' + permissions: + pull-requests: write + actions: read + steps: + - name: Download results artifact + id: download + uses: actions/github-script@v7 + with: + script: | + const artifacts = await github.rest.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: ${{ github.event.workflow_run.id }}, + }); + const artifact = artifacts.data.artifacts.find(a => a.name === 'spellcheck-results'); + if (!artifact) { + core.setOutput('found', 'false'); + return; + } + core.setOutput('found', 'true'); + const download = await github.rest.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: artifact.id, + archive_format: 'zip', + }); + require('fs').writeFileSync('/tmp/spellcheck-results.zip', Buffer.from(download.data)); + + - name: Extract artifact + if: steps.download.outputs.found == 'true' + run: unzip /tmp/spellcheck-results.zip -d /tmp/spellcheck-results/ + + - name: Post PR comment + if: steps.download.outputs.found == 'true' + uses: actions/github-script@v7 + with: + script: | + const fs = require('fs'); + const readFile = p => { try { return fs.readFileSync(p, 'utf8').trim(); } catch { return ''; } }; + + const results = JSON.parse(fs.readFileSync('/tmp/spellcheck-results/results.json', 'utf8')); + const indexErrors = readFile('/tmp/spellcheck-results/index_errors.txt'); + const workflowErrors = 
readFile('/tmp/spellcheck-results/workflows_errors.txt'); + + const { pr_number, index_result, workflows_result } = results; + const icon = r => r === 'pass' ? '✅' : '❌'; + const label = r => r === 'pass' ? 'No issues found' : 'Misspelled words detected'; + + let body = `## 🔤 Spellcheck Results\n\n`; + body += `| Check | Status |\n|---|---|\n`; + body += `| \`templates/index.json\` (titles & descriptions) | ${icon(index_result)} ${label(index_result)} |\n`; + body += `| Workflow JSON notes (MarkdownNote / Note nodes) | ${icon(workflows_result)} ${label(workflows_result)} |\n\n`; + + if (index_result !== 'pass' && indexErrors) { + body += `
<details>\n<summary>❌ index.json errors</summary>\n\n\`\`\`\n${indexErrors}\n\`\`\`\n</details>
\n\n`; + } + if (workflows_result !== 'pass' && workflowErrors) { + body += `
<details>\n<summary>❌ Workflow notes errors</summary>\n\n\`\`\`\n${workflowErrors}\n\`\`\`\n</details>
\n\n`; + } + + const allPass = index_result === 'pass' && workflows_result === 'pass'; + body += allPass + ? '> All spellchecks passed! 🎉\n' + : '> Add legitimate technical terms to `.github/.wordlist.txt` to suppress false positives.\n'; + + const marker = ''; + body = marker + '\n' + body; + + const { data: comments } = await github.rest.issues.listComments({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: pr_number, + }); + const existing = comments.find(c => c.body.includes(marker)); + if (existing) { + await github.rest.issues.updateComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: existing.id, + body, + }); + } else { + await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: pr_number, + body, + }); + } diff --git a/.github/workflows/spellcheck.yml b/.github/workflows/spellcheck.yml new file mode 100644 index 000000000..84387b85d --- /dev/null +++ b/.github/workflows/spellcheck.yml @@ -0,0 +1,91 @@ +name: Spellcheck + +on: + pull_request: + branches: [ main ] + paths: + - 'templates/**' + - '.github/.spellcheck.yml' + - '.github/.spellcheck-workflows.yml' + - '.github/.wordlist.txt' + - '.github/workflows/spellcheck.yml' + - '.github/extract_workflow_text.py' + +concurrency: + group: spellcheck-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + spellcheck: + name: Spellcheck + runs-on: ubuntu-latest + permissions: + pull-requests: write + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + sparse-checkout: | + templates/ + .github/ + sparse-checkout-cone-mode: false + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.12' + + - name: Install pyspelling + aspell + run: | + sudo apt-get install -y aspell aspell-en + pip install pyspelling pymdown-extensions + + - name: Extract MarkdownNote text from workflow JSONs + run: | + python 
.github/extract_workflow_text.py \ + --input-dir templates \ + --output-file /tmp/workflow_notes.txt + + - name: Spellcheck templates/index.json + id: check_index + run: | + set +e + pyspelling --config .github/.spellcheck.yml 2>&1 | tee /tmp/index_errors.txt + echo "result=$( [ ${PIPESTATUS[0]} -eq 0 ] && echo pass || echo fail )" >> "$GITHUB_OUTPUT" + + - name: Spellcheck workflow JSON notes + id: check_workflows + run: | + set +e + pyspelling --config .github/.spellcheck-workflows.yml --name "Workflow Notes" --source /tmp/workflow_notes.txt 2>&1 | tee /tmp/workflows_errors.txt + echo "result=$( [ ${PIPESTATUS[0]} -eq 0 ] && echo pass || echo fail )" >> "$GITHUB_OUTPUT" + + - name: Save results for PR comment + if: always() && github.event_name == 'pull_request' + run: | + mkdir -p /tmp/spellcheck-results + cat > /tmp/spellcheck-results/results.json << EOF + { + "pr_number": ${{ github.event.pull_request.number }}, + "index_result": "${{ steps.check_index.outputs.result || 'fail' }}", + "workflows_result": "${{ steps.check_workflows.outputs.result || 'fail' }}" + } + EOF + cp /tmp/index_errors.txt /tmp/spellcheck-results/index_errors.txt 2>/dev/null || touch /tmp/spellcheck-results/index_errors.txt + cp /tmp/workflows_errors.txt /tmp/spellcheck-results/workflows_errors.txt 2>/dev/null || touch /tmp/spellcheck-results/workflows_errors.txt + + - name: Upload results artifact + if: always() && github.event_name == 'pull_request' + uses: actions/upload-artifact@v4 + with: + name: spellcheck-results + path: /tmp/spellcheck-results/ + retention-days: 1 + + - name: Fail if any spellcheck failed + if: | + steps.check_index.outputs.result == 'fail' || + steps.check_workflows.outputs.result == 'fail' + run: | + echo "❌ Spellcheck failed — see PR comment for details" + exit 1 diff --git a/.github/workflows/sync-custom-nodes.yml b/.github/workflows/sync-custom-nodes.yml index f5b1a91ad..4ceaba194 100644 --- a/.github/workflows/sync-custom-nodes.yml +++ 
b/.github/workflows/sync-custom-nodes.yml @@ -38,6 +38,10 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} ref: ${{ github.head_ref }} fetch-depth: 0 + sparse-checkout: | + templates/*.json + scripts/sync_custom_nodes.py + sparse-checkout-cone-mode: false - name: Set up Python uses: actions/setup-python@v5 diff --git a/.github/workflows/validate-blueprints.yml b/.github/workflows/validate-blueprints.yml index 359c2a328..8eae69d60 100644 --- a/.github/workflows/validate-blueprints.yml +++ b/.github/workflows/validate-blueprints.yml @@ -28,6 +28,15 @@ jobs: steps: - name: Checkout repository uses: actions/checkout@v4 + with: + sparse-checkout: | + blueprints/ + blueprints_bundles.json + bundles.json + scripts/ + packages/core/ + packages/blueprints/ + sparse-checkout-cone-mode: false - name: Set up Python uses: actions/setup-python@v5 diff --git a/.github/workflows/validate-manifests.yml b/.github/workflows/validate-manifests.yml index ede0e66ae..504b56bf8 100644 --- a/.github/workflows/validate-manifests.yml +++ b/.github/workflows/validate-manifests.yml @@ -26,7 +26,16 @@ jobs: steps: - name: Checkout code uses: actions/checkout@v4 - + with: + sparse-checkout: | + templates/*.json + blueprints/ + bundles.json + blueprints_bundles.json + scripts/ + packages/ + sparse-checkout-cone-mode: false + - name: Set up Python uses: actions/setup-python@v4 with: @@ -45,9 +54,14 @@ jobs: - name: Check for orphaned templates run: | echo "🔍 Checking for templates without manifest entries..." + excluded=$(python3 scripts/list_pip_excluded.py) cd templates orphaned=0 for json_file in *.json; do + template_name="${json_file%.json}" + if echo "$excluded" | grep -qx "$template_name"; then + continue + fi if ! 
grep -q "\"$json_file\"" ../packages/core/src/comfyui_workflow_templates_core/manifest.json; then echo "⚠️ Orphaned template: $json_file" orphaned=$((orphaned + 1)) diff --git a/.github/workflows/validate-templates.yml b/.github/workflows/validate-templates.yml index 148e5688f..aee64f0df 100644 --- a/.github/workflows/validate-templates.yml +++ b/.github/workflows/validate-templates.yml @@ -20,7 +20,14 @@ jobs: steps: - name: Checkout repository uses: actions/checkout@v4 - + with: + sparse-checkout: | + templates/ + scripts/validate_templates.py + scripts/validate_thumbnails.py + scripts/list_pip_excluded.py + sparse-checkout-cone-mode: false + - name: Set up Python uses: actions/setup-python@v5 with: diff --git a/.github/workflows/version-check.yml b/.github/workflows/version-check.yml index d1089d4f1..ef27671db 100644 --- a/.github/workflows/version-check.yml +++ b/.github/workflows/version-check.yml @@ -19,6 +19,8 @@ on: paths: - 'templates/**' - 'bundles.json' + - 'blueprints/**' + - 'blueprints_bundles.json' - 'pyproject.toml' permissions: @@ -41,6 +43,15 @@ jobs: token: ${{ secrets.GITHUB_TOKEN }} ref: ${{ github.head_ref }} fetch-depth: 0 + sparse-checkout: | + templates/ + blueprints/ + bundles.json + blueprints_bundles.json + scripts/ + packages/ + pyproject.toml + sparse-checkout-cone-mode: false - name: Set up Python uses: actions/setup-python@v4 @@ -53,8 +64,139 @@ jobs: # This step validates that all templates are assigned to bundles # and generates updated manifest files with SHA256 hashes # Must run BEFORE version detection so core package manifest.json changes are detected + # + # Pip exclusion filter (cloud-only + requiresCustomNodes templates) is ON by default. 
+ # To temporarily disable it and ship all templates, add --no-filter: + # python scripts/sync_bundles.py --no-filter python scripts/sync_bundles.py - echo "✅ Manifest sync complete" + echo "✅ Template manifest sync complete" + + echo "=== Syncing blueprints manifests ===" + # Sync blueprints from blueprints/ to packages/blueprints/ + python scripts/sync_blueprints.py + echo "✅ Blueprint manifest sync complete" + + - name: Check package sizes + id: size_check + run: | + echo "=== Checking package sizes against PyPI 100MB limit ===" + + # PyPI hard limit is 100MB per file. We warn at 85MB and error at 95MB. + WARN_MB=85 + ERROR_MB=95 + LIMIT_MB=100 + + WARN_BYTES=$((WARN_MB * 1024 * 1024)) + ERROR_BYTES=$((ERROR_MB * 1024 * 1024)) + + PACKAGES=( + "media_api:packages/media_api/src/comfyui_workflow_templates_media_api" + "media_image:packages/media_image/src/comfyui_workflow_templates_media_image" + "media_video:packages/media_video/src/comfyui_workflow_templates_media_video" + "media_other:packages/media_other/src/comfyui_workflow_templates_media_other" + ) + + HAS_WARNING=false + HAS_ERROR=false + COMMENT_ROWS="" + + for ENTRY in "${PACKAGES[@]}"; do + PKG_NAME="${ENTRY%%:*}" + PKG_DIR="${ENTRY##*:}" + + if [ ! 
-d "$PKG_DIR" ]; then + echo "⚠️ Directory not found: $PKG_DIR (skipping)" + continue + fi + + SIZE_BYTES=$(du -sb "$PKG_DIR" | cut -f1) + SIZE_MB=$(echo "scale=1; $SIZE_BYTES / 1048576" | bc) + + if [ "$SIZE_BYTES" -ge "$ERROR_BYTES" ]; then + STATUS="🔴 CRITICAL" + HAS_ERROR=true + elif [ "$SIZE_BYTES" -ge "$WARN_BYTES" ]; then + STATUS="🟡 WARNING" + HAS_WARNING=true + else + STATUS="🟢 OK" + fi + + echo "$STATUS $PKG_NAME: ${SIZE_MB}MB / ${LIMIT_MB}MB limit" + COMMENT_ROWS="${COMMENT_ROWS}| \`comfyui-workflow-templates-${PKG_NAME/_/-}\` | ${SIZE_MB} MB | ${LIMIT_MB} MB | ${STATUS} |\n" + done + + # Write outputs for the comment step + echo "has_warning=$HAS_WARNING" >> "$GITHUB_OUTPUT" + echo "has_error=$HAS_ERROR" >> "$GITHUB_OUTPUT" + + # Build the full comment body and store it (multi-line via delimiter) + { + echo "COMMENT_BODY< **One or more packages are critically close to (≥${ERROR_MB} MB) the PyPI 100 MB per-file upload limit.**" + echo "> Publishing will likely fail unless templates are moved to a different bundle." + elif [ "$HAS_WARNING" = "true" ]; then + echo "## ⚠️ Package Size Warning — Approaching PyPI 100 MB Limit" + echo "" + echo "> One or more packages are approaching (≥${WARN_MB} MB) the PyPI 100 MB per-file upload limit." + echo "> Consider moving some templates to a different bundle before this reaches the limit." + else + echo "## ✅ Package Sizes OK" + echo "" + echo "> All packages are within safe limits." + fi + echo "" + echo "| Package | Estimated Size | Limit | Status |" + echo "|---------|---------------|-------|--------|" + printf "$COMMENT_ROWS" + echo "" + echo "Thresholds: 🟡 ≥${WARN_MB} MB (warning) · 🔴 ≥${ERROR_MB} MB (critical) · Limit: ${LIMIT_MB} MB" + echo "" + echo "_Sizes are measured from the synced package source directories. 
Actual sdist/wheel may differ slightly._" + echo "EOF" + } >> "$GITHUB_OUTPUT" + + if [ "$HAS_ERROR" = "true" ]; then + echo "" + echo "❌ One or more packages are critically close to the PyPI 100MB limit!" + echo " Move templates to another bundle in bundles.json before publishing." + fi + + - name: Post package size comment on PR + if: always() + uses: actions/github-script@v7 + env: + COMMENT_BODY: ${{ steps.size_check.outputs.COMMENT_BODY }} + with: + script: | + const marker = ''; + const body = marker + '\n' + process.env.COMMENT_BODY; + + const { data: comments } = await github.rest.issues.listComments({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + }); + + const existing = comments.find(c => c.body.includes(marker)); + if (existing) { + await github.rest.issues.updateComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: existing.id, + body, + }); + } else { + await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + body, + }); + } - name: Auto-bump package versions if needed run: | diff --git a/.gitignore b/.gitignore index a57340acf..32f9178e0 100644 --- a/.gitignore +++ b/.gitignore @@ -14,6 +14,9 @@ dist dist-ssr *.local +# Claude Code settings +.claude/settings.json + # Editor directories and files .vscode/* *.code-workspace @@ -62,7 +65,7 @@ packages/*/src/**/*.egg-info/ # Workflow templates assets # Hosted on https://github.com/Comfy-Org/workflow_templates /public/templates/ - +.claude/settings.json # Temporary repository directory templates_repo/ @@ -95,3 +98,5 @@ model_analysis_report.md *.pyzw *.pyzwz __pycache__/ +.claude/settings.json +dictionary.dic diff --git a/CLAUDE.md b/CLAUDE.md index 654358a83..292b752eb 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -37,12 +37,14 @@ workflow_templates/ ## Two Distinct Systems ### System 1: Template Packages (Python/PyPI) + - Templates are grouped into 4 
media bundles via `bundles.json` - `scripts/sync_bundles.py` copies templates + thumbnails into package directories - Published to PyPI as `comfyui-workflow-templates-*` packages - Version lives in root `pyproject.toml` (currently 0.8.43) ### System 2: Astro Website (`site/`) + - **Independent project** — own `package.json`, `pnpm-lock.yaml`, tooling - Consumes templates from `../templates/` via sync scripts - AI content generation pipeline (GPT-4o) enriches template pages @@ -61,6 +63,7 @@ templates/index.json + *.json + *.webp ## Key Commands ### Root (template management) + ```bash npm run sync # Sync bundle manifests + assets to packages python scripts/validate_templates.py # Validate template JSON @@ -68,6 +71,7 @@ python scripts/sync_data.py --templates-dir templates # Sync i18n translations ``` ### Site (in site/ directory) + ```bash pnpm install # Install deps (required first) pnpm run dev # Dev server at localhost:4321 @@ -84,7 +88,9 @@ pnpm run test:e2e # Playwright E2E tests ## Template Structure ### index.json Entry + Each template in `templates/index.json` has: + - `name` — Must match the JSON filename (snake_case, no extension) - `title`, `description` — Display metadata - `mediaType` — "image" | "video" | "audio" | "3d" @@ -94,35 +100,45 @@ Each template in `templates/index.json` has: - `tutorialUrl`, `openSource`, `requiresCustomNodes`, `io` ### Workflow JSON Files + Standard ComfyUI workflow format with embedded model metadata: + - `properties.models[]` — Download URLs, SHA256 hashes, target directories - `properties.cnr_id` + `properties.ver` — Node version pinning ### Thumbnails + - Named `{template_name}-1.webp` (primary), `{template_name}-2.webp` (comparison) - WebP format, target <1MB, 512×512 or 768×768 ## Bundle Assignment + Templates in `bundles.json` map to Python packages: -| Bundle | Contents | -|--------|----------| -| `media-api` | Templates using external APIs | -| `media-image` | Image generation/editing | -| `media-video` | Video 
generation | -| `media-other` | Audio, 3D, utilities | + + +| Bundle | Contents | +| ------------- | ----------------------------- | +| `media-api` | Templates using external APIs | +| `media-image` | Image generation/editing | +| `media-video` | Video generation | +| `media-other` | Audio, 3D, utilities | + ## Internationalization ### 11 Supported Languages + en (default), zh, zh-TW, ja, ko, es, fr, ru, tr, ar, pt-BR ### Template i18n + - Master: `templates/index.json` (English) - Locales: `templates/index.{locale}.json` - Translation tracking: `scripts/i18n.json` - Sync: `python scripts/sync_data.py --templates-dir templates` ### Site i18n + - Config: `site/src/i18n/config.ts` - UI strings: `site/src/i18n/ui.ts` - URL pattern: English at `/templates/`, others at `/{locale}/templates/` @@ -131,6 +147,7 @@ en (default), zh, zh-TW, ja, ko, es, fr, ru, tr, ar, pt-BR ## Site Architecture (Astro 5) ### Key Directories + - `site/src/pages/` — Route pages ([slug].astro, [locale]/templates/) - `site/src/components/` — Astro (.astro) and Vue (.vue) components - `site/src/composables/` — Shared Vue 3 composables for cross-island state @@ -141,15 +158,18 @@ en (default), zh, zh-TW, ja, ko, es, fr, ru, tr, ar, pt-BR - `site/overrides/templates/` — Human-edited content (survives AI regeneration) ### Island Architecture (Astro + Vue 3) -Astro renders pages as static HTML. Interactive sections use Vue 3 components mounted as **islands** via `client:*` directives. Each island is a separate Vue app instance. + +Astro renders pages as static HTML. Interactive sections use Vue 3 components mounted as **islands** via `client:`* directives. Each island is a separate Vue app instance. 
**When to use Astro vs Vue:** + - `.astro` — Static content, layouts, SEO markup, data fetching (`getCollection()`, API calls) - `.vue` with `client:load` — Interactive UI that needs reactivity on page load (filters, search, drawers) - `.vue` with `client:visible` — Interactive UI that can wait until scrolled into view (below-fold widgets) -- `.vue` without `client:*` — SSR-only Vue (renders HTML at build time, no client JS) +- `.vue` without `client:`* — SSR-only Vue (renders HTML at build time, no client JS) **Data flow — Astro page → Vue island:** + ``` [page].astro Vue island ───────────── ────────── @@ -157,19 +177,23 @@ getCollection('templates') → serialize to plain objects → pass as props via client:load → defineProps() ``` + Always serialize Astro content collection entries to plain objects before passing to Vue. Vue islands cannot receive Astro class instances, `Date` objects, or `Map`/`Set` — only JSON-serializable data. **Cross-island communication — Vue island ↔ Vue island:** Each `client:load` creates a separate Vue app, so `provide`/`inject` and `$emit` do NOT work across islands. Use shared composables with module-level reactive state: + ``` site/src/composables/useHubStore.ts (module-level refs) ├── HubBrowse.vue (island 1) imports useHubStore() └── SearchPopover.vue (island 2) imports useHubStore(), watches shared ref ``` + Module-level `ref()` values are singletons in the browser bundle — all islands that import the same composable share the same reactive state. **Astro → Vue runtime bridge:** When a DOM element in Astro markup (e.g. 
a hamburger button) needs to trigger Vue state, the Vue island attaches a listener to that element by ID in `onMounted()`: + ```ts // In the Vue island's ``` diff --git a/site/knowledge/concepts/_template-index.json b/site/knowledge/concepts/_template-index.json index 7c52c1ec5..2ecc4b077 100644 --- a/site/knowledge/concepts/_template-index.json +++ b/site/knowledge/concepts/_template-index.json @@ -163,8 +163,8 @@ "api_topaz_image_enhance", "api_topaz_video_enhance", "api_wavespeed_flshvsr_video_upscale", - "api_wavespped_image_upscale", - "api_wavespped_seedvr2_ai_image_fix", + "api_wavespeed_image_upscale", + "api_wavespeed_seedvr2_ai_image_fix", "ultility_hitpaw_general_image_enhance", "ultility_hitpaw_video_enhance", "utility-gan_upscaler", @@ -182,9 +182,9 @@ ], "video-generation": [ "03_video_wan2_2_14B_i2v_subgraphed", - "api_bytedace_seedance1_5_flf2v", - "api_bytedace_seedance1_5_image_to_video", - "api_bytedace_seedance1_5_text_to_video", + "api_bytedance_seedance1_5_flf2v", + "api_bytedance_seedance1_5_image_to_video", + "api_bytedance_seedance1_5_text_to_video", "api_bytedance_flf2v", "api_bytedance_image_to_video", "api_bytedance_text_to_video", diff --git a/site/knowledge/index.json b/site/knowledge/index.json index 4911ae160..f5f3b7314 100644 --- a/site/knowledge/index.json +++ b/site/knowledge/index.json @@ -1021,14 +1021,14 @@ "nodes": [], "customNodes": [] }, - "api_bytedace_seedance1_5_image_to_video": { + "api_bytedance_seedance1_5_image_to_video": { "models": ["seedance"], "concepts": ["video-generation"], "tutorial": "partner-nodes-luma-image-to-video.md", "nodes": [], "customNodes": [] }, - "api_bytedace_seedance1_5_flf2v": { + "api_bytedance_seedance1_5_flf2v": { "models": ["seedance"], "concepts": ["api", "video-generation"], "tutorial": "video-hunyuan-video-1-5.md", @@ -1343,7 +1343,7 @@ "nodes": [], "customNodes": [] }, - "api_bytedace_seedance1_5_text_to_video": { + "api_bytedance_seedance1_5_text_to_video": { "models": ["seedance"], 
"concepts": ["api", "video-generation"], "tutorial": "partner-nodes-luma-text-to-video.md", @@ -1952,14 +1952,14 @@ "nodes": [], "customNodes": [] }, - "api_wavespped_seedvr2_ai_image_fix": { + "api_wavespeed_seedvr2_ai_image_fix": { "models": ["seedvr2", "wavespeed"], "concepts": ["api", "upscaling"], "tutorial": "partner-nodes-stable-diffusion-3-5-image.md", "nodes": [], "customNodes": [] }, - "api_wavespped_image_upscale": { + "api_wavespeed_image_upscale": { "models": ["wavespeed"], "concepts": ["api", "upscaling"], "tutorial": "basic-upscale.md", diff --git a/site/knowledge/nodes/_template-index.json b/site/knowledge/nodes/_template-index.json index e832736c4..000e82c25 100644 --- a/site/knowledge/nodes/_template-index.json +++ b/site/knowledge/nodes/_template-index.json @@ -145,13 +145,13 @@ "BlockifyMask": ["video_wan2_2_14B_animate"], "BriaImageEditNode": ["api_bria_image_edit", "api_bria_image_outpainting"], "ByteDanceFirstLastFrameNode": [ - "api_bytedace_seedance1_5_flf2v", + "api_bytedance_seedance1_5_flf2v", "api_bytedance_flf2v", "templates-3D_logo_texture_animation", "templates-textured_logotype-v2.1" ], "ByteDanceImageToVideoNode": [ - "api_bytedace_seedance1_5_image_to_video", + "api_bytedance_seedance1_5_image_to_video", "api_bytedance_image_to_video", "templates-textured_logo_elements" ], @@ -163,7 +163,7 @@ "templates-product_scene_relight" ], "ByteDanceTextToVideoNode": [ - "api_bytedace_seedance1_5_text_to_video", + "api_bytedance_seedance1_5_text_to_video", "api_bytedance_text_to_video" ], "CFGGuider": [ @@ -629,8 +629,8 @@ "ImageCompare": [ "api_bria_image_edit", "api_magnific_skin_enhancer", - "api_wavespped_image_upscale", - "api_wavespped_seedvr2_ai_image_fix", + "api_wavespeed_image_upscale", + "api_wavespeed_seedvr2_ai_image_fix", "ultility_hitpaw_general_image_enhance", "utility_interpolation_image_upscale", "utility_recraft_creative_image_upscale", @@ -862,8 +862,8 @@ "api_bfl_flux_pro_t2i", "api_bria_image_edit", 
"api_bria_image_outpainting", - "api_bytedace_seedance1_5_flf2v", - "api_bytedace_seedance1_5_image_to_video", + "api_bytedance_seedance1_5_flf2v", + "api_bytedance_seedance1_5_image_to_video", "api_bytedance_flf2v", "api_bytedance_image_to_video", "api_bytedance_seedream4", @@ -934,8 +934,8 @@ "api_vidu_start_end_to_video", "api_wan2_6_i2v", "api_wan_image_to_video", - "api_wavespped_image_upscale", - "api_wavespped_seedvr2_ai_image_fix", + "api_wavespeed_image_upscale", + "api_wavespeed_seedvr2_ai_image_fix", "flux1_dev_uso_reference_image_gen", "flux_canny_model_example", "flux_depth_lora_example", @@ -1205,8 +1205,8 @@ "api_wan_image_to_video", "api_wan_text_to_image", "api_wan_text_to_video", - "api_wavespped_image_upscale", - "api_wavespped_seedvr2_ai_image_fix", + "api_wavespeed_image_upscale", + "api_wavespeed_seedvr2_ai_image_fix", "audio-chatterbox_tts", "audio-chatterbox_tts_dialog", "audio-chatterbox_tts_multilingual", @@ -1836,8 +1836,8 @@ "api_stability_ai_stable_image_ultra_t2i", "api_topaz_image_enhance", "api_wan_text_to_image", - "api_wavespped_image_upscale", - "api_wavespped_seedvr2_ai_image_fix", + "api_wavespeed_image_upscale", + "api_wavespeed_seedvr2_ai_image_fix", "default", "flux1_dev_uso_reference_image_gen", "flux1_krea_dev", @@ -1955,9 +1955,9 @@ ], "SaveVideo": [ "03_video_wan2_2_14B_i2v_subgraphed", - "api_bytedace_seedance1_5_flf2v", - "api_bytedace_seedance1_5_image_to_video", - "api_bytedace_seedance1_5_text_to_video", + "api_bytedance_seedance1_5_flf2v", + "api_bytedance_seedance1_5_image_to_video", + "api_bytedance_seedance1_5_text_to_video", "api_bytedance_flf2v", "api_bytedance_image_to_video", "api_bytedance_text_to_video", @@ -2416,8 +2416,8 @@ ], "WavespeedFlashVSRNode": ["api_wavespeed_flshvsr_video_upscale"], "WavespeedImageUpscaleNode": [ - "api_wavespped_image_upscale", - "api_wavespped_seedvr2_ai_image_fix" + "api_wavespeed_image_upscale", + "api_wavespeed_seedvr2_ai_image_fix" ], "a67caa28-5f85-4917-8396-36004960dd30": 
[ "image_flux2_klein_text_to_image", diff --git a/site/knowledge/tutorials/_template-mapping.json b/site/knowledge/tutorials/_template-mapping.json index 5f9a17933..fed8495b8 100644 --- a/site/knowledge/tutorials/_template-mapping.json +++ b/site/knowledge/tutorials/_template-mapping.json @@ -583,11 +583,11 @@ "tutorial": "video-wan2-2-animate.md", "matchType": "direct" }, - "api_bytedace_seedance1_5_image_to_video": { + "api_bytedance_seedance1_5_image_to_video": { "tutorial": "partner-nodes-luma-image-to-video.md", "matchType": "direct" }, - "api_bytedace_seedance1_5_flf2v": { + "api_bytedance_seedance1_5_flf2v": { "tutorial": "video-hunyuan-video-1-5.md", "matchType": "direct" }, @@ -763,7 +763,7 @@ "tutorial": "video-cosmos-predict2-video2world.md", "matchType": "direct" }, - "api_bytedace_seedance1_5_text_to_video": { + "api_bytedance_seedance1_5_text_to_video": { "tutorial": "partner-nodes-luma-text-to-video.md", "matchType": "direct" }, @@ -1111,11 +1111,11 @@ "tutorial": "basic-upscale.md", "matchType": "partial" }, - "api_wavespped_seedvr2_ai_image_fix": { + "api_wavespeed_seedvr2_ai_image_fix": { "tutorial": "partner-nodes-stable-diffusion-3-5-image.md", "matchType": "partial" }, - "api_wavespped_image_upscale": { + "api_wavespeed_image_upscale": { "tutorial": "basic-upscale.md", "matchType": "partial" }, diff --git a/site/knowledge/workflow-text/_audit.json b/site/knowledge/workflow-text/_audit.json index 8a4ab34ee..63e9abbb6 100644 --- a/site/knowledge/workflow-text/_audit.json +++ b/site/knowledge/workflow-text/_audit.json @@ -217,19 +217,19 @@ "groupTitles": [], "customLabels": [] }, - "api_bytedace_seedance1_5_flf2v": { + "api_bytedance_seedance1_5_flf2v": { "authorNotes": 0, "examplePrompts": [], "groupTitles": [], "customLabels": [] }, - "api_bytedace_seedance1_5_image_to_video": { + "api_bytedance_seedance1_5_image_to_video": { "authorNotes": 0, "examplePrompts": [], "groupTitles": [], "customLabels": [] }, - 
"api_bytedace_seedance1_5_text_to_video": { + "api_bytedance_seedance1_5_text_to_video": { "authorNotes": 0, "examplePrompts": [], "groupTitles": [], @@ -1427,13 +1427,13 @@ "groupTitles": [], "customLabels": [] }, - "api_wavespped_image_upscale": { + "api_wavespeed_image_upscale": { "authorNotes": 85, "examplePrompts": [], "groupTitles": [], "customLabels": [] }, - "api_wavespped_seedvr2_ai_image_fix": { + "api_wavespeed_seedvr2_ai_image_fix": { "authorNotes": 85, "examplePrompts": [], "groupTitles": [], diff --git a/site/package-lock.json b/site/package-lock.json index abfc2cb10..1c317524b 100644 --- a/site/package-lock.json +++ b/site/package-lock.json @@ -1482,7 +1482,7 @@ "node_modules/@humanwhocodes/module-importer": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", - "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtFL2VA==", "dev": true, "license": "Apache-2.0", "engines": { diff --git a/site/package.json b/site/package.json index 14291d68a..60710e8bf 100644 --- a/site/package.json +++ b/site/package.json @@ -74,10 +74,10 @@ "posthog-js": "^1.360.0", "reka-ui": "^2.8.0", "satori": "^0.19.1", + "sharp": "^0.34.5", "tailwind-merge": "^3.5.0", "tailwindcss": "^4.1.18", - "vue": "^3.5.28", - "web-vitals": "^4.2.4" + "vue": "^3.5.28" }, "devDependencies": { "@astrojs/check": "^0.9.6", diff --git a/site/pnpm-lock.yaml b/site/pnpm-lock.yaml index cd11f843d..120bc019f 100644 --- a/site/pnpm-lock.yaml +++ b/site/pnpm-lock.yaml @@ -62,6 +62,9 @@ importers: satori: specifier: ^0.19.1 version: 0.19.3 + sharp: + specifier: ^0.34.5 + version: 0.34.5 tailwind-merge: specifier: ^3.5.0 version: 3.5.0 @@ -71,9 +74,6 @@ importers: vue: specifier: ^3.5.28 version: 3.5.29(typescript@5.9.3) - web-vitals: - specifier: ^4.2.4 - version: 4.2.4 
optionalDependencies: canvas: specifier: ^2.11.0 @@ -4764,9 +4764,6 @@ packages: resolution: {integrity: sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug==} engines: {node: '>= 14'} - web-vitals@4.2.4: - resolution: {integrity: sha512-r4DIlprAGwJ7YM11VZp4R884m0Vmgr6EAKe3P+kO0PPj3Unqyvv59rczf6UiGcb9Z8QxZVcqKNwv/g0WNdWwsw==} - web-vitals@5.1.0: resolution: {integrity: sha512-ArI3kx5jI0atlTtmV0fWU3fjpLmq/nD3Zr1iFFlJLaqa5wLBkUSzINwBPySCX/8jRyjlmy1Volw1kz1g9XE4Jg==} @@ -5552,8 +5549,7 @@ snapshots: '@humanwhocodes/retry@0.4.3': {} - '@img/colour@1.0.0': - optional: true + '@img/colour@1.0.0': {} '@img/sharp-darwin-arm64@0.34.5': optionalDependencies: @@ -9390,7 +9386,6 @@ snapshots: '@img/sharp-win32-arm64': 0.34.5 '@img/sharp-win32-ia32': 0.34.5 '@img/sharp-win32-x64': 0.34.5 - optional: true shebang-command@2.0.0: dependencies: @@ -10021,8 +10016,6 @@ snapshots: web-streams-polyfill@4.0.0-beta.3: {} - web-vitals@4.2.4: {} - web-vitals@5.1.0: {} webidl-conversions@3.0.1: {} diff --git a/site/public/fonts/PPFormula-Bold.woff2 b/site/public/fonts/PPFormula-Bold.woff2 new file mode 100644 index 000000000..faec417e0 Binary files /dev/null and b/site/public/fonts/PPFormula-Bold.woff2 differ diff --git a/site/public/fonts/PPFormula-Extrabold.woff2 b/site/public/fonts/PPFormula-Extrabold.woff2 new file mode 100644 index 000000000..e5abf35ba Binary files /dev/null and b/site/public/fonts/PPFormula-Extrabold.woff2 differ diff --git a/site/public/fonts/PPFormula-Medium.woff2 b/site/public/fonts/PPFormula-Medium.woff2 new file mode 100644 index 000000000..3065394cb Binary files /dev/null and b/site/public/fonts/PPFormula-Medium.woff2 differ diff --git a/site/public/fonts/PPFormula-Regular.woff2 b/site/public/fonts/PPFormula-Regular.woff2 new file mode 100644 index 000000000..9d5610d01 Binary files /dev/null and b/site/public/fonts/PPFormula-Regular.woff2 differ diff --git a/site/public/fonts/PPFormula-Semibold.woff2 
b/site/public/fonts/PPFormula-Semibold.woff2 new file mode 100644 index 000000000..54b89c53f Binary files /dev/null and b/site/public/fonts/PPFormula-Semibold.woff2 differ diff --git a/site/public/og-default.png b/site/public/og-default.png deleted file mode 100644 index ee26c8a80..000000000 Binary files a/site/public/og-default.png and /dev/null differ diff --git a/site/public/workflows/og-default.png b/site/public/workflows/og-default.png new file mode 100644 index 000000000..8295b5e90 Binary files /dev/null and b/site/public/workflows/og-default.png differ diff --git a/site/scripts/lib/filesystem.ts b/site/scripts/lib/filesystem.ts index f943ef072..5ac05a949 100644 --- a/site/scripts/lib/filesystem.ts +++ b/site/scripts/lib/filesystem.ts @@ -2,6 +2,7 @@ import * as fs from 'node:fs'; import * as path from 'node:path'; import { ASSET_EXTENSIONS, DEFAULT_LOCALE, LOGO_FILENAME_FIXES } from './constants'; import { + REPO_ROOT, TEMPLATES_DIR, CONTENT_DIR, THUMBNAILS_DIR, @@ -11,6 +12,8 @@ import { AVATARS_SRC_DIR, AVATARS_DEST_DIR, } from './paths'; + +export const DETAIL_IMAGES_DIR = path.join(THUMBNAILS_DIR, 'detail'); import { logger } from './logger'; export function escapeRegExp(string: string): string { @@ -46,6 +49,32 @@ export function copyThumbnails(templateName: string): void { } } +/** + * Copy detail images specified in the `thumbnail` field of index.json into + * DETAIL_IMAGES_DIR. Paths are relative to the repo root (e.g. "input/foo.png", + * "output/bar.mp4"). Returns the flat filenames for use as `detailImages`. 
+ */ +export function copyDetailImages(thumbnailPaths: string[]): string[] { + if (!fs.existsSync(DETAIL_IMAGES_DIR)) { + fs.mkdirSync(DETAIL_IMAGES_DIR, { recursive: true }); + } + const result: string[] = []; + for (const relPath of thumbnailPaths) { + const src = path.join(REPO_ROOT, relPath); + if (!fs.existsSync(src)) { + logger.warn(` Warning: detail image not found: ${relPath}`); + continue; + } + const destName = path.basename(relPath); + const dest = path.join(DETAIL_IMAGES_DIR, destName); + if (!fs.existsSync(dest) || fs.statSync(src).mtime > fs.statSync(dest).mtime) { + fs.copyFileSync(src, dest); + } + result.push(`detail/${destName}`); + } + return result; +} + export function copyWorkflowJson(templateName: string): void { const src = path.join(TEMPLATES_DIR, `${templateName}.json`); const dest = path.join(WORKFLOWS_DIR, `${templateName}.json`); diff --git a/site/scripts/lib/paths.ts b/site/scripts/lib/paths.ts index ccc92c9fd..58908fb11 100644 --- a/site/scripts/lib/paths.ts +++ b/site/scripts/lib/paths.ts @@ -3,6 +3,7 @@ import { fileURLToPath } from 'node:url'; const __dirname = path.dirname(fileURLToPath(import.meta.url)); export const SITE_DIR = path.resolve(__dirname, '..', '..'); +export const REPO_ROOT = path.join(SITE_DIR, '..'); export const TEMPLATES_DIR = path.join(SITE_DIR, '..', 'templates'); export const CONTENT_DIR = path.join(SITE_DIR, 'src', 'content', 'templates'); export const THUMBNAILS_DIR = path.join(SITE_DIR, 'public', 'workflows', 'thumbnails'); diff --git a/site/scripts/lib/search/build-index.ts b/site/scripts/lib/search/build-index.ts index 1519bf914..d69a6656a 100644 --- a/site/scripts/lib/search/build-index.ts +++ b/site/scripts/lib/search/build-index.ts @@ -20,18 +20,24 @@ interface SearchDocument { username: string; creatorName: string; // Stored fields for display (not searched) + name: string; + slug: string; thumbnail: string; usage: number; tagsArray: string[]; } -interface CreatorsJson { - [username: string]: { - 
displayName: string; - handle: string; - summary?: string; - social?: string; - }; +/** Shape returned by GET /api/hub/workflows/index */ +interface IndexEntry { + name?: string; + title?: string; + description?: string; + tags?: string[]; + models?: string[]; + mediaType?: string; + username?: string; + thumbnailUrl?: string; + shareId?: string; } const MEDIA_TYPE_LABELS: Record = { @@ -41,46 +47,133 @@ const MEDIA_TYPE_LABELS: Record = { '3d': '3D', }; -export async function buildSearchIndex(): Promise { - const startTime = Date.now(); +/** + * Fetch workflow index entries from the hub API. + * Returns `null` when no API URL is configured (local dev). + * Throws when the API is configured but returns an error or empty response. + */ +async function fetchIndexEntries(): Promise { + const apiUrl = (process.env.PUBLIC_HUB_API_URL || '').replace(/\/$/, ''); + if (!apiUrl) return null; + + const approvedOnly = process.env.PUBLIC_APPROVED_ONLY === 'true'; + const statuses = approvedOnly ? 'approved' : 'pending,approved,rejected,deprecated'; + const res = await fetch(`${apiUrl}/api/hub/workflows/index?status=${statuses}`); + + if (!res.ok) { + throw new Error(`Hub API returned ${res.status}: ${res.statusText}`); + } + const entries = (await res.json()) as IndexEntry[]; + if (entries.length === 0) { + throw new Error('Hub API returned empty index'); + } + return entries; +} - // Load creators mapping - const creatorsPath = path.join(SITE_DIR, 'creators.json'); - let creators: CreatorsJson = {}; - if (fs.existsSync(creatorsPath)) { - creators = JSON.parse(fs.readFileSync(creatorsPath, 'utf-8')); +async function fetchProfileDisplayName(username: string): Promise { + const apiUrl = (process.env.PUBLIC_HUB_API_URL || '').replace(/\/$/, ''); + if (!apiUrl) return username; + try { + const res = await fetch(`${apiUrl}/api/hub/profiles/${encodeURIComponent(username)}`); + if (!res.ok) return username; + const profile = (await res.json()) as { display_name?: string }; + return 
profile.display_name || username; + } catch { + return username; } +} - // Read all English template JSONs from synced content - const files = fs.readdirSync(CONTENT_DIR).filter((f) => f.endsWith('.json') && !f.includes('/')); +export async function buildSearchIndex(): Promise { + const startTime = Date.now(); const documents: SearchDocument[] = []; - for (const file of files) { - const filePath = path.join(CONTENT_DIR, file); - const data = JSON.parse(fs.readFileSync(filePath, 'utf-8')); - - const username = data.username || 'ComfyUI'; - const creatorInfo = creators[username]; - const creatorName = creatorInfo?.displayName || username; - const tags: string[] = data.tags || []; - const models: string[] = data.models || []; - const thumbnails: string[] = data.thumbnails || []; - - documents.push({ - id: data.name, - title: data.title || data.name, - description: data.description || '', - tags: tags.map(tagSearchText).join(' '), - models: models.join(' '), - mediaType: data.mediaType || 'image', - mediaTypeLabel: MEDIA_TYPE_LABELS[data.mediaType] || data.mediaType, - username, - creatorName, - thumbnail: thumbnails[0] || '', - usage: data.usage || 0, - tagsArray: tags.map(tagDisplayName), - }); + // Hub API is the primary source; content collection is only for local/offline builds + const hubEntries = await fetchIndexEntries(); + if (hubEntries) { + logger.info(`Building search index from hub API (${hubEntries.length} entries)`); + + // Resolve display names from profile API + const usernames = [...new Set(hubEntries.map((e) => e.username).filter(Boolean) as string[])]; + const displayNames = new Map(); + const results = await Promise.allSettled( + usernames.map(async (u) => ({ username: u, displayName: await fetchProfileDisplayName(u) })) + ); + for (const r of results) { + if (r.status === 'fulfilled') { + displayNames.set(r.value.username, r.value.displayName); + } + } + + for (const data of hubEntries) { + const username = data.username || 'ComfyUI'; + const 
creatorName = displayNames.get(username) || username; + const tags = data.tags || []; + const models = data.models || []; + const shareId = data.shareId || ''; + const name = data.name || shareId; + // shareId is the unique key — only skip if neither field can identify the workflow. + if (!name) continue; + const id = shareId || name; + const slug = shareId ? `${name}-${shareId}` : name; + + documents.push({ + id, + title: data.title || name, + description: data.description || '', + tags: tags.map(tagSearchText).join(' '), + models: models.join(' '), + mediaType: data.mediaType || 'image', + mediaTypeLabel: MEDIA_TYPE_LABELS[data.mediaType || ''] || data.mediaType || 'image', + username, + creatorName, + name, + slug, + thumbnail: data.thumbnailUrl || '', + usage: 0, + tagsArray: tags.map(tagDisplayName), + }); + } + } else { + // No PUBLIC_HUB_API_URL — local/offline build, use content collection + const files = fs + .readdirSync(CONTENT_DIR) + .filter((f) => f.endsWith('.json') && !f.includes('/')); + logger.warn( + `No hub API configured — building search index from content collection (${files.length} files)` + ); + + for (const file of files) { + const filePath = path.join(CONTENT_DIR, file); + const data = JSON.parse(fs.readFileSync(filePath, 'utf-8')); + + const username = data.username || 'ComfyUI'; + const tags: string[] = data.tags || []; + const models: string[] = data.models || []; + const thumbnails: string[] = data.thumbnails || []; + + const shareId = data.shareId || ''; + const name = data.name || shareId; + const id = shareId || name; + const slug = shareId ? 
`${name}-${shareId}` : name; + + documents.push({ + id, + title: data.title || name, + description: data.description || '', + tags: tags.map(tagSearchText).join(' '), + models: models.join(' '), + mediaType: data.mediaType || 'image', + mediaTypeLabel: MEDIA_TYPE_LABELS[data.mediaType] || data.mediaType, + username, + creatorName: username, + name, + slug, + thumbnail: thumbnails[0] || '', + usage: data.usage || 0, + tagsArray: tags.map(tagDisplayName), + }); + } } logger.info(`Indexing ${documents.length} templates...`); @@ -108,6 +201,8 @@ export async function buildSearchIndex(): Promise { 'title', 'mediaType', 'mediaTypeLabel', + 'name', + 'slug', 'thumbnail', 'username', 'creatorName', @@ -136,5 +231,7 @@ export async function buildSearchIndex(): Promise { const sizeKB = (Buffer.byteLength(serialized) / 1024).toFixed(1); const duration = ((Date.now() - startTime) / 1000).toFixed(2); - logger.info(`Search index written to public/workflows/search-index.json (${sizeKB} KB, ${duration}s)`); + logger.info( + `Search index written to public/workflows/search-index.json (${sizeKB} KB, ${duration}s)` + ); } diff --git a/site/scripts/lib/sync.ts b/site/scripts/lib/sync.ts index bf4fc1817..58ddba7e2 100644 --- a/site/scripts/lib/sync.ts +++ b/site/scripts/lib/sync.ts @@ -15,6 +15,7 @@ import { import { findThumbnails, copyThumbnails, + copyDetailImages, copyWorkflowJson, ensureDirectories, syncLogos, @@ -27,8 +28,8 @@ import type { TemplateInfo, SyncedTemplate } from './types'; * Check whether a template name denotes a Comfy App. * * The convention is that app template names end with `.app` — for example - * `templates_liveportrat.app`. The corresponding workflow file on disk is - * `{name}.json` (i.e. `templates_liveportrat.app.json`) and thumbnails + * `templates_liveportrait.app`. The corresponding workflow file on disk is + * `{name}.json` (i.e. `templates_liveportrait.app.json`) and thumbnails * follow the same pattern (`{name}-1.webp`). 
*/ function isAppTemplate(name: string): boolean { @@ -57,6 +58,11 @@ function createSyncedTemplate( workflowModels = models.length > 0 ? models : undefined; } + const detailImages = + template.thumbnail && locale === DEFAULT_LOCALE + ? copyDetailImages(template.thumbnail) + : undefined; + return { ...template, username: template.username || 'ComfyUI', @@ -65,6 +71,7 @@ function createSyncedTemplate( metaDescription: template.description.slice(0, 160), suggestedUseCases: [], thumbnails, + detailImages: detailImages && detailImages.length > 0 ? detailImages : undefined, locale: locale === DEFAULT_LOCALE ? undefined : locale, estimatedTime, requiredNodes, diff --git a/site/scripts/lib/types.ts b/site/scripts/lib/types.ts index 5a442bf75..39a5e12cc 100644 --- a/site/scripts/lib/types.ts +++ b/site/scripts/lib/types.ts @@ -5,6 +5,7 @@ export interface TemplateInfo { mediaType: 'image' | 'video' | 'audio' | '3d'; mediaSubtype?: string; thumbnailVariant?: 'compareSlider' | 'hoverDissolve' | 'zoomHover'; + thumbnail?: string[]; tags?: string[]; models?: string[]; date?: string; @@ -46,6 +47,7 @@ export interface SyncedTemplate extends TemplateInfo { metaDescription: string; suggestedUseCases: string[]; thumbnails: string[]; + detailImages?: string[]; locale?: string; estimatedTime?: string; requiredNodes?: RequiredNodeInfo[]; diff --git a/site/scripts/prebuild-parallel.ts b/site/scripts/prebuild-parallel.ts index 82ea06097..eda934bfd 100644 --- a/site/scripts/prebuild-parallel.ts +++ b/site/scripts/prebuild-parallel.ts @@ -83,9 +83,9 @@ async function main(): Promise { const phase2Failed = phase2Results.filter((r) => !r.success); if (phase2Failed.length > 0) { - console.warn('\n⚠️ Phase 2: some tasks failed (non-fatal):'); + console.error('\n❌ Phase 2 failed:'); for (const f of phase2Failed) { - console.warn(` - ${f.name}: ${f.error}`); + console.error(` - ${f.name}: ${f.error}`); } } @@ -108,6 +108,10 @@ async function main(): Promise { if (savedTime > 1000) { 
console.log(` Saved ~${(savedTime / 1000).toFixed(1)}s vs sequential`); } + + if (phase2Failed.length > 0) { + process.exit(1); + } } main().catch((err) => { diff --git a/site/src/components/SEOHead.astro b/site/src/components/SEOHead.astro index ab0d3ce8c..240f9c401 100644 --- a/site/src/components/SEOHead.astro +++ b/site/src/components/SEOHead.astro @@ -20,7 +20,7 @@ const { title, description, canonicalUrl, - ogImage = '/og-default.png', + ogImage = '/workflows/og-default.png', ogType = 'website', structuredData, hreflangBasePath, @@ -67,6 +67,8 @@ if (!computedHreflangBasePath) { + + diff --git a/site/src/components/TemplateCard.astro b/site/src/components/TemplateCard.astro index d4c7b800c..101c58854 100644 --- a/site/src/components/TemplateCard.astro +++ b/site/src/components/TemplateCard.astro @@ -2,14 +2,15 @@ import ThumbnailDisplay from './ThumbnailDisplay.astro'; import { DEFAULT_LOCALE, type Locale } from '../i18n/config'; import { localizeUrl } from '../i18n/utils'; +import type { MediaType, ThumbnailVariant } from '../lib/hub-api'; interface Props { name: string; title: string; description: string; - mediaType: 'image' | 'video' | 'audio' | '3d'; + mediaType: MediaType; mediaSubtype?: string; - thumbnailVariant?: 'compareSlider' | 'hoverDissolve' | 'zoomHover' | 'hoverZoom'; + thumbnailVariant?: ThumbnailVariant; tags?: string[]; models?: string[]; usage?: number; @@ -61,11 +62,11 @@ const popularity = usage !== undefined ? getPopularity(usage, date) : null; -
+
{ thumbnails.length > 0 ? (

{title}

-

+

{description}

-
+
{ popularity?.label && ( @@ -143,7 +144,7 @@ const popularity = usage !== undefined ? getPopularity(usage, date) : null; { tags.length > 0 && ( - {tags[0]} + {tags[0]} ) }
diff --git a/site/src/components/ThumbnailDisplay.astro b/site/src/components/ThumbnailDisplay.astro index 587dc0d7f..261756faa 100644 --- a/site/src/components/ThumbnailDisplay.astro +++ b/site/src/components/ThumbnailDisplay.astro @@ -10,15 +10,20 @@ * - Default: Simple zoom on hover */ +import type { ThumbnailVariant } from '../lib/hub-api'; +import { getVideoFrameUrl } from '../lib/video-thumbnail'; +import { isVideoFile, isAudioFile } from '../lib/media-utils'; + interface Props { thumbnails: string[]; title: string; - variant?: 'compareSlider' | 'hoverDissolve' | 'zoomHover' | 'hoverZoom'; + variant?: ThumbnailVariant; mediaSubtype?: string; class?: string; loading?: 'lazy' | 'eager'; fetchpriority?: 'high' | 'low' | 'auto'; sizes?: string; + naturalAspect?: boolean; } const { @@ -30,21 +35,36 @@ const { loading = 'lazy', fetchpriority, sizes = '(max-width: 640px) 100vw, (max-width: 1024px) 50vw, 640px', + naturalAspect = false, } = Astro.props; +// If thumbnail is already a full URL, use it directly; otherwise prepend local path +function thumbUrl(thumb: string): string { + if (thumb.startsWith('http://') || thumb.startsWith('https://')) return thumb; + return `/workflows/thumbnails/${thumb}`; +} + +const imgClass = naturalAspect ? 'w-full h-auto' : 'w-full h-full object-cover'; + const primaryThumb = thumbnails[0]; const secondaryThumb = thumbnails[1]; +const primarySrc = primaryThumb ? thumbUrl(primaryThumb) : ''; +const secondarySrc = secondaryThumb ? thumbUrl(secondaryThumb) : ''; const isAnimated = mediaSubtype === 'webp'; const hasSecondImage = thumbnails.length > 1; -const isAudio = primaryThumb?.endsWith('.mp3') || primaryThumb?.endsWith('.webm'); +const isAudio = primaryThumb ? isAudioFile(primaryThumb) : false; +const isVideo = primaryThumb ? isVideoFile(primaryThumb) : false; +const isSecondaryVideo = secondaryThumb ? isVideoFile(secondaryThumb) : false; +const videoPosterUrl = isVideo ? 
getVideoFrameUrl(primarySrc) : null; +const secondaryVideoPosterUrl = isSecondaryVideo ? getVideoFrameUrl(secondarySrc) : null; --- { !primaryThumb ? (
-
+
) : isAudio ? (
- {title} + {title}
+ ) : isVideo ? ( +
+
+
+ {secondaryThumb && isSecondaryVideo && ( +
+
+ )} + {secondaryThumb && !isSecondaryVideo && ( + {`${title} + )} +
) : variant === 'compareSlider' && hasSecondImage ? (
{`${title}
{`${title} {`${title} {`${title} {title}
) : variant === 'zoomHover' ? (
{title}
) : (
{title}
) @@ -220,6 +302,38 @@ const isAudio = primaryThumb?.endsWith('.mp3') || primaryThumb?.endsWith('.webm'