refactor cache dir
feifeibear committed Feb 24, 2025
1 parent ce7ed5f commit a65f479
Showing 9 changed files with 7 additions and 12 deletions.
3 changes: 1 addition & 2 deletions .gitignore
@@ -9,5 +9,4 @@ profile/
 .vscode/
 xfuser.egg-info/
 dist/*
-latte_output.mp4
-cache/
+*.mp4
2 changes: 1 addition & 1 deletion docs/developer/adding_models/readme.md
@@ -1,4 +1,4 @@
-# Parallelize New Models with xDiT
+# Apply xDiT to new models
 
 xDiT was initially developed to accelerate the inference process of Diffusion Transformers (DiTs) within Huggingface `diffusers`. However, with the rapid emergence of various DiT models, you may find yourself needing to support new models that xDiT hasn't yet accommodated or models that are not officially supported by `diffusers` at all.
4 changes: 2 additions & 2 deletions examples/flux_example.py
@@ -16,7 +16,7 @@
     get_tensor_model_parallel_world_size,
     get_data_parallel_world_size,
 )
 
+from xfuser.model_executor.cache.diffusers_adapters import apply_cache_on_transformer
 
 def main():
     parser = FlexibleArgumentParser(description="xFuser Arguments")

@@ -49,7 +49,7 @@ def main():
     parameter_peak_memory = torch.cuda.max_memory_allocated(device=f"cuda:{local_rank}")
 
     pipe.prepare_run(input_config, steps=1)
-    from xfuser.model_executor.plugins.cache_.diffusers_adapters import apply_cache_on_transformer
 
     use_cache = engine_args.use_teacache or engine_args.use_fbcache
     if (use_cache
         and get_pipeline_parallel_world_size() == 1
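Taken together, the two hunks move the `apply_cache_on_transformer` import from inside `main()` to module level, under the new `xfuser.model_executor.cache` package. A minimal sketch of how the relocated helper is then invoked, assuming keyword names from the `create_cached_transformer_blocks` signature shown later in this commit (the threshold value and `input_config.num_inference_steps` are illustrative assumptions, not the example's exact code):

    # Sketch only: apply the cache adapter when pipeline parallelism is off,
    # mirroring the condition visible in the diff above.
    use_cache = engine_args.use_teacache or engine_args.use_fbcache
    if use_cache and get_pipeline_parallel_world_size() == 1:
        apply_cache_on_transformer(
            pipe.transformer,                            # the pipeline's FluxTransformer2DModel
            rel_l1_thresh=0.6,                           # illustrative threshold
            num_steps=input_config.num_inference_steps,  # assumed field name
        )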
File renamed without changes.
@@ -4,7 +4,7 @@
"""
import importlib
from typing import Type, Dict, TypeVar
from xfuser.model_executor.plugins.cache_.diffusers_adapters.registry import TRANSFORMER_ADAPTER_REGISTRY
from xfuser.model_executor.cache.diffusers_adapters.registry import TRANSFORMER_ADAPTER_REGISTRY


def apply_cache_on_transformer(transformer, *args, **kwargs):
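Given the imports visible in this hunk (`importlib`, `typing`, and the relocated registry), the body truncated above plausibly dispatches through `TRANSFORMER_ADAPTER_REGISTRY`. A hedged sketch, assuming the registry maps transformer classes to adapter module names (the value type and the lazy import are assumptions, not the repository's exact code):

    def apply_cache_on_transformer(transformer, *args, **kwargs):
        # Hypothetical: resolve the adapter registered for this transformer
        # class and import it lazily from the new post-refactor package path.
        adapter_name = TRANSFORMER_ADAPTER_REGISTRY.get(transformer.__class__)
        if adapter_name is None:
            raise ValueError(f"no cache adapter registered for {type(transformer)}")
        adapter = importlib.import_module(
            f"xfuser.model_executor.cache.diffusers_adapters.{adapter_name}"
        )
        return adapter.apply_cache_on_transformer(transformer, *args, **kwargs)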
@@ -8,9 +8,9 @@
 import torch
 from torch import nn
 from diffusers import DiffusionPipeline, FluxTransformer2DModel
-from xfuser.model_executor.plugins.cache_.diffusers_adapters.registry import TRANSFORMER_ADAPTER_REGISTRY
+from xfuser.model_executor.cache.diffusers_adapters.registry import TRANSFORMER_ADAPTER_REGISTRY
 
-from xfuser.model_executor.plugins.cache_ import utils
+from xfuser.model_executor.cache import utils
 
 def create_cached_transformer_blocks(use_cache, transformer, rel_l1_thresh, return_hidden_states_first, num_steps):
     cached_transformer_class = {
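The dict opened in the last visible line maps a cache flavor to a wrapper class from the relocated `utils` module. A hypothetical continuation, assuming `"Fb"`/`"Tea"` keys to match the `use_fbcache`/`use_teacache` flags seen earlier (the class names and constructor arguments are illustrative; only the signature and the dict literal appear in the diff):

    # Hypothetical continuation of create_cached_transformer_blocks:
    cached_transformer_class = {
        "Fb": utils.FBCachedTransformerBlocks,    # assumed FBCache wrapper
        "Tea": utils.TeaCachedTransformerBlocks,  # assumed TeaCache wrapper
    }.get(use_cache)
    if cached_transformer_class is None:
        raise ValueError(f"unsupported cache type: {use_cache}")
    return cached_transformer_class(
        transformer.transformer_blocks,
        transformer=transformer,
        rel_l1_thresh=rel_l1_thresh,
        return_hidden_states_first=return_hidden_states_first,
        num_steps=num_steps,
    )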
File renamed without changes.
4 changes: 0 additions & 4 deletions xfuser/model_executor/plugins/cache_/__init__.py

This file was deleted.
