Update readme (#680)
Co-authored-by: Xiaoyu Xu <[email protected]>
clackhan and strint authored Mar 1, 2024
1 parent af5d919 commit d3a5629
Showing 2 changed files with 47 additions and 13 deletions.
42 changes: 38 additions & 4 deletions onediff_diffusers_extensions/README.md
@@ -3,7 +3,7 @@
OneDiffX is a OneDiff Extension for HF diffusers. It provides some acceleration utilities, such as DeepCache.

- [Install and Setup](#install-and-setup)
-- [compile_pipe](#compile_pipe)
+- [Compile, save and load pipeline](#compile-save-and-load-pipeline)
- [DeepCache Speedup](#deepcache-speedup)
- [Stable Diffusion XL](#run-stable-diffusion-xl-with-onediffx)
- [Stable Diffusion 1.5](#run-stable-diffusion-15-with-onediffx)
@@ -21,9 +21,9 @@ OneDiffX is a OneDiff Extension for HF diffusers. It provides some acceleration
git clone https://github.com/siliconflow/onediff.git
cd onediff_diffusers_extensions && python3 -m pip install -e .
```
-## compile_pipe
-Compile diffusers pipeline with `compile_pipe`.
-```
+## Compile, save and load pipeline
+### Compile diffusers pipeline with `compile_pipe`.
+```python
import torch
from diffusers import StableDiffusionXLPipeline
@@ -40,6 +40,40 @@ pipe.to("cuda")
pipe = compile_pipe(pipe)
```
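
Once compiled, the pipeline is invoked like any other diffusers pipeline. Below is a minimal sketch; the prompt, step count, and output filename are only illustrative, and the first call is typically the slow one, since that is when the graphs actually get compiled:
```python
# Illustrative warmup/inference call on the compiled pipeline from the example
# above. The first run is expected to be slow (compilation happens here);
# subsequent runs reuse the compiled graphs.
image = pipe(
    "a photo of an astronaut riding a horse on mars",
    num_inference_steps=30,
).images[0]
image.save("astronaut.png")
```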

### Save compiled pipeline with `save_pipe`
```python
import torch
from diffusers import StableDiffusionXLPipeline
from onediffx import compile_pipe, save_pipe
pipe = StableDiffusionXLPipeline.from_pretrained(
"stabilityai/stable-diffusion-xl-base-1.0",
torch_dtype=torch.float16,
variant="fp16",
use_safetensors=True
)
pipe.to("cuda")

pipe = compile_pipe(pipe)

save_pipe(pipe, dir="cached_pipe")
```
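
As the `is_compiled` check in the `save_pipe` implementation further down this page suggests, only graphs that have already been compiled are written out, so the pipeline is normally run at least once before saving. A rough sketch of that ordering, continuing from the example above (prompt and step count are placeholders):
```python
# Assumed ordering: one warmup run triggers compilation, then the compiled
# graphs are persisted to "cached_pipe". Continues the save_pipe example above.
pipe("a cinematic photo of a lighthouse at dusk", num_inference_steps=30)
save_pipe(pipe, dir="cached_pipe")
```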

### Load compiled pipeline with `load_pipe`
```python
import torch
from diffusers import StableDiffusionXLPipeline
from onediffx import compile_pipe, load_pipe
pipe = StableDiffusionXLPipeline.from_pretrained(
"stabilityai/stable-diffusion-xl-base-1.0",
torch_dtype=torch.float16,
variant="fp16",
use_safetensors=True
)
pipe.to("cuda")

pipe = compile_pipe(pipe)

load_pipe(pipe, dir="cached_pipe")
```
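
Loading the cached graphs is intended to skip recompilation in a fresh process; after `load_pipe`, the pipeline is used as usual. A short illustrative continuation of the example above (the prompt is arbitrary):
```python
# With the cached graphs loaded, this first generation should avoid most of
# the compilation warmup. Continues the load_pipe example above.
image = pipe("an ink sketch of a mountain village", num_inference_steps=30).images[0]
image.save("village.png")
```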

## DeepCache speedup

### Run Stable Diffusion XL with OneDiffX
@@ -67,10 +67,10 @@ def compile_pipe(


def save_pipe(
-    pipe, dst_dir="cached_pipe", *, ignores=(), overwrite=True
+    pipe, dir="cached_pipe", *, ignores=(), overwrite=True
):
-    if not os.path.exists(dst_dir):
-        os.makedirs(dst_dir)
+    if not os.path.exists(dir):
+        os.makedirs(dir)
    filtered_parts = _filter_parts(ignores=ignores)
    for part in filtered_parts:
        obj = _recursive_getattr(pipe, part, None)
@@ -80,24 +80,24 @@ def save_pipe(
            and obj._deployable_module_dpl_graph is not None
            and obj.get_graph().is_compiled
        ):
-            if not overwrite and os.path.isfile(os.path.join(dst_dir, part)):
+            if not overwrite and os.path.isfile(os.path.join(dir, part)):
                logger.info(f"Compiled graph already exists for {part}, not overwriting it.")
                continue
            logger.info(f"Saving {part}")
-            obj.save_graph(os.path.join(dst_dir, part))
+            obj.save_graph(os.path.join(dir, part))


def load_pipe(
-    pipe, src_dir="cached_pipe", *, ignores=(),
+    pipe, dir="cached_pipe", *, ignores=(),
):
-    if not os.path.exists(src_dir):
+    if not os.path.exists(dir):
        return
    filtered_parts = _filter_parts(ignores=ignores)
    for part in filtered_parts:
        obj = _recursive_getattr(pipe, part, None)
-        if obj is not None and os.path.exists(os.path.join(src_dir, part)):
+        if obj is not None and os.path.exists(os.path.join(dir, part)):
            logger.info(f"Loading {part}")
-            obj.load_graph(os.path.join(src_dir, part))
+            obj.load_graph(os.path.join(dir, part))

    if "image_processor" not in ignores:
        logger.info("Patching image_processor")