[dicp][tops] Support stable-diffusion in tops (#568)
Commit 934f32e (1 parent: 6b76f2e)
Showing 7 changed files with 58 additions and 93 deletions.
@@ -1,7 +1,9 @@
 #!/usr/bin/env bash

-LLAMA_MODEL_DIR=$1
-STABLE_DIFFUSION_MODEL_DIR=$2
+export DIPU_MOCK_CUDA=True
+export LLAMA_MODEL_DIR=$1
+export LLAMA_FINETUNE_DIR=$2
+export STABLE_DIFFUSION_MODEL_DIR=$3
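With this change the test entry script reads the stable-diffusion model directory from its third positional argument instead of the second, and also exports LLAMA_FINETUNE_DIR. A minimal invocation sketch; the script name and model paths below are hypothetical and not part of this commit:

# Hypothetical script name and paths; only the argument order
# ($1 llama model dir, $2 llama finetune dir, $3 stable-diffusion model dir)
# comes from the diff above.
bash run_dicp_tests.sh /models/llama /models/llama-finetune /models/stable-diffusion-2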
Binary file not shown.
Large diffs are not rendered by default.
@@ -0,0 +1,41 @@
import pytest
import torch
import os
import torch._dynamo as dynamo
from ..common import utils
import torch_dipu
from diffusers import StableDiffusionPipeline

dynamo.config.cache_size_limit = 128
utils.update_dynamo_config(False)
device = utils.get_device()
torch_dipu.dipu.set_device(device)
models_dir = os.environ.get("STABLE_DIFFUSION_MODEL_DIR")
assert models_dir is not None


class TestStableDiffusion():
    @pytest.mark.parametrize("model_path", [f"{models_dir}/stable-diffusion-2"])
    @pytest.mark.parametrize("num_inference_steps", [50])
    def test_inference(
        self,
        model_path: str,
        backend: str,
        dynamic: bool,
        num_inference_steps: int
    ):
        prompt = "A photo of an astronaut riding a horse on mars."
        utils.update_dynamo_config(dynamic=dynamic)
        torch_dipu.dipu.set_device(device)

        # Compile the text encoder and UNet with the dicp backend, then run the pipeline.
        dicp_pipe = StableDiffusionPipeline.from_pretrained(model_path).to(device)
        dicp_pipe.text_encoder = torch.compile(dicp_pipe.text_encoder, backend=backend)
        dicp_pipe.unet = torch.compile(dicp_pipe.unet, backend=backend)
        dicp_image = dicp_pipe(prompt, num_inference_steps=num_inference_steps).images[0]
        # Compare the generated image against the stored reference output for this backend.
        if backend == "ascendgraph":
            standard_output = torch.load("stable_diffusion/ascendgraph_output.pt")
        elif backend == "topsgraph":
            standard_output = torch.load("stable_diffusion/topsgraph_output.pt")
        else:
            raise ValueError("backend should be in (ascendgraph, topsgraph)")
        dicp_output = torch.tensor(list(dicp_image.getdata()))
        assert torch.allclose(dicp_output, standard_output, equal_nan=True)
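The backend and dynamic arguments are not parametrized in the test itself, so they are presumably supplied by shared pytest fixtures elsewhere in the suite. A minimal conftest.py sketch under that assumption; the option names, defaults, and wiring below are illustrative and not part of this commit:

# conftest.py (hypothetical sketch; the real dicp test suite may wire these differently)
import pytest

def pytest_addoption(parser):
    parser.addoption("--backend", type=str, default="topsgraph",
                     help="dicp compile backend, e.g. topsgraph or ascendgraph")
    parser.addoption("--dynamic", action="store_true",
                     help="enable dynamic-shape dynamo compilation")

@pytest.fixture
def backend(request):
    return request.config.getoption("--backend")

@pytest.fixture
def dynamic(request):
    return request.config.getoption("--dynamic")

With such a conftest in place, the test could be run with something like pytest --backend topsgraph plus the path to the test file (not shown in this commit), after exporting STABLE_DIFFUSION_MODEL_DIR as in the shell script above.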