merge __build__ and __enter__
erikbern committed Dec 20, 2023
1 parent 0b02172 commit d6f7437
Showing 1 changed file with 3 additions and 29 deletions.
32 changes: 3 additions & 29 deletions in 06_gpu_and_ml/stable_diffusion/stable_diffusion_cli.py
@@ -53,8 +53,6 @@
 # already inside the image.
 
 model_id = "runwayml/stable-diffusion-v1-5"
-cache_path = "/vol/cache"
-
 
 image = (
     Image.debian_slim(python_version="3.10")
@@ -100,29 +98,10 @@
 
 @stub.cls(image=image, gpu="A10G")
 class StableDiffusion:
-    def _download_models(self):
-        # Download scheduler configuration. Experiment with different schedulers
-        # to identify one that works best for your use-case.
+    def __enter__(self):
         scheduler = diffusers.DPMSolverMultistepScheduler.from_pretrained(
             model_id,
             subfolder="scheduler",
-            cache_dir=cache_path,
-        )
-        scheduler.save_pretrained(cache_path, safe_serialization=True)
-
-        # Downloads all other models.
-        pipe = diffusers.StableDiffusionPipeline.from_pretrained(
-            model_id,
-            revision="fp16",
-            torch_dtype=torch.float16,
-            cache_dir=cache_path,
-        )
-        pipe.save_pretrained(cache_path, safe_serialization=True)
-
-    def _initialize(self):
-        scheduler = diffusers.DPMSolverMultistepScheduler.from_pretrained(
-            cache_path,
-            subfolder="scheduler",
             solver_order=2,
             prediction_type="epsilon",
             thresholding=False,
@@ -133,19 +112,14 @@ def _initialize(self):
             device_map="auto",
         )
         self.pipe = diffusers.StableDiffusionPipeline.from_pretrained(
-            cache_path,
+            model_id,
             scheduler=scheduler,
             low_cpu_mem_usage=True,
             device_map="auto",
         )
         self.pipe.enable_xformers_memory_efficient_attention()
 
-    def __build__(self):
-        self._download_models()
-        self._initialize()
-
-    def __enter__(self):
-        self._initialize()
+    __build__ = __enter__
 
     @method()
     def run_inference(self, prompt: str, steps: int = 20, batch_size: int = 4):
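For reference, here is a minimal sketch of what the StableDiffusion class looks like once the two hooks are merged, assembled from the hunks above. The stub and image definitions, the placeholder app name, the scheduler keyword arguments hidden in the collapsed part of the diff, and the run_inference body are assumptions or omissions, not part of this commit.

# Sketch, not the full example file: the class after merging __build__ and __enter__.
# The stub/image setup and app name below are assumed for illustration; some scheduler
# kwargs and the run_inference body are collapsed in the diff and omitted here.
import diffusers
from modal import Image, Stub, method

stub = Stub("stable-diffusion-cli-sketch")  # hypothetical name
model_id = "runwayml/stable-diffusion-v1-5"

# The real image also pip-installs diffusers, torch, xformers, etc.
image = Image.debian_slim(python_version="3.10")


@stub.cls(image=image, gpu="A10G")
class StableDiffusion:
    def __enter__(self):
        # One code path now both downloads and initializes the models,
        # loading directly by model_id instead of the removed cache_path.
        scheduler = diffusers.DPMSolverMultistepScheduler.from_pretrained(
            model_id,
            subfolder="scheduler",
            solver_order=2,
            prediction_type="epsilon",
            thresholding=False,
        )
        self.pipe = diffusers.StableDiffusionPipeline.from_pretrained(
            model_id,
            scheduler=scheduler,
            low_cpu_mem_usage=True,
            device_map="auto",
        )
        self.pipe.enable_xformers_memory_efficient_attention()

    # Aliasing the build hook to the enter hook runs the same loader once at
    # image build time (baking the downloaded weights into the image) and
    # again, quickly from the warm local cache, when a container starts.
    __build__ = __enter__

    @method()
    def run_inference(self, prompt: str, steps: int = 20, batch_size: int = 4):
        ...  # body unchanged by this commit; omitted here

The net effect is that _download_models, _initialize, and the explicit cache_path all disappear: build time and container start share a single loader, with the default Hugging Face cache baked into the image presumably standing in for the removed cache directory.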
