From 7f63b05432fce533605025fbe6e3fb557bc3e56b Mon Sep 17 00:00:00 2001
From: Thushit Kumar R <101030274+Senume@users.noreply.github.com>
Date: Wed, 6 Dec 2023 00:20:33 +0530
Subject: [PATCH] Organized the code

---
 src/diffusers/schedulers/scheduling_euler_discrete.py | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/src/diffusers/schedulers/scheduling_euler_discrete.py b/src/diffusers/schedulers/scheduling_euler_discrete.py
index 20e006e8625a..e7087edd4a87 100644
--- a/src/diffusers/schedulers/scheduling_euler_discrete.py
+++ b/src/diffusers/schedulers/scheduling_euler_discrete.py
@@ -280,11 +280,7 @@ def set_timesteps(self, num_inference_steps: int, device: Union[str, torch.devic
             sigmas = self._convert_to_karras(in_sigmas=sigmas, num_inference_steps=self.num_inference_steps)
             timesteps = np.array([self._sigma_to_t(sigma, log_sigmas) for sigma in sigmas])
 
-        # Checking 'sigmas' type before datatype conversion
-        if isinstance(sigmas, np.ndarray):
-            sigmas = torch.from_numpy(sigmas).to(dtype=torch.float32, device=device)
-        else:
-            sigmas = sigmas.to(dtype=torch.float32, device=device)
+        sigmas = torch.from_numpy(sigmas).to(dtype=torch.float32, device=device)
 
         # TODO: Support the full EDM scalings for all prediction types and timestep types
         if self.config.timestep_type == "continuous" and self.config.prediction_type == "v_prediction":
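
Note (not part of the patch): a minimal sketch of the conversion the kept `+` line performs. It assumes `sigmas` is a NumPy array at that point in `set_timesteps`, which `torch.from_numpy` then turns into a float32 tensor on the requested device. The sigma values and the "cpu" device below are illustrative assumptions, not taken from the scheduler.

    import numpy as np
    import torch

    # Illustrative stand-in for `sigmas`; the sketch assumes it is already a
    # NumPy array when the conversion runs, as the `+` line in the hunk does.
    sigmas = np.linspace(14.6, 0.03, num=10)

    # The single conversion kept by the patch: NumPy array -> float32 torch
    # tensor on the target device ("cpu" chosen here only for the example).
    sigmas = torch.from_numpy(sigmas).to(dtype=torch.float32, device="cpu")

    assert isinstance(sigmas, torch.Tensor) and sigmas.dtype == torch.float32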