Commit 9e1fc80c authored by AUTOMATIC1111, committed by GitHub

Merge pull request #15608 from drhead/patch-2

Add KL Optimal scheduler
parents e21b1e37 0edc04d1
@@ -61,6 +61,14 @@ def get_align_your_steps_sigmas(n, sigma_min, sigma_max, device='cpu'):
    return torch.FloatTensor(sigmas).to(device)
def kl_optimal(n, sigma_min, sigma_max, device):
    alpha_min = torch.arctan(torch.tensor(sigma_min, device=device))
    alpha_max = torch.arctan(torch.tensor(sigma_max, device=device))
    step_indices = torch.arange(n + 1, device=device)
    sigmas = torch.tan(step_indices / n * alpha_min + (1.0 - step_indices / n) * alpha_max)
    return sigmas
schedulers = [
    Scheduler('automatic', 'Automatic', None),
    Scheduler('uniform', 'Uniform', uniform, need_inner_model=True),
@@ -68,6 +76,7 @@ schedulers = [
    Scheduler('exponential', 'Exponential', k_diffusion.sampling.get_sigmas_exponential),
    Scheduler('polyexponential', 'Polyexponential', k_diffusion.sampling.get_sigmas_polyexponential, default_rho=1.0),
    Scheduler('sgm_uniform', 'SGM Uniform', sgm_uniform, need_inner_model=True, aliases=["SGMUniform"]),
    Scheduler('kl_optimal', 'KL Optimal', kl_optimal),
    Scheduler('align_your_steps', 'Align Your Steps', get_align_your_steps_sigmas),
]
...
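For context (not part of the diff): the added schedule interpolates linearly between arctan(sigma_max) and arctan(sigma_min) and maps back through tan, so the first sigma equals sigma_max and the last equals sigma_min. Below is a minimal standalone sketch of that behavior, assuming only PyTorch; the helper name kl_optimal_example and the sigma bounds are illustrative, not values taken from the webui or a real model.

import torch

def kl_optimal_example(n, sigma_min, sigma_max, device='cpu'):
    # Linear interpolation in arctan(sigma) space, mapped back with tan.
    alpha_min = torch.arctan(torch.tensor(sigma_min, device=device))
    alpha_max = torch.arctan(torch.tensor(sigma_max, device=device))
    t = torch.arange(n + 1, device=device) / n
    return torch.tan(t * alpha_min + (1.0 - t) * alpha_max)

# Illustrative bounds; in the webui the real values come from the loaded model's noise schedule.
sigmas = kl_optimal_example(10, sigma_min=0.0292, sigma_max=14.6146)
print(sigmas)  # 11 values, decreasing from sigma_max to sigma_min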