@@ -2,6 +2,7 @@
 import torch
 import k_diffusion
 import numpy as np
+from scipy import stats

 from modules import shared
@@ -115,6 +116,17 @@ def ddim_scheduler(n, sigma_min, sigma_max, inner_model, device):
     return torch.FloatTensor(sigs).to(device)


+def beta_scheduler(n, sigma_min, sigma_max, inner_model, device):
+    # From "Beta Sampling is All You Need" [arXiv:2407.12173] (Lee et al., 2024)
+    alpha = 0.6
+    beta = 0.6
+    timesteps = 1 - np.linspace(0, 1, n)
+    timesteps = [stats.beta.ppf(x, alpha, beta) for x in timesteps]
+    sigmas = [sigma_min + (x * (sigma_max - sigma_min)) for x in timesteps] + [0.0]
+    sigmas = torch.FloatTensor(sigmas).to(device)
+    return sigmas
+
+
 schedulers = [
     Scheduler('automatic', 'Automatic', None),
     Scheduler('uniform', 'Uniform', uniform, need_inner_model=True),
@@ -127,6 +139,7 @@ def ddim_scheduler(n, sigma_min, sigma_max, inner_model, device):
     Scheduler('simple', 'Simple', simple_scheduler, need_inner_model=True),
     Scheduler('normal', 'Normal', normal_scheduler, need_inner_model=True),
     Scheduler('ddim', 'DDIM', ddim_scheduler, need_inner_model=True),
+    Scheduler('beta', 'Beta', beta_scheduler, need_inner_model=True),
 ]

 schedulers_map = {**{x.name: x for x in schedulers}, **{x.label: x for x in schedulers}}
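
# --- Illustrative sketch (not part of the patch above) ----------------------
# A standalone demo of what the added beta_scheduler computes: an evenly spaced
# grid on [0, 1] is pushed through the Beta(0.6, 0.6) inverse CDF (scipy's ppf),
# which clusters steps near both ends of the sigma range. Note that in the
# patch, inner_model is accepted but unused; need_inner_model=True just keeps
# the call signature consistent with the other schedulers. The helper name
# `beta_sigmas` and the sigma_min/sigma_max values in the demo call below are
# illustrative assumptions, not taken from the patch or any specific model.
import numpy as np
import torch
from scipy import stats


def beta_sigmas(n, sigma_min, sigma_max, alpha=0.6, beta=0.6, device="cpu"):
    # Evenly spaced points in [0, 1], reversed so sigmas run from high to low.
    timesteps = 1 - np.linspace(0, 1, n)
    # Warp through the Beta(alpha, beta) inverse CDF; with alpha = beta = 0.6
    # the density is U-shaped, so points concentrate near 0 and 1.
    warped = stats.beta.ppf(timesteps, alpha, beta)
    # Rescale to [sigma_min, sigma_max] and append the trailing zero sigma
    # that k-diffusion samplers expect.
    sigmas = sigma_min + warped * (sigma_max - sigma_min)
    return torch.as_tensor(np.append(sigmas, 0.0), dtype=torch.float32, device=device)


print(beta_sigmas(10, sigma_min=0.03, sigma_max=14.6))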